forked from borgmatic-collective/borgmatic
Compare commits
207 Commits
Author | SHA1 | Date
---|---|---
Dan Helfman | 5e15c9f2bc | |
Dan Helfman | 442641f9f6 | |
Dan Helfman | f67c544be6 | |
Dan Helfman | 437fd4dbae | |
Dan Helfman | 36873252d6 | |
Dan Helfman | 1ef82a27fa | |
Dan Helfman | 6837dcbf42 | |
Dan Helfman | c657764367 | |
Dan Helfman | f79286fc91 | |
Dan Helfman | 694d376d15 | |
Dan Helfman | ab4c08019c | |
Dan Helfman | fd39f54df7 | |
Dan Helfman | ca7e18bb29 | |
Dan Helfman | 6975a5b155 | |
Dan Helfman | b627d00595 | |
Dan Helfman | 9bd8f1a6df | |
Javier Paniagua | faf682ca35 | |
Dan Helfman | 6aeb74550d | |
Dan Helfman | 89500df429 | |
Dan Helfman | 82b072d0b7 | |
Dan Helfman | 018c0296fd | |
Dan Helfman | 9c42e7e817 | |
Dan Helfman | 953277a066 | |
Dan Helfman | e2002b5488 | |
Dan Helfman | c9742e1d04 | |
Dan Helfman | 906da838ef | |
Dan Helfman | d7f1c10c8c | |
Dan Helfman | e8e4d17168 | |
Dan Helfman | a31ce337e9 | |
Dan Helfman | 902730df46 | |
Dan Helfman | c969c822ee | |
Dan Helfman | c31702d092 | |
Dan Helfman | ba8fbe7a44 | |
Dan Helfman | 2774c2e4c0 | |
Dan Helfman | ae036aebd7 | |
LaserEyess | 2e9f70d496 | |
Dan Helfman | 90be5b84b1 | |
Dan Helfman | 80e95f20a3 | |
Dan Helfman | ac7c7d4036 | |
Dan Helfman | 858b0b9fbe | |
Dan Helfman | 9cc043f60e | |
Dan Helfman | 276a27d485 | |
Dan Helfman | 679bb839d7 | |
Dan Helfman | 9e64d847ef | |
Dan Helfman | 61fb275896 | |
Dan Helfman | ca0c79c93c | |
Dan Helfman | 87c97b7568 | |
Dan Helfman | 80b8c25bba | |
Dan Helfman | d1837cd1d3 | |
Dan Helfman | c46f2b8508 | |
Dan Helfman | a274c0dbf7 | |
Dan Helfman | ef7e95e22a | |
Dan Helfman | 3be99de5b1 | |
Dan Helfman | e7b7560477 | |
Dan Helfman | 317dc7fbce | |
Dan Helfman | 97fad15009 | |
Dan Helfman | 462326406e | |
Dan Helfman | bbdf4893d1 | |
Dan Helfman | ef6617cfe6 | |
Dan Helfman | dbef0a440f | |
Dan Helfman | 22628ba5d4 | |
Dan Helfman | 8576ac86b9 | |
Dan Helfman | 540f9f6b72 | |
Dan Helfman | f9d7faf884 | |
Dan Helfman | 7dee6194a2 | |
Dan Helfman | 68f9c1b950 | |
Dan Helfman | 43d711463c | |
Dan Helfman | 00255a2437 | |
Dan Helfman | b40e9b7da2 | |
Dan Helfman | 89d201c8ff | |
Dan Helfman | f47c98c4a5 | |
Dan Helfman | 3b6ed06686 | |
Dan Helfman | 57009e22b5 | |
Dan Helfman | 3ab7a3b64a | |
Dan Helfman | 596dd49cf5 | |
Dan Helfman | 28d847b8b1 | |
Dan Helfman | 2a1c6b1477 | |
Dan Helfman | 30abd0e3de | |
Dan Helfman | f36e38ec20 | |
Dan Helfman | d807ce095e | |
Dan Helfman | 7626fe1189 | |
Dan Helfman | cc04bf57df | |
Dan Helfman | cce6d56661 | |
Dan Helfman | a05d0f378e | |
Dan Helfman | 94321aec7a | |
Dan Helfman | 4a55749bd2 | |
Dan Helfman | 2898e63166 | |
Dan Helfman | c7176bd00a | |
Dan Helfman | 647ecdac29 | |
Dan Helfman | e7a8acfb96 | |
Dan Helfman | 622caa0c21 | |
Dan Helfman | 22149c6401 | |
Dan Helfman | 9aece3936a | |
Dan Helfman | c7e4e6f6c9 | |
Dan Helfman | bcad0de1a4 | |
Uli | 5c6407047f | |
Dan Helfman | 6ddae20fa1 | |
Dan Helfman | 23feac2f4c | |
Dan Helfman | 16066942e3 | |
Jelle @ Samson-IT | f7c8e89a9f | |
Jelle @ Samson-IT | ba377952fd | |
Dan Helfman | e85d551eac | |
Dan Helfman | 2b23a63a08 | |
Dan Helfman | c0f48e1071 | |
Dan Helfman | 6005426684 | |
Dan Helfman | 673ed1a2d3 | |
Dan Helfman | 992f62edd2 | |
Dan Helfman | f1ffa1da1d | |
Dan Helfman | 457ed80744 | |
Dan Helfman | 1fc028ffae | |
Dan Helfman | 10723efc68 | |
Dan Helfman | 2e0b2a308f | |
Dan Helfman | bd4d109009 | |
Dan Helfman | ae25386336 | |
Dan Helfman | d929313d45 | |
Dan Helfman | d372a86fe6 | |
Dan Helfman | e306f03e1d | |
Dan Helfman | 8336165f23 | |
Dan Helfman | c664c6b17b | |
Sébastien MB | b63c854509 | |
Dan Helfman | aa013af25e | |
Dan Helfman | cc32f0018b | |
Dan Helfman | dfc4db1860 | |
Dan Helfman | 35706604ea | |
Dan Helfman | 6d76e8e5cb | |
Dan Helfman | aecb6fcd74 | |
Dan Helfman | ea45f6c4c8 | |
Sébastien MB | 97b5cd089d | |
Dan Helfman | f2c2f3139e | |
Dan Helfman | dc4e7093e5 | |
Dan Helfman | b6f1025ecb | |
Dan Helfman | 65b2fe86c6 | |
Dan Helfman | 0e90a80680 | |
Dan Helfman | 7648bcff39 | |
Gavin Chappell | a8b8d507b6 | |
Dan Helfman | 3561c93d74 | |
Dan Helfman | 331a503a25 | |
Dan Helfman | 9aefb5179f | |
Dan Helfman | d14f22e121 | |
Dan Helfman | b6893f6455 | |
Dan Helfman | 80ec3e7d97 | |
Dan Helfman | cd834311eb | |
Dan Helfman | d751cceeb0 | |
Dan Helfman | ce78b07e4b | |
adidalal | 87f3c50931 | |
Dan Helfman | 8e9e06afe6 | |
Dan Helfman | 2bc91ac3d2 | |
Dan Helfman | 5b615d51a4 | |
Dan Helfman | c7f5d5fd0b | |
Dan Helfman | 6ef7538eb0 | |
Dan Helfman | 8fa90053cf | |
Dan Helfman | b3682b61d1 | |
Dan Helfman | ad0e2e0d7c | |
Dan Helfman | 6629f40cab | |
Dan Helfman | e76bfa555f | |
Dan Helfman | 8ddb7268eb | |
Dan Helfman | cb5fe02ebd | |
Dan Helfman | 77b84f8a48 | |
Dan Helfman | 691ec96909 | |
Steve Atwell | 29b4666205 | |
Dan Helfman | 316a22701f | |
Dan Helfman | be59a3e574 | |
Dan Helfman | 37327379bc | |
Dan Helfman | 22c2f13611 | |
polyzen | 8708ca07f4 | |
Dan Helfman | 634d9e4946 | |
Dan Helfman | 54933ebef5 | |
Dan Helfman | 157e59ac88 | |
Dan Helfman | 666f0dd751 | |
Dan Helfman | 8b179e4647 | |
Dan Helfman | 865eff7d98 | |
Dan Helfman | b9741f4d0b | |
Dan Helfman | 02781662f8 | |
Dan Helfman | 32a1043468 | |
Dan Helfman | 3e4aeec649 | |
Dan Helfman | b98b827594 | |
Dan Helfman | 255cc6ec23 | |
Dan Helfman | 51fc37d57a | |
Dan Helfman | 1921f55a9d | |
Dan Helfman | fbd381fcc1 | |
Dan Helfman | cd88f9f2ea | |
Dan Helfman | 788281cfb9 | |
Dan Helfman | cd234b689d | |
Dan Helfman | 92354a77ee | |
Dan Helfman | 48ff3e70d1 | |
Dan Helfman | 7e9adfb899 | |
Dan Helfman | e238e256f7 | |
Daniel Lo Nigro | 3ecb92a8d2 | |
Dan Helfman | d58d450628 | |
Dan Helfman | dee9c6e293 | |
Dan Helfman | 897c4487de | |
Dan Helfman | 48b50b5209 | |
Dan Helfman | 13bae8c23b | |
Dan Helfman | 4a48e6aa04 | |
Dan Helfman | 525266ede6 | |
Dan Helfman | d045eb55ac | |
Dan Helfman | 0e6b425ac5 | |
Dan Helfman | bdc26f2117 | |
Dan Helfman | ed7fe5c6d0 | |
Dan Helfman | cbce6707f4 | |
Dan Helfman | e40e726687 | |
Dan Helfman | 0c027a3050 | |
Dan Helfman | 9f44bbad65 | |
Dan Helfman | 413a079f51 | |
gerdneuman | 6f3accf691 | |
Dan Helfman | 5b3cfc542d | |
Dan Helfman | c838c1d11b |

@@ -42,7 +42,9 @@ steps:
       from_secret: docker_username
     password:
       from_secret: docker_password
-    repo: witten/borgmatic-docs
+    registry: projects.torsion.org
+    repo: projects.torsion.org/borgmatic-collective/borgmatic
+    tags: docs
     dockerfile: docs/Dockerfile

 trigger:
@@ -23,8 +23,7 @@ module.exports = function(eleventyConfig) {
   }
 };
 let markdownItAnchorOptions = {
-  permalink: true,
-  permalinkClass: "direct-link"
+  permalink: markdownItAnchor.permalink.headerLink()
 };

 eleventyConfig.setLibrary(
NEWS (184 lines changed)

@@ -1,3 +1,187 @@
+1.7.6.dev0
+ * #438, #560: Optionally dump "all" PostgreSQL databases to separate files instead of one combined
+   dump file, allowing more convenient restores of individual databases. You can enable this by
+   specifying the database dump "format" option when the database is named "all".
+ * #602: Fix logs that interfere with JSON output by making warnings go to stderr instead of stdout.
+ * #622: Fix traceback when include merging on ARM64.
+
+1.7.5
+ * #311: Override PostgreSQL dump/restore commands via configuration options.
+ * #604: Fix traceback when a configuration section is present but lacking any options.
+ * #607: Clarify documentation examples for include merging and deep merging.
+ * #611: Fix "data" consistency check to support "check_last" and consistency "prefix" options.
+ * #613: Clarify documentation about multiple repositories and separate configuration files.
+
+1.7.4
+ * #596: Fix special file detection erroring when broken symlinks are encountered.
+ * #597, #598: Fix regression in which "check" action errored on certain systems ("Cannot determine
+   Borg repository ID").
+
+1.7.3
+ * #357: Add "break-lock" action for removing any repository and cache locks leftover from Borg
+   aborting.
+ * #360: To prevent Borg hangs, unconditionally delete stale named pipes before dumping databases.
+ * #587: When database hooks are enabled, auto-exclude special files from a "create" action to
+   prevent Borg from hanging. You can override/prevent this behavior by explicitly setting the
+   "read_special" option to true.
+ * #587: Warn when ignoring a configured "read_special" value of false, as true is needed when
+   database hooks are enabled.
+ * #589: Update sample systemd service file to allow system "idle" (e.g. a video monitor turning
+   off) while borgmatic is running.
+ * #590: Fix for potential data loss (data not getting backed up) when the "patterns_from" option
+   was used with "source_directories" (or the "~/.borgmatic" path existed, which got injected into
+   "source_directories" implicitly). The fix is for borgmatic to convert "source_directories" into
+   patterns whenever "patterns_from" is used, working around a Borg bug:
+   https://github.com/borgbackup/borg/issues/6994
+ * #590: In "borgmatic create --list" output, display which files get excluded from the backup due
+   to patterns or excludes.
+ * #591: Add support for Borg 2's "--match-archives" flag. This replaces "--glob-archives", which
+   borgmatic now treats as an alias for "--match-archives". But note that the two flags have
+   slightly different syntax. See the Borg 2 changelog for more information:
+   https://borgbackup.readthedocs.io/en/2.0.0b3/changes.html#version-2-0-0b3-2022-10-02
+ * Fix for "borgmatic --archive latest" not finding the latest archive when a verbosity is set.
+
+1.7.2
+ * #577: Fix regression in which "borgmatic info --archive ..." showed repository info instead of
+   archive info with Borg 1.
+ * #582: Fix hang when database hooks are enabled and "patterns" contains a parent directory of
+   "~/.borgmatic".
+
+1.7.1
+ * #542: Make the "source_directories" option optional. This is useful for "check"-only setups or
+   using "patterns" exclusively.
+ * #574: Fix for potential data loss (data not getting backed up) when the "patterns" option was
+   used with "source_directories" (or the "~/.borgmatic" path existed, which got injected into
+   "source_directories" implicitly). The fix is for borgmatic to convert "source_directories" into
+   patterns whenever "patterns" is used, working around a Borg bug:
+   https://github.com/borgbackup/borg/issues/6994
+
+1.7.0
+ * #463: Add "before_actions" and "after_actions" command hooks that run before/after all the
+   actions for each repository. These new hooks are a good place to run per-repository steps like
+   mounting/unmounting a remote filesystem.
+ * #463: Update documentation to cover per-repository configurations:
+   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/
+ * #557: Support for Borg 2 while still working with Borg 1. This includes new borgmatic actions
+   like "rcreate" (replaces "init"), "rlist" (list archives in repository), "rinfo" (show repository
+   info), and "transfer" (for upgrading Borg repositories). For the most part, borgmatic tries to
+   smooth over differences between Borg 1 and 2 to make your upgrade process easier. However, there
+   are still a few cases where Borg made breaking changes. See the Borg 2.0 changelog for more
+   information: https://www.borgbackup.org/releases/borg-2.0.html
+ * #557: If you install Borg 2, you'll need to manually upgrade your existing Borg 1 repositories
+   before use. Note that Borg 2 stable is not yet released as of this borgmatic release, so don't
+   use Borg 2 for production until it is! See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/upgrade/#upgrading-borg
+ * #557: Rename several configuration options to match Borg 2: "remote_rate_limit" is now
+   "upload_rate_limit", "numeric_owner" is "numeric_ids", and "bsd_flags" is "flags". borgmatic
+   still works with the old options.
+ * #557: Remote repository paths without the "ssh://" syntax are deprecated but still supported for
+   now. Remote repository paths containing "~" are deprecated in borgmatic and no longer work in
+   Borg 2.
+ * #557: Omitting the "--archive" flag on the "list" action is deprecated when using Borg 2. Use
+   the new "rlist" action instead.
+ * #557: The "--dry-run" flag can now be used with the "rcreate"/"init" action.
+ * #565: Fix handling of "repository" and "data" consistency checks to prevent invalid Borg flags.
+ * #566: Modify "mount" and "extract" actions to require the "--repository" flag when multiple
+   repositories are configured.
+ * #571: BREAKING: Remove old-style command-line action flags like "--create, "--list", etc. If
+   you're already using actions like "create" and "list" instead, this change should not affect you.
+ * #571: BREAKING: Rename "--files" flag on "prune" action to "--list", as it lists archives, not
+   files.
+ * #571: Add "--list" as alias for "--files" flag on "create" and "export-tar" actions.
+ * Add support for disabling TLS verification in Healthchecks monitoring hook with "verify_tls"
+   option.
+
+1.6.6
+ * #559: Update documentation about configuring multiple consistency checks or multiple databases.
+ * #560: Fix all database hooks to error when the requested database to restore isn't present in the
+   Borg archive.
+ * #561: Fix command-line "--override" flag to continue supporting old configuration file formats.
+ * #563: Fix traceback with "create" action and "--json" flag when a database hook is configured.
+
+1.6.5
+ * #553: Fix logging to include the full traceback when Borg experiences an internal error, not just
+   the first few lines.
+ * #554: Fix all monitoring hooks to warn if the server returns an HTTP 4xx error. This can happen
+   with Healthchecks, for instance, when using an invalid ping URL.
+ * #555: Fix environment variable plumbing so options like "encryption_passphrase" and
+   "encryption_passcommand" in one configuration file aren't used for other configuration files.
+
+1.6.4
+ * #546, #382: Keep your repository passphrases and database passwords outside of borgmatic's
+   configuration file with environment variable interpolation. See the documentation for more
+   information: https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/
+
+1.6.3
+ * #541: Add "borgmatic list --find" flag for searching for files across multiple archives, useful
+   for hunting down that file you accidentally deleted so you can extract it. See the documentation
+   for more information:
+   https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#searching-for-a-file
+ * #543: Add a monitoring hook for sending push notifications via ntfy. See the documentation for
+   more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook
+ * Fix Bash completion script to no longer alter your shell's settings (complain about unset
+   variables or error on pipe failures).
+ * Deprecate "borgmatic list --successful" flag, as listing only non-checkpoint (successful)
+   archives is now the default in newer versions of Borg.
+
+1.6.2
+ * #523: Reduce the default consistency check frequency and support configuring the frequency
+   independently for each check. Also add "borgmatic check --force" flag to ignore configured
+   frequencies. See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#check-frequency
+ * #536: Fix generate-borgmatic-config to support more complex schema changes like the new
+   Healthchecks configuration options when the "--source" flag is used.
+ * #538: Add support for "borgmatic borg debug" command.
+ * #539: Add "generate-borgmatic-config --overwrite" flag to replace an existing destination file.
+ * Add Bash completion script so you can tab-complete the borgmatic command-line. See the
+   documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/set-up-backups/#shell-completion
+
+1.6.1
+ * #294: Add Healthchecks monitoring hook "ping_body_limit" option to configure how many bytes of
+   logs to send to the Healthchecks server.
+ * #402: Remove the error when "archive_name_format" is specified but a retention prefix isn't.
+ * #420: Warn when an unsupported variable is used in a hook command.
+ * #439: Change connection failures for monitoring hooks (Healthchecks, Cronitor, PagerDuty, and
+   Cronhub) to be warnings instead of errors. This way, the monitoring system failing does not block
+   backups.
+ * #460: Add Healthchecks monitoring hook "send_logs" option to enable/disable sending borgmatic
+   logs to the Healthchecks server.
+ * #525: Add Healthchecks monitoring hook "states" option to only enable pinging for particular
+   monitoring states (start, finish, fail).
+ * #528: Improve the error message when a configuration override contains an invalid value.
+ * #531: BREAKING: When deep merging common configuration, merge colliding list values by appending
+   them. Previously, one list replaced the other.
+ * #532: When a configuration include is a relative path, load it from either the current working
+   directory or from the directory containing the file doing the including. Previously, only the
+   working directory was used.
+ * Add a randomized delay to the sample systemd timer to spread out the load on a server.
+ * Change the configuration format for borgmatic monitoring hooks (Healthchecks, Cronitor,
+   PagerDuty, and Cronhub) to specify the ping URL / integration key as a named option. The intent
+   is to support additional options (some in this release). This change is backwards-compatible.
+ * Add emojis to documentation table of contents to make it easier to find particular how-to and
+   reference guides at a glance.
+
+1.6.0
+ * #381: BREAKING: Greatly simplify configuration file reuse by deep merging when including common
+   configuration. See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#include-merging
+ * #473: BREAKING: Instead of executing "before" command hooks before all borgmatic actions run (and
+   "after" hooks after), execute these hooks right before/after the corresponding action. E.g.,
+   "before_check" now runs immediately before the "check" action. This better supports running
+   timing-sensitive tasks like pausing containers. Side effect: before/after command hooks now run
+   once for each configured repository instead of once per configuration file. Additionally, the
+   "repositories" interpolated variable has been changed to "repository", containing the path to the
+   current repository for the hook. See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/
+ * #513: Add mention of sudo's "secure_path" option to borgmatic installation documentation.
+ * #515: Fix "borgmatic borg key ..." to pass parameters to Borg in the correct order.
+ * #516: Fix handling of TERM signal to exit borgmatic, not just forward the signal to Borg.
+ * #517: Fix borgmatic exit code (so it's zero) when initial Borg calls fail but later retries
+   succeed.
+ * Change Healthchecks logs truncation size from 10k bytes to 100k bytes, corresponding to that
+   same change on Healthchecks.io.
+
 1.5.24
  * #431: Add "working_directory" option to support source directories with relative paths.
  * #444: When loading a configuration file that is unreadable due to file permissions, warn instead
README.md (61 lines changed)

@@ -24,8 +24,8 @@ location:
     # Paths of local or remote repositories to backup to.
     repositories:
-        - 1234@usw-s001.rsync.net:backups.borg
-        - k8pDxu32@k8pDxu32.repo.borgbase.com:repo
+        - ssh://1234@usw-s001.rsync.net/./backups.borg
+        - ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
         - /var/lib/backups/local.borg

 retention:
@@ -37,8 +37,9 @@ retention:
 consistency:
     # List of checks to run to validate your backups.
     checks:
-        - repository
-        - archives
+        - name: repository
+        - name: archives
+          frequency: 2 weeks

 hooks:
     # Custom preparation scripts to run.
@@ -70,6 +71,7 @@ borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).
 <a href="https://cronitor.io/"><img src="docs/static/cronitor.png" alt="Cronitor" height="60px" style="margin-bottom:20px;"></a>
 <a href="https://cronhub.io/"><img src="docs/static/cronhub.png" alt="Cronhub" height="60px" style="margin-bottom:20px;"></a>
 <a href="https://www.pagerduty.com/"><img src="docs/static/pagerduty.png" alt="PagerDuty" height="60px" style="margin-bottom:20px;"></a>
+<a href="https://ntfy.sh/"><img src="docs/static/ntfy.png" alt="ntfy" height="60px" style="margin-bottom:20px;"></a>
 <a href="https://www.borgbase.com/?utm_source=borgmatic"><img src="docs/static/borgbase.png" alt="BorgBase" height="60px" style="margin-bottom:20px;"></a>

@@ -102,23 +104,38 @@ offerings, but do not currently fund borgmatic development or hosting.

 ### Issues

-You've got issues? Or an idea for a feature enhancement? We've got an [issue
-tracker](https://projects.torsion.org/borgmatic-collective/borgmatic/issues). In order to
-create a new issue or comment on an issue, you'll need to [login
-first](https://projects.torsion.org/user/login). Note that you can login with
-an existing GitHub account if you prefer.
-
-If you'd like to chat with borgmatic developers or users, head on over to the
-`#borgmatic` IRC channel on Libera Chat, either via <a
-href="https://web.libera.chat/#borgmatic">web chat</a> or a
-native <a href="ircs://irc.libera.chat:6697">IRC client</a>. If you
-don't get a response right away, please hang around a while—or file a ticket
-instead.
+Are you experiencing an issue with borgmatic? Or do you have an idea for a
+feature enhancement? Head on over to our [issue
+tracker](https://projects.torsion.org/borgmatic-collective/borgmatic/issues).
+In order to create a new issue or add a comment, you'll need to
+[register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic)
+first. If you prefer to use an existing GitHub account, you can skip account
+creation and [login directly](https://projects.torsion.org/user/login).

 Also see the [security
 policy](https://torsion.org/borgmatic/docs/security-policy/) for any security
 issues.


+### Social
+
+Check out the [Borg subreddit](https://www.reddit.com/r/BorgBackup/) for
+general Borg and borgmatic discussion and support.
+
+Also follow [borgmatic on Mastodon](https://fosstodon.org/@borgmatic).
+
+
+### Chat
+
+To chat with borgmatic developers or users, check out the `#borgmatic`
+IRC channel on Libera Chat, either via <a
+href="https://web.libera.chat/#borgmatic">web chat</a> or a native <a
+href="ircs://irc.libera.chat:6697">IRC client</a>. If you don't get a response
+right away, please hang around a while—or file a ticket instead.
+
+
+### Other
+
 Other questions or comments? Contact
 [witten@torsion.org](mailto:witten@torsion.org).

@@ -133,10 +150,14 @@ borgmatic is licensed under the GNU General Public License version 3 or any
 later version.

 If you'd like to contribute to borgmatic development, please feel free to
-submit a [Pull Request](https://projects.torsion.org/borgmatic-collective/borgmatic/pulls)
-or open an [issue](https://projects.torsion.org/borgmatic-collective/borgmatic/issues) first
-to discuss your idea. We also accept Pull Requests on GitHub, if that's more
-your thing. In general, contributions are very welcome. We don't bite!
+submit a [Pull
+Request](https://projects.torsion.org/borgmatic-collective/borgmatic/pulls) or
+open an
+[issue](https://projects.torsion.org/borgmatic-collective/borgmatic/issues) to
+discuss your idea. Note that you'll need to
+[register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic)
+first. We also accept Pull Requests on GitHub, if that's more your thing. In
+general, contributions are very welcome. We don't bite!

 Also, please check out the [borgmatic development
 how-to](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/) for
@@ -1,45 +1,68 @@
 import logging

-from borgmatic.borg.flags import make_flags
+import borgmatic.logger
+from borgmatic.borg import environment, flags
 from borgmatic.execute import execute_command

 logger = logging.getLogger(__name__)


 REPOSITORYLESS_BORG_COMMANDS = {'serve', None}
+BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'}
+BORG_SUBCOMMANDS_WITHOUT_REPOSITORY = (('debug', 'info'), ('debug', 'convert-profile'), ())


 def run_arbitrary_borg(
-    repository, storage_config, options, archive=None, local_path='borg', remote_path=None
+    repository,
+    storage_config,
+    local_borg_version,
+    options,
+    archive=None,
+    local_path='borg',
+    remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, a sequence of arbitrary
-    command-line Borg options, and an optional archive name, run an arbitrary Borg command on the
-    given repository/archive.
+    Given a local or remote repository path, a storage config dict, the local Borg version, a
+    sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary
+    Borg command on the given repository/archive.
     '''
+    borgmatic.logger.add_custom_log_levels()
     lock_wait = storage_config.get('lock_wait', None)

     try:
         options = options[1:] if options[0] == '--' else options
-        borg_command = options[0]
-        command_options = tuple(options[1:])
+
+        # Borg commands like "key" have a sub-command ("export", etc.) that must follow it.
+        command_options_start_index = 2 if options[0] in BORG_SUBCOMMANDS_WITH_SUBCOMMANDS else 1
+        borg_command = tuple(options[:command_options_start_index])
+        command_options = tuple(options[command_options_start_index:])
     except IndexError:
-        borg_command = None
+        borg_command = ()
         command_options = ()

-    repository_archive = '::'.join((repository, archive)) if repository and archive else repository
+    if borg_command in BORG_SUBCOMMANDS_WITHOUT_REPOSITORY:
+        repository_archive_flags = ()
+    elif archive:
+        repository_archive_flags = flags.make_repository_archive_flags(
+            repository, archive, local_borg_version
+        )
+    else:
+        repository_archive_flags = flags.make_repository_flags(repository, local_borg_version)

     full_command = (
         (local_path,)
-        + ((borg_command,) if borg_command else ())
-        + ((repository_archive,) if borg_command and repository_archive else ())
+        + borg_command
+        + repository_archive_flags
         + command_options
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
-        + make_flags('remote-path', remote_path)
-        + make_flags('lock-wait', lock_wait)
+        + flags.make_flags('remote-path', remote_path)
+        + flags.make_flags('lock-wait', lock_wait)
     )

     return execute_command(
-        full_command, output_log_level=logging.WARNING, borg_local_path=local_path,
+        full_command,
+        output_log_level=logging.ANSWER,
+        borg_local_path=local_path,
+        extra_environment=environment.make_environment(storage_config),
    )
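The sub-command handling is the subtle part of this change: Borg commands like `key export` are two tokens, and the repository flags must come after both. Here is a minimal, self-contained sketch (not borgmatic's actual API) replaying the splitting logic above:

```python
# Minimal sketch of the sub-command splitting in run_arbitrary_borg() above.
BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'}

def split_borg_options(options):
    # Strip a leading "--" separator, as run_arbitrary_borg() does.
    options = options[1:] if options[0] == '--' else options

    # "key" and "debug" carry a sub-command ("export", "info", etc.) that must
    # stay attached to the command rather than being treated as an option.
    start = 2 if options[0] in BORG_SUBCOMMANDS_WITH_SUBCOMMANDS else 1
    return tuple(options[:start]), tuple(options[start:])

assert split_borg_options(['list', '--short']) == (('list',), ('--short',))
assert split_borg_options(['key', 'export', '--paper']) == (('key', 'export'), ('--paper',))
```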
@@ -0,0 +1,31 @@
+import logging
+
+from borgmatic.borg import environment, flags
+from borgmatic.execute import execute_command
+
+logger = logging.getLogger(__name__)
+
+
+def break_lock(
+    repository, storage_config, local_borg_version, local_path='borg', remote_path=None,
+):
+    '''
+    Given a local or remote repository path, a storage configuration dict, the local Borg version,
+    and optional local and remote Borg paths, break any repository and cache locks leftover from Borg
+    aborting.
+    '''
+    umask = storage_config.get('umask', None)
+    lock_wait = storage_config.get('lock_wait', None)
+
+    full_command = (
+        (local_path, 'break-lock')
+        + (('--remote-path', remote_path) if remote_path else ())
+        + (('--umask', str(umask)) if umask else ())
+        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
+        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+        + flags.make_repository_flags(repository, local_borg_version)
+    )
+
+    borg_environment = environment.make_environment(storage_config)
+    execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment)
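For a sense of what this new "break-lock" action ends up executing, here is a hypothetical trace of the command assembly, assuming a config with a lock_wait of 60, no umask, no remote path, and a Borg 1 version (where make_repository_flags() appears to pass the repository positionally, judging by the other hunks in this commit range):

```python
# Hypothetical trace of the tuple break_lock() assembles under the
# assumptions stated above; not a call into borgmatic itself.
local_path = 'borg'
repository = '/var/lib/backups/local.borg'
lock_wait = 60

full_command = (
    (local_path, 'break-lock')
    + (('--lock-wait', str(lock_wait)) if lock_wait else ())
    + (repository,)  # assumed Borg 1 output of make_repository_flags()
)
assert full_command == ('borg', 'break-lock', '--lock-wait', '60', '/var/lib/backups/local.borg')
```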
@@ -1,48 +1,155 @@
+import argparse
+import datetime
+import json
 import logging
+import os
+import pathlib

-from borgmatic.borg import extract
+from borgmatic.borg import environment, extract, feature, flags, rinfo, state
 from borgmatic.execute import DO_NOT_CAPTURE, execute_command

-DEFAULT_CHECKS = ('repository', 'archives')
+DEFAULT_CHECKS = (
+    {'name': 'repository', 'frequency': '1 month'},
+    {'name': 'archives', 'frequency': '1 month'},
+)
 DEFAULT_PREFIX = '{hostname}-'


 logger = logging.getLogger(__name__)


-def _parse_checks(consistency_config, only_checks=None):
+def parse_checks(consistency_config, only_checks=None):
     '''
-    Given a consistency config with a "checks" list, and an optional list of override checks,
-    transform them a tuple of named checks to run.
+    Given a consistency config with a "checks" sequence of dicts and an optional list of override
+    checks, return a tuple of named checks to run.

     For example, given a retention config of:

-        {'checks': ['repository', 'archives']}
+        {'checks': ({'name': 'repository'}, {'name': 'archives'})}

     This will be returned as:

         ('repository', 'archives')

-    If no "checks" option is present in the config, return the DEFAULT_CHECKS. If the checks value
-    is the string "disabled", return an empty tuple, meaning that no checks should be run.
-
-    If the "data" option is present, then make sure the "archives" option is included as well.
+    If no "checks" option is present in the config, return the DEFAULT_CHECKS. If a checks value
+    has a name of "disabled", return an empty tuple, meaning that no checks should be run.
     '''
-    checks = [
-        check.lower() for check in (only_checks or consistency_config.get('checks', []) or [])
-    ]
-    if checks == ['disabled']:
+    checks = only_checks or tuple(
+        check_config['name']
+        for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS)
+    )
+    checks = tuple(check.lower() for check in checks)
+
+    if 'disabled' in checks:
+        if len(checks) > 1:
+            logger.warning(
+                'Multiple checks are configured, but one of them is "disabled"; not running any checks'
+            )
         return ()

-    if 'data' in checks and 'archives' not in checks:
-        checks.append('archives')
-
-    return tuple(check for check in checks if check not in ('disabled', '')) or DEFAULT_CHECKS
+    return checks


-def _make_check_flags(checks, check_last=None, prefix=None):
+def parse_frequency(frequency):
     '''
-    Given a parsed sequence of checks, transform it into tuple of command-line flags.
+    Given a frequency string with a number and a unit of time, return a corresponding
+    datetime.timedelta instance or None if the frequency is None or "always".
+
+    For instance, given "3 weeks", return datetime.timedelta(weeks=3)
+
+    Raise ValueError if the given frequency cannot be parsed.
+    '''
+    if not frequency:
+        return None
+
+    frequency = frequency.strip().lower()
+
+    if frequency == 'always':
+        return None
+
+    try:
+        number, time_unit = frequency.split(' ')
+        number = int(number)
+    except ValueError:
+        raise ValueError(f"Could not parse consistency check frequency '{frequency}'")
+
+    if not time_unit.endswith('s'):
+        time_unit += 's'
+
+    if time_unit == 'months':
+        number *= 30
+        time_unit = 'days'
+    elif time_unit == 'years':
+        number *= 365
+        time_unit = 'days'
+
+    try:
+        return datetime.timedelta(**{time_unit: number})
+    except TypeError:
+        raise ValueError(f"Could not parse consistency check frequency '{frequency}'")
+
+
+def filter_checks_on_frequency(
+    location_config, consistency_config, borg_repository_id, checks, force
+):
+    '''
+    Given a location config, a consistency config with a "checks" sequence of dicts, a Borg
+    repository ID, a sequence of checks, and whether to force checks to run, filter down those
+    checks based on the configured "frequency" for each check as compared to its check time file.
+
+    In other words, a check whose check time file's timestamp is too new (based on the configured
+    frequency) will get cut from the returned sequence of checks. Example:
+
+    consistency_config = {
+        'checks': [
+            {
+                'name': 'archives',
+                'frequency': '2 weeks',
+            },
+        ]
+    }
+
+    When this function is called with that consistency_config and "archives" in checks, "archives"
+    will get filtered out of the returned result if its check time file is newer than 2 weeks old,
+    indicating that it's not yet time to run that check again.
+
+    Raise ValueError if a frequency cannot be parsed.
+    '''
+    filtered_checks = list(checks)
+
+    if force:
+        return tuple(filtered_checks)
+
+    for check_config in consistency_config.get('checks', DEFAULT_CHECKS):
+        check = check_config['name']
+        if checks and check not in checks:
+            continue
+
+        frequency_delta = parse_frequency(check_config.get('frequency'))
+        if not frequency_delta:
+            continue
+
+        check_time = read_check_time(
+            make_check_time_path(location_config, borg_repository_id, check)
+        )
+        if not check_time:
+            continue
+
+        # If we've not yet reached the time when the frequency dictates we're ready for another
+        # check, skip this check.
+        if datetime.datetime.now() < check_time + frequency_delta:
+            remaining = check_time + frequency_delta - datetime.datetime.now()
+            logger.info(
+                f"Skipping {check} check due to configured frequency; {remaining} until next check"
+            )
+            filtered_checks.remove(check)
+
+    return tuple(filtered_checks)
+
+
+def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
+    '''
+    Given the local Borg version and a parsed sequence of checks, transform the checks into tuple of
+    command-line flags.

     For example, given parsed checks of:
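parse_frequency() is easy to sanity-check in isolation. Note the deliberate approximations: months count as 30 days and years as 365. A quick demonstration, assuming the module above imports as `borgmatic.borg.check`:

```python
import datetime

from borgmatic.borg import check  # assumed import path for the module above

assert check.parse_frequency('3 weeks') == datetime.timedelta(weeks=3)
assert check.parse_frequency('1 month') == datetime.timedelta(days=30)   # months ~= 30 days
assert check.parse_frequency('2 years') == datetime.timedelta(days=730)  # years ~= 365 days
assert check.parse_frequency('always') is None  # "always" means: never skip the check
```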
@@ -53,47 +160,100 @@ def _make_check_flags(checks, check_last=None, prefix=None):
         ('--repository-only',)

     However, if both "repository" and "archives" are in checks, then omit them from the returned
-    flags because Borg does both checks by default.
+    flags because Borg does both checks by default. If "data" is in checks, that implies "archives".

     Additionally, if a check_last value is given and "archives" is in checks, then include a
     "--last" flag. And if a prefix value is given and "archives" is in checks, then include a
-    "--prefix" flag.
+    "--match-archives" flag.
     '''
+    if 'data' in checks:
+        data_flags = ('--verify-data',)
+        checks += ('archives',)
+    else:
+        data_flags = ()
+
     if 'archives' in checks:
         last_flags = ('--last', str(check_last)) if check_last else ()
-        prefix_flags = ('--prefix', prefix) if prefix else ()
+        if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
+            match_archives_flags = ('--match-archives', f'sh:{prefix}*') if prefix else ()
+        else:
+            match_archives_flags = ('--glob-archives', f'{prefix}*') if prefix else ()
     else:
         last_flags = ()
-        prefix_flags = ()
+        match_archives_flags = ()
         if check_last:
             logger.warning(
-                'Ignoring check_last option, as "archives" is not in consistency checks.'
+                'Ignoring check_last option, as "archives" or "data" are not in consistency checks'
             )
         if prefix:
             logger.warning(
-                'Ignoring consistency prefix option, as "archives" is not in consistency checks.'
+                'Ignoring consistency prefix option, as "archives" or "data" are not in consistency checks'
             )

-    common_flags = last_flags + prefix_flags + (('--verify-data',) if 'data' in checks else ())
+    common_flags = last_flags + match_archives_flags + data_flags

-    if set(DEFAULT_CHECKS).issubset(set(checks)):
+    if {'repository', 'archives'}.issubset(set(checks)):
         return common_flags

     return (
-        tuple('--{}-only'.format(check) for check in checks if check in DEFAULT_CHECKS)
+        tuple('--{}-only'.format(check) for check in checks if check in ('repository', 'archives'))
         + common_flags
     )


+def make_check_time_path(location_config, borg_repository_id, check_type):
+    '''
+    Given a location configuration dict, a Borg repository ID, and the name of a check type
+    ("repository", "archives", etc.), return a path for recording that check's time (the time of
+    that check last occurring).
+    '''
+    return os.path.join(
+        os.path.expanduser(
+            location_config.get(
+                'borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+            )
+        ),
+        'checks',
+        borg_repository_id,
+        check_type,
+    )
+
+
+def write_check_time(path):  # pragma: no cover
+    '''
+    Record a check time of now as the modification time of the given path.
+    '''
+    logger.debug(f'Writing check time at {path}')
+
+    os.makedirs(os.path.dirname(path), mode=0o700, exist_ok=True)
+    pathlib.Path(path, mode=0o600).touch()
+
+
+def read_check_time(path):
+    '''
+    Return the check time based on the modification time of the given path. Return None if the path
+    doesn't exist.
+    '''
+    logger.debug(f'Reading check time from {path}')
+
+    try:
+        return datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
+    except FileNotFoundError:
+        return None
+
+
 def check_archives(
     repository,
+    location_config,
     storage_config,
     consistency_config,
+    local_borg_version,
     local_path='borg',
     remote_path=None,
     progress=None,
     repair=None,
     only_checks=None,
+    force=None,
 ):
     '''
     Given a local or remote repository path, a storage config dict, a consistency config dict,
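To make the check-time bookkeeping concrete: each successful check touches a file whose modification time is read back on the next run. With no borgmatic_source_directory configured, the paths resolve under the default `~/.borgmatic` (per the constant moved into the state module elsewhere in this commit range). A sketch, assuming the module imports as `borgmatic.borg.check`:

```python
from borgmatic.borg import check  # assumed import path

# Resolves under ~/.borgmatic when location_config has no override.
path = check.make_check_time_path(
    location_config={},
    borg_repository_id='abc123',
    check_type='archives',
)
print(path)  # e.g. '/home/user/.borgmatic/checks/abc123/archives'
```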
@@ -102,13 +262,35 @@ def check_archives(
     Borg archives for consistency.

     If there are no consistency checks to run, skip running them.
+
+    Raises ValueError if the Borg repository ID cannot be determined.
     '''
-    checks = _parse_checks(consistency_config, only_checks)
+    try:
+        borg_repository_id = json.loads(
+            rinfo.display_repository_info(
+                repository,
+                storage_config,
+                local_borg_version,
+                argparse.Namespace(json=True),
+                local_path,
+                remote_path,
+            )
+        )['repository']['id']
+    except (json.JSONDecodeError, KeyError):
+        raise ValueError(f'Cannot determine Borg repository ID for {repository}')
+
+    checks = filter_checks_on_frequency(
+        location_config,
+        consistency_config,
+        borg_repository_id,
+        parse_checks(consistency_config, only_checks),
+        force,
+    )
     check_last = consistency_config.get('check_last', None)
     lock_wait = None
     extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '')

-    if set(checks).intersection(set(DEFAULT_CHECKS + ('data',))):
+    if set(checks).intersection({'repository', 'archives', 'data'}):
         lock_wait = storage_config.get('lock_wait', None)

     verbosity_flags = ()
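The repository ID lookup above parses Borg's JSON repository info. A trimmed, hypothetical example of the JSON shape that `['repository']['id']` digs into:

```python
import json

# Hypothetical, trimmed example of the JSON captured from
# rinfo.display_repository_info(); only "repository" -> "id" is used above.
rinfo_output = '''
{
    "repository": {
        "id": "3acb4e3f8c62bf7a1f123ab19a3cf29d",
        "location": "/var/lib/backups/local.borg"
    }
}
'''
borg_repository_id = json.loads(rinfo_output)['repository']['id']
assert borg_repository_id == '3acb4e3f8c62bf7a1f123ab19a3cf29d'
```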
@@ -122,21 +304,31 @@ def check_archives(
     full_command = (
         (local_path, 'check')
         + (('--repair',) if repair else ())
-        + _make_check_flags(checks, check_last, prefix)
+        + make_check_flags(local_borg_version, checks, check_last, prefix)
         + (('--remote-path', remote_path) if remote_path else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())
         + verbosity_flags
         + (('--progress',) if progress else ())
         + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-        + (repository,)
+        + flags.make_repository_flags(repository, local_borg_version)
     )

-    # The Borg repair option trigger an interactive prompt, which won't work when output is
+    borg_environment = environment.make_environment(storage_config)
+
+    # The Borg repair option triggers an interactive prompt, which won't work when output is
     # captured. And progress messes with the terminal directly.
     if repair or progress:
-        execute_command(full_command, output_file=DO_NOT_CAPTURE)
+        execute_command(
+            full_command, output_file=DO_NOT_CAPTURE, extra_environment=borg_environment
+        )
     else:
-        execute_command(full_command)
+        execute_command(full_command, extra_environment=borg_environment)
+
+    for check in checks:
+        write_check_time(make_check_time_path(location_config, borg_repository_id, check))

     if 'extract' in checks:
-        extract.extract_last_archive_dry_run(repository, lock_wait, local_path, remote_path)
+        extract.extract_last_archive_dry_run(
+            storage_config, local_borg_version, repository, lock_wait, local_path, remote_path
+        )
+        write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract'))
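As a sanity check on the flag assembly, here are the outputs make_check_flags() should produce per its docstring, assuming the package is importable and the given Borg version predates `--match-archives` (so the prefix path would take the `--glob-archives` branch, which these cases never reach since no prefix is passed):

```python
from borgmatic.borg import check  # assumed import path

# "repository" alone becomes --repository-only.
assert check.make_check_flags('1.2.3', ('repository',)) == ('--repository-only',)

# "repository" plus "archives" yields no --*-only flags, since Borg runs both by default.
assert check.make_check_flags('1.2.3', ('repository', 'archives')) == ()

# "data" implies "archives" and adds --verify-data.
assert check.make_check_flags('1.2.3', ('repository', 'data')) == ('--verify-data',)
```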
@@ -1,5 +1,6 @@
 import logging

+from borgmatic.borg import environment, flags
 from borgmatic.execute import execute_command

 logger = logging.getLogger(__name__)
@@ -9,6 +10,7 @@ def compact_segments(
     dry_run,
     repository,
     storage_config,
+    local_borg_version,
     local_path='borg',
     remote_path=None,
     progress=False,
@@ -16,8 +18,8 @@ def compact_segments(
     threshold=None,
 ):
     '''
-    Given dry-run flag, a local or remote repository path, and a storage config dict, compact Borg
-    segments in a repository.
+    Given dry-run flag, a local or remote repository path, a storage config dict, and the local
+    Borg version, compact the segments in a repository.
     '''
     umask = storage_config.get('umask', None)
     lock_wait = storage_config.get('lock_wait', None)
@@ -34,8 +36,16 @@ def compact_segments(
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-        + (repository,)
+        + flags.make_repository_flags(repository, local_borg_version)
     )

-    if not dry_run:
-        execute_command(full_command, output_log_level=logging.INFO, borg_local_path=local_path)
+    if dry_run:
+        logging.info(f'{repository}: Skipping compact (dry run)')
+        return
+
+    execute_command(
+        full_command,
+        output_log_level=logging.INFO,
+        borg_local_path=local_path,
+        extra_environment=environment.make_environment(storage_config),
+    )
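The recurring swap of a bare `(repository,)` for `flags.make_repository_flags(repository, local_borg_version)` across these hunks is the Borg 1/2 compatibility shim mentioned in the NEWS: Borg 1 takes the repository positionally, while Borg 2 expects a flag. A hedged sketch of the dispatch it presumably performs (the real logic lives in borgmatic's flags and feature modules, not in a bare version-string check):

```python
# Hedged sketch of what make_repository_flags() likely does; an assumption,
# not borgmatic's actual implementation.
def make_repository_flags_sketch(repository, local_borg_version):
    borg2 = local_borg_version.startswith('2.')

    # Borg 2 takes the repository via a flag; Borg 1 takes it positionally.
    return ('--repo', repository) if borg2 else (repository,)

assert make_repository_flags_sketch('repo.borg', '1.2.2') == ('repo.borg',)
assert make_repository_flags_sketch('repo.borg', '2.0.0b3') == ('--repo', 'repo.borg')
```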
@@ -3,10 +3,17 @@ import itertools
 import logging
 import os
 import pathlib
+import stat
 import tempfile

-from borgmatic.borg import feature
-from borgmatic.execute import DO_NOT_CAPTURE, execute_command, execute_command_with_processes
+import borgmatic.logger
+from borgmatic.borg import environment, feature, flags, state
+from borgmatic.execute import (
+    DO_NOT_CAPTURE,
+    execute_command,
+    execute_command_and_capture_output,
+    execute_command_with_processes,
+)

 logger = logging.getLogger(__name__)

@@ -59,7 +66,7 @@ def map_directories_to_devices(directories):
     }


-def deduplicate_directories(directory_devices):
+def deduplicate_directories(directory_devices, additional_directory_devices):
     '''
     Given a map from directory to the identifier for the device on which that directory resides,
     return the directories as a sorted tuple with all duplicate child directories removed. For
@@ -74,22 +81,28 @@ def deduplicate_directories(directory_devices):
     there are cases where Borg coming across the same file twice will result in duplicate reads and
     even hangs, e.g. when a database hook is using a named pipe for streaming database dumps to
     Borg.
+
+    If any additional directory devices are given, also deduplicate against them, but don't include
+    them in the returned directories.
     '''
     deduplicated = set()
     directories = sorted(directory_devices.keys())
+    additional_directories = sorted(additional_directory_devices.keys())
+    all_devices = {**directory_devices, **additional_directory_devices}

     for directory in directories:
         deduplicated.add(directory)
         parents = pathlib.PurePath(directory).parents

-        # If another directory in the given list is a parent of current directory (even n levels
-        # up) and both are on the same filesystem, then the current directory is a duplicate.
-        for other_directory in directories:
+        # If another directory in the given list (or the additional list) is a parent of current
+        # directory (even n levels up) and both are on the same filesystem, then the current
+        # directory is a duplicate.
+        for other_directory in directories + additional_directories:
             for parent in parents:
                 if (
                     pathlib.PurePath(other_directory) == parent
-                    and directory_devices[directory] is not None
-                    and directory_devices[other_directory] == directory_devices[directory]
+                    and all_devices[directory] is not None
+                    and all_devices[other_directory] == all_devices[directory]
                 ):
                     if directory in deduplicated:
                         deduplicated.remove(directory)
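The new `additional_directory_devices` parameter lets directories such as borgmatic's own source directory knock out duplicate children without themselves appearing in the result. A small illustration with made-up device IDs, assuming the module imports as `borgmatic.borg.create`:

```python
from borgmatic.borg import create  # assumed import path

# /home/user/data is dropped: /home is its parent on the same (fake) device 1.
assert create.deduplicate_directories(
    directory_devices={'/home': 1, '/home/user/data': 1, '/var': 2},
    additional_directory_devices={},
) == ('/home', '/var')

# An additional directory deduplicates children too, but isn't itself returned.
assert create.deduplicate_directories(
    directory_devices={'/home/user/data': 1, '/var': 2},
    additional_directory_devices={'/home': 1},
) == ('/var',)
```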
@@ -98,16 +111,24 @@ def deduplicate_directories(directory_devices):
     return tuple(sorted(deduplicated))


-def write_pattern_file(patterns=None):
+def write_pattern_file(patterns=None, sources=None, pattern_file=None):
     '''
-    Given a sequence of patterns, write them to a named temporary file and return it. Return None
-    if no patterns are provided.
+    Given a sequence of patterns and an optional sequence of source directories, write them to a
+    named temporary file (with the source directories as additional roots) and return the file.
+    If an optional open pattern file is given, overwrite it instead of making a new temporary file.
+    Return None if no patterns are provided.
     '''
-    if not patterns:
+    if not patterns and not sources:
         return None

-    pattern_file = tempfile.NamedTemporaryFile('w')
-    pattern_file.write('\n'.join(patterns))
+    if pattern_file is None:
+        pattern_file = tempfile.NamedTemporaryFile('w')
+    else:
+        pattern_file.seek(0)
+
+    pattern_file.write(
+        '\n'.join(tuple(patterns or ()) + tuple(f'R {source}' for source in (sources or [])))
+    )
     pattern_file.flush()

     return pattern_file
@ -175,15 +196,15 @@ def make_exclude_flags(location_config, exclude_filename=None):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'
|
DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
|
||||||
|
|
||||||
|
|
||||||
def borgmatic_source_directories(borgmatic_source_directory):
|
def collect_borgmatic_source_directories(borgmatic_source_directory):
|
||||||
'''
|
'''
|
||||||
Return a list of borgmatic-specific source directories used for state like database backups.
|
Return a list of borgmatic-specific source directories used for state like database backups.
|
||||||
'''
|
'''
|
||||||
if not borgmatic_source_directory:
|
if not borgmatic_source_directory:
|
||||||
borgmatic_source_directory = DEFAULT_BORGMATIC_SOURCE_DIRECTORY
|
borgmatic_source_directory = state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
|
||||||
|
|
||||||
return (
|
return (
|
||||||
[borgmatic_source_directory]
|
[borgmatic_source_directory]
|
||||||
|
@ -192,7 +213,76 @@ def borgmatic_source_directories(borgmatic_source_directory):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
|
ROOT_PATTERN_PREFIX = 'R '
|
||||||
|
|
||||||
|
|
||||||
|
def pattern_root_directories(patterns=None):
|
||||||
|
'''
|
||||||
|
Given a sequence of patterns, parse out and return just the root directories.
|
||||||
|
'''
|
||||||
|
if not patterns:
|
||||||
|
return []
|
||||||
|
|
||||||
|
return [
|
||||||
|
pattern.split(ROOT_PATTERN_PREFIX, maxsplit=1)[1]
|
||||||
|
for pattern in patterns
|
||||||
|
if pattern.startswith(ROOT_PATTERN_PREFIX)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def special_file(path):
|
||||||
|
'''
|
||||||
|
Return whether the given path is a special file (character device, block device, or named pipe
|
||||||
|
/ FIFO).
|
||||||
|
'''
|
||||||
|
try:
|
||||||
|
mode = os.stat(path).st_mode
|
||||||
|
except (FileNotFoundError, OSError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
return stat.S_ISCHR(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode)
|
||||||
|
|
||||||
|
|
||||||
|
def any_parent_directories(path, candidate_parents):
|
||||||
|
'''
|
||||||
|
Return whether any of the given candidate parent directories are an actual parent of the given
|
||||||
|
path. This includes grandparents, etc.
|
||||||
|
'''
|
||||||
|
for parent in candidate_parents:
|
||||||
|
if pathlib.PurePosixPath(parent) in pathlib.PurePath(path).parents:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def collect_special_file_paths(
|
||||||
|
create_command, local_path, working_directory, borg_environment, skip_directories
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Given a Borg create command as a tuple, a local Borg path, a working directory, and a dict of
|
||||||
|
environment variables to pass to Borg, and a sequence of parent directories to skip, collect the
|
||||||
|
paths for any special files (character devices, block devices, and named pipes / FIFOs) that
|
||||||
|
Borg would encounter during a create. These are all paths that could cause Borg to hang if its
|
||||||
|
--read-special flag is used.
|
||||||
|
'''
|
||||||
|
paths_output = execute_command_and_capture_output(
|
||||||
|
create_command + ('--dry-run', '--list'),
|
||||||
|
capture_stderr=True,
|
||||||
|
working_directory=working_directory,
|
||||||
|
extra_environment=borg_environment,
|
||||||
|
)
|
||||||
|
|
||||||
|
paths = tuple(
|
||||||
|
path_line.split(' ', 1)[1]
|
||||||
|
for path_line in paths_output.split('\n')
|
||||||
|
if path_line and path_line.startswith('- ')
|
||||||
|
)
|
||||||
|
|
||||||
|
return tuple(
|
||||||
|
path
|
||||||
|
for path in paths
|
||||||
|
if special_file(path) and not any_parent_directories(path, skip_directories)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def create_archive(
|
def create_archive(
|
||||||
|
@ -206,7 +296,7 @@ def create_archive(
|
||||||
progress=False,
|
progress=False,
|
||||||
stats=False,
|
stats=False,
|
||||||
json=False,
|
json=False,
|
||||||
files=False,
|
list_files=False,
|
||||||
stream_processes=None,
|
stream_processes=None,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
|
@ -216,27 +306,40 @@ def create_archive(
|
||||||
If a sequence of stream processes is given (instances of subprocess.Popen), then execute the
|
If a sequence of stream processes is given (instances of subprocess.Popen), then execute the
|
||||||
create command while also triggering the given processes to produce output.
|
create command while also triggering the given processes to produce output.
|
||||||
'''
|
'''
|
||||||
|
borgmatic.logger.add_custom_log_levels()
|
||||||
|
borgmatic_source_directories = expand_directories(
|
||||||
|
collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
|
||||||
|
)
|
||||||
sources = deduplicate_directories(
|
sources = deduplicate_directories(
|
||||||
map_directories_to_devices(
|
map_directories_to_devices(
|
||||||
expand_directories(
|
expand_directories(
|
||||||
location_config['source_directories']
|
tuple(location_config.get('source_directories', ())) + borgmatic_source_directories
|
||||||
+ borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
|
|
||||||
)
|
)
|
||||||
)
|
),
|
||||||
|
additional_directory_devices=map_directories_to_devices(
|
||||||
|
expand_directories(pattern_root_directories(location_config.get('patterns')))
|
||||||
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
working_directory = os.path.expanduser(location_config.get('working_directory'))
|
working_directory = os.path.expanduser(location_config.get('working_directory'))
|
||||||
except TypeError:
|
except TypeError:
|
||||||
working_directory = None
|
working_directory = None
|
||||||
pattern_file = write_pattern_file(location_config.get('patterns'))
|
|
||||||
|
pattern_file = (
|
||||||
|
write_pattern_file(location_config.get('patterns'), sources)
|
||||||
|
if location_config.get('patterns') or location_config.get('patterns_from')
|
||||||
|
else None
|
||||||
|
)
|
||||||
exclude_file = write_pattern_file(
|
exclude_file = write_pattern_file(
|
||||||
expand_home_directories(location_config.get('exclude_patterns'))
|
expand_home_directories(location_config.get('exclude_patterns'))
|
||||||
)
|
)
|
||||||
checkpoint_interval = storage_config.get('checkpoint_interval', None)
|
checkpoint_interval = storage_config.get('checkpoint_interval', None)
|
||||||
chunker_params = storage_config.get('chunker_params', None)
|
chunker_params = storage_config.get('chunker_params', None)
|
||||||
compression = storage_config.get('compression', None)
|
compression = storage_config.get('compression', None)
|
||||||
remote_rate_limit = storage_config.get('remote_rate_limit', None)
|
upload_rate_limit = storage_config.get('upload_rate_limit', None)
|
||||||
umask = storage_config.get('umask', None)
|
umask = storage_config.get('umask', None)
|
||||||
lock_wait = storage_config.get('lock_wait', None)
|
lock_wait = storage_config.get('lock_wait', None)
|
||||||
files_cache = location_config.get('files_cache')
|
files_cache = location_config.get('files_cache')
|
||||||
|
@ -249,27 +352,30 @@ def create_archive(
|
||||||
atime_flags = ('--noatime',) if location_config.get('atime') is False else ()
|
atime_flags = ('--noatime',) if location_config.get('atime') is False else ()
|
||||||
|
|
||||||
if feature.available(feature.Feature.NOFLAGS, local_borg_version):
|
if feature.available(feature.Feature.NOFLAGS, local_borg_version):
|
||||||
noflags_flags = ('--noflags',) if location_config.get('bsd_flags') is False else ()
|
noflags_flags = ('--noflags',) if location_config.get('flags') is False else ()
|
||||||
else:
|
else:
|
||||||
noflags_flags = ('--nobsdflags',) if location_config.get('bsd_flags') is False else ()
|
noflags_flags = ('--nobsdflags',) if location_config.get('flags') is False else ()
|
||||||
|
|
||||||
if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
|
if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
|
||||||
numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_owner') else ()
|
numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
|
||||||
else:
|
else:
|
||||||
numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_owner') else ()
|
numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()
|
||||||
|
|
||||||
if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version):
|
if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version):
|
||||||
upload_ratelimit_flags = (
|
upload_ratelimit_flags = (
|
||||||
('--upload-ratelimit', str(remote_rate_limit)) if remote_rate_limit else ()
|
('--upload-ratelimit', str(upload_rate_limit)) if upload_rate_limit else ()
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
upload_ratelimit_flags = (
|
upload_ratelimit_flags = (
|
||||||
('--remote-ratelimit', str(remote_rate_limit)) if remote_rate_limit else ()
|
('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else ()
|
||||||
)
|
)
|
||||||
|
|
||||||
ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))
|
if stream_processes and location_config.get('read_special') is False:
|
||||||
|
logger.warning(
|
||||||
|
f'{repository}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
|
||||||
|
)
|
||||||
|
|
||||||
full_command = (
|
create_command = (
|
||||||
tuple(local_path.split(' '))
|
tuple(local_path.split(' '))
|
||||||
+ ('create',)
|
+ ('create',)
|
||||||
+ make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
|
+ make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
|
||||||
|
@ -287,32 +393,23 @@ def create_archive(
|
||||||
+ atime_flags
|
+ atime_flags
|
||||||
+ (('--noctime',) if location_config.get('ctime') is False else ())
|
+ (('--noctime',) if location_config.get('ctime') is False else ())
|
||||||
+ (('--nobirthtime',) if location_config.get('birthtime') is False else ())
|
+ (('--nobirthtime',) if location_config.get('birthtime') is False else ())
|
||||||
+ (('--read-special',) if (location_config.get('read_special') or stream_processes) else ())
|
+ (('--read-special',) if location_config.get('read_special') or stream_processes else ())
|
||||||
+ noflags_flags
|
+ noflags_flags
|
||||||
+ (('--files-cache', files_cache) if files_cache else ())
|
+ (('--files-cache', files_cache) if files_cache else ())
|
||||||
+ (('--remote-path', remote_path) if remote_path else ())
|
+ (('--remote-path', remote_path) if remote_path else ())
|
||||||
+ (('--umask', str(umask)) if umask else ())
|
+ (('--umask', str(umask)) if umask else ())
|
||||||
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
|
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
|
||||||
+ (('--list', '--filter', 'AME-') if files and not json and not progress else ())
|
+ (('--list', '--filter', 'AMEx-') if list_files and not json and not progress else ())
|
||||||
+ (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ())
|
|
||||||
+ (('--stats',) if stats and not json and not dry_run else ())
|
|
||||||
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not json else ())
|
|
||||||
+ (('--dry-run',) if dry_run else ())
|
+ (('--dry-run',) if dry_run else ())
|
||||||
+ (('--progress',) if progress else ())
|
|
||||||
+ (('--json',) if json else ())
|
|
||||||
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
|
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
|
||||||
+ (
|
+ flags.make_repository_archive_flags(repository, archive_name_format, local_borg_version)
|
||||||
'{repository}::{archive_name_format}'.format(
|
+ (sources if not pattern_file else ())
|
||||||
repository=repository, archive_name_format=archive_name_format
|
|
||||||
),
|
|
||||||
)
|
|
||||||
+ sources
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if json:
|
if json:
|
||||||
output_log_level = None
|
output_log_level = None
|
||||||
elif (stats or files) and logger.getEffectiveLevel() == logging.WARNING:
|
elif list_files or (stats and not dry_run):
|
||||||
output_log_level = logging.WARNING
|
output_log_level = logging.ANSWER
|
||||||
else:
|
else:
|
||||||
output_log_level = logging.INFO
|
output_log_level = logging.INFO
|
||||||
|
|
||||||
|
@ -320,20 +417,61 @@ def create_archive(
|
||||||
# the terminal directly.
|
# the terminal directly.
|
||||||
output_file = DO_NOT_CAPTURE if progress else None
|
output_file = DO_NOT_CAPTURE if progress else None
|
||||||
|
|
||||||
|
borg_environment = environment.make_environment(storage_config)
|
||||||
|
|
||||||
|
# If database hooks are enabled (as indicated by streaming processes), exclude files that might
|
||||||
|
# cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True.
|
||||||
|
if stream_processes and not location_config.get('read_special'):
|
||||||
|
logger.debug(f'{repository}: Collecting special file paths')
|
||||||
|
special_file_paths = collect_special_file_paths(
|
||||||
|
create_command,
|
||||||
|
local_path,
|
||||||
|
working_directory,
|
||||||
|
borg_environment,
|
||||||
|
skip_directories=borgmatic_source_directories,
|
||||||
|
)
|
||||||
|
logger.warning(
|
||||||
|
f'{repository}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}'
|
||||||
|
)
|
||||||
|
|
||||||
|
exclude_file = write_pattern_file(
|
||||||
|
expand_home_directories(
|
||||||
|
tuple(location_config.get('exclude_patterns') or ()) + special_file_paths
|
||||||
|
),
|
||||||
|
pattern_file=exclude_file,
|
||||||
|
)
|
||||||
|
|
||||||
|
if exclude_file:
|
||||||
|
create_command += make_exclude_flags(location_config, exclude_file.name)
|
||||||
|
|
||||||
|
create_command += (
|
||||||
|
(('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ())
|
||||||
|
+ (('--stats',) if stats and not json and not dry_run else ())
|
||||||
|
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not json else ())
|
||||||
|
+ (('--progress',) if progress else ())
|
||||||
|
+ (('--json',) if json else ())
|
||||||
|
)
|
||||||
|
|
||||||
if stream_processes:
|
if stream_processes:
|
||||||
return execute_command_with_processes(
|
return execute_command_with_processes(
|
||||||
full_command,
|
create_command,
|
||||||
stream_processes,
|
stream_processes,
|
||||||
output_log_level,
|
output_log_level,
|
||||||
output_file,
|
output_file,
|
||||||
borg_local_path=local_path,
|
borg_local_path=local_path,
|
||||||
working_directory=working_directory,
|
working_directory=working_directory,
|
||||||
|
extra_environment=borg_environment,
|
||||||
|
)
|
||||||
|
elif output_log_level is None:
|
||||||
|
return execute_command_and_capture_output(
|
||||||
|
create_command, working_directory=working_directory, extra_environment=borg_environment,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
execute_command(
|
||||||
|
create_command,
|
||||||
|
output_log_level,
|
||||||
|
output_file,
|
||||||
|
borg_local_path=local_path,
|
||||||
|
working_directory=working_directory,
|
||||||
|
extra_environment=borg_environment,
|
||||||
)
|
)
|
||||||
|
|
||||||
return execute_command(
|
|
||||||
full_command,
|
|
||||||
output_log_level,
|
|
||||||
output_file,
|
|
||||||
borg_local_path=local_path,
|
|
||||||
working_directory=working_directory,
|
|
||||||
)
|
|
||||||
|
|
|
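Note: a quick usage sketch of the reworked deduplicate_directories(), with made-up device IDs
(assumes this revision of borgmatic is importable):

    from borgmatic.borg.create import deduplicate_directories

    # /var/log sits under the pattern root /var on the same device (66), so it's
    # dropped; /home is on a different device (22) and is kept. Pattern roots are
    # deduplicated against but never returned.
    print(deduplicate_directories({'/var/log': 66, '/home': 22}, {'/var': 66}))
    # ('/home',)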
--- a/borgmatic/borg/environment.py
+++ b/borgmatic/borg/environment.py
@@ -1,5 +1,3 @@
-import os
-
 OPTION_TO_ENVIRONMENT_VARIABLE = {
     'borg_base_directory': 'BORG_BASE_DIR',
     'borg_config_directory': 'BORG_CONFIG_DIR',
@@ -18,21 +16,24 @@ DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE = {
 }
 
 
-def initialize(storage_config):
-    for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items():
-        # Options from borgmatic configuration take precedence over already set BORG_* environment
-        # variables.
-        value = storage_config.get(option_name) or os.environ.get(environment_variable_name)
+def make_environment(storage_config):
+    '''
+    Given a borgmatic storage configuration dict, return its options converted to a Borg environment
+    variable dict.
+    '''
+    environment = {}
+
+    for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items():
+        value = storage_config.get(option_name)
 
         if value:
-            os.environ[environment_variable_name] = value
-        else:
-            os.environ.pop(environment_variable_name, None)
+            environment[environment_variable_name] = value
 
     for (
         option_name,
         environment_variable_name,
     ) in DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE.items():
         value = storage_config.get(option_name, False)
-        os.environ[environment_variable_name] = 'yes' if value else 'no'
+        environment[environment_variable_name] = 'yes' if value else 'no'
+
+    return environment
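Note: unlike the old initialize(), make_environment() no longer mutates os.environ; callers pass
the returned dict to execute_command() via extra_environment. A small sketch using an option from
the mapping above:

    from borgmatic.borg.environment import make_environment

    env = make_environment({'borg_base_directory': '/var/lib/borg'})
    print(env['BORG_BASE_DIR'])  # /var/lib/borg
    # Boolean options always come out as explicit 'yes'/'no' values.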
--- a/borgmatic/borg/export_tar.py
+++ b/borgmatic/borg/export_tar.py
@@ -1,6 +1,8 @@
 import logging
 import os
 
+import borgmatic.logger
+from borgmatic.borg import environment, flags
 from borgmatic.execute import DO_NOT_CAPTURE, execute_command
 
 logger = logging.getLogger(__name__)
@@ -13,21 +15,23 @@ def export_tar_archive(
     paths,
     destination_path,
     storage_config,
+    local_borg_version,
     local_path='borg',
     remote_path=None,
     tar_filter=None,
-    files=False,
+    list_files=False,
     strip_components=None,
 ):
     '''
     Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
-    export from the archive, a destination path to export to, a storage configuration dict, optional
-    local and remote Borg paths, an optional filter program, whether to include per-file details,
-    and an optional number of path components to strip, export the archive into the given
-    destination path as a tar-formatted file.
+    export from the archive, a destination path to export to, a storage configuration dict, the
+    local Borg version, optional local and remote Borg paths, an optional filter program, whether to
+    include per-file details, and an optional number of path components to strip, export the archive
+    into the given destination path as a tar-formatted file.
 
     If the destination path is "-", then stream the output to stdout instead of to a file.
     '''
+    borgmatic.logger.add_custom_log_levels()
     umask = storage_config.get('umask', None)
     lock_wait = storage_config.get('lock_wait', None)
 
@@ -37,18 +41,22 @@ def export_tar_archive(
         + (('--umask', str(umask)) if umask else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
-        + (('--list',) if files else ())
+        + (('--list',) if list_files else ())
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + (('--dry-run',) if dry_run else ())
         + (('--tar-filter', tar_filter) if tar_filter else ())
         + (('--strip-components', str(strip_components)) if strip_components else ())
-        + ('::'.join((repository if ':' in repository else os.path.abspath(repository), archive)),)
+        + flags.make_repository_archive_flags(
+            repository if ':' in repository else os.path.abspath(repository),
+            archive,
+            local_borg_version,
+        )
         + (destination_path,)
         + (tuple(paths) if paths else ())
     )
 
-    if files and logger.getEffectiveLevel() == logging.WARNING:
-        output_log_level = logging.WARNING
+    if list_files:
+        output_log_level = logging.ANSWER
     else:
         output_log_level = logging.INFO
@@ -61,4 +69,5 @@ def export_tar_archive(
         output_file=DO_NOT_CAPTURE if destination_path == '-' else None,
         output_log_level=output_log_level,
         borg_local_path=local_path,
+        extra_environment=environment.make_environment(storage_config),
     )
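Note: logging.ANSWER used above is a custom level registered by
borgmatic.logger.add_custom_log_levels(); a sketch of the idea, with an assumed numeric value
between INFO and WARNING:

    import logging

    ANSWER = logging.WARNING - 5  # assumed value; the point is INFO < ANSWER < WARNING
    logging.addLevelName(ANSWER, 'ANSWER')
    logging.ANSWER = ANSWER  # mirrors how the diff references logging.ANSWER

    logging.basicConfig(level=logging.WARNING)
    logging.getLogger('demo').log(ANSWER, 'suppressed at WARNING verbosity')

This lets file listings print at borgmatic's default verbosity without being tagged as warnings.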
--- a/borgmatic/borg/extract.py
+++ b/borgmatic/borg/extract.py
@@ -2,13 +2,20 @@ import logging
 import os
 import subprocess
 
-from borgmatic.borg import feature
+from borgmatic.borg import environment, feature, flags, rlist
 from borgmatic.execute import DO_NOT_CAPTURE, execute_command
 
 logger = logging.getLogger(__name__)
 
 
-def extract_last_archive_dry_run(repository, lock_wait=None, local_path='borg', remote_path=None):
+def extract_last_archive_dry_run(
+    storage_config,
+    local_borg_version,
+    repository,
+    lock_wait=None,
+    local_path='borg',
+    remote_path=None,
+):
     '''
     Perform an extraction dry-run of the most recent archive. If there are no archives, skip the
     dry-run.
@@ -21,38 +28,28 @@ def extract_last_archive_dry_run(repository, lock_wait=None, local_path='borg',
     elif logger.isEnabledFor(logging.INFO):
         verbosity_flags = ('--info',)
 
-    full_list_command = (
-        (local_path, 'list', '--short')
-        + remote_path_flags
-        + lock_wait_flags
-        + verbosity_flags
-        + (repository,)
-    )
-
-    list_output = execute_command(
-        full_list_command, output_log_level=None, borg_local_path=local_path
-    )
-
     try:
-        last_archive_name = list_output.strip().splitlines()[-1]
-    except IndexError:
+        last_archive_name = rlist.resolve_archive_name(
+            repository, 'latest', storage_config, local_borg_version, local_path, remote_path
+        )
+    except ValueError:
+        logger.warning('No archives found. Skipping extract consistency check.')
         return
 
     list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else ()
+    borg_environment = environment.make_environment(storage_config)
     full_extract_command = (
         (local_path, 'extract', '--dry-run')
         + remote_path_flags
         + lock_wait_flags
         + verbosity_flags
         + list_flag
-        + (
-            '{repository}::{last_archive_name}'.format(
-                repository=repository, last_archive_name=last_archive_name
-            ),
-        )
+        + flags.make_repository_archive_flags(repository, last_archive_name, local_borg_version)
     )
 
-    execute_command(full_extract_command, working_directory=None)
+    execute_command(
+        full_extract_command, working_directory=None, extra_environment=borg_environment
+    )
 
 
 def extract_archive(
@@ -86,9 +83,9 @@ def extract_archive(
         raise ValueError('progress and extract_to_stdout cannot both be set')
 
     if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
-        numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_owner') else ()
+        numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
     else:
-        numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_owner') else ()
+        numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()
 
     full_command = (
         (local_path, 'extract')
@@ -102,15 +99,24 @@ def extract_archive(
         + (('--strip-components', str(strip_components)) if strip_components else ())
         + (('--progress',) if progress else ())
         + (('--stdout',) if extract_to_stdout else ())
-        + ('::'.join((repository if ':' in repository else os.path.abspath(repository), archive)),)
+        + flags.make_repository_archive_flags(
+            repository if ':' in repository else os.path.abspath(repository),
+            archive,
+            local_borg_version,
+        )
         + (tuple(paths) if paths else ())
     )
 
+    borg_environment = environment.make_environment(storage_config)
+
     # The progress output isn't compatible with captured and logged output, as progress messes with
     # the terminal directly.
     if progress:
         return execute_command(
-            full_command, output_file=DO_NOT_CAPTURE, working_directory=destination_path
+            full_command,
+            output_file=DO_NOT_CAPTURE,
+            working_directory=destination_path,
+            extra_environment=borg_environment,
         )
         return None
@@ -120,8 +126,11 @@ def extract_archive(
         output_file=subprocess.PIPE,
         working_directory=destination_path,
         run_to_completion=False,
+        extra_environment=borg_environment,
     )
 
-    # Don't give Borg local path, so as to error on warnings, as Borg only gives a warning if the
-    # restore paths don't exist in the archive!
-    execute_command(full_command, working_directory=destination_path)
+    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
+    # if the restore paths don't exist in the archive.
+    execute_command(
+        full_command, working_directory=destination_path, extra_environment=borg_environment
+    )
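Note: the dry-run now resolves the newest archive via rlist.resolve_archive_name() and treats an
empty repository as a ValueError. A sketch of the call as used above (argument order taken from
the call site; values are hypothetical):

    from borgmatic.borg import rlist

    try:
        archive = rlist.resolve_archive_name(
            '/repo', 'latest', {}, '2.0.0b3', 'borg', None
        )
    except ValueError:
        archive = None  # no archives yet: skip the extract consistency check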
--- a/borgmatic/borg/feature.py
+++ b/borgmatic/borg/feature.py
@@ -9,6 +9,11 @@ class Feature(Enum):
     NOFLAGS = 3
     NUMERIC_IDS = 4
     UPLOAD_RATELIMIT = 5
+    SEPARATE_REPOSITORY_ARCHIVE = 6
+    RCREATE = 7
+    RLIST = 8
+    RINFO = 9
+    MATCH_ARCHIVES = 10
 
 
 FEATURE_TO_MINIMUM_BORG_VERSION = {
@@ -17,6 +22,11 @@ FEATURE_TO_MINIMUM_BORG_VERSION = {
     Feature.NOFLAGS: parse_version('1.2.0a8'),  # borg create --noflags
     Feature.NUMERIC_IDS: parse_version('1.2.0b3'),  # borg create/extract/mount --numeric-ids
     Feature.UPLOAD_RATELIMIT: parse_version('1.2.0b3'),  # borg create --upload-ratelimit
+    Feature.SEPARATE_REPOSITORY_ARCHIVE: parse_version('2.0.0a2'),  # --repo with separate archive
+    Feature.RCREATE: parse_version('2.0.0a2'),  # borg rcreate
+    Feature.RLIST: parse_version('2.0.0a2'),  # borg rlist
+    Feature.RINFO: parse_version('2.0.0a2'),  # borg rinfo
+    Feature.MATCH_ARCHIVES: parse_version('2.0.0b3'),  # borg --match-archives
 }
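Note: feature gating compares the local Borg version against these minimums; a sketch of the
assumed comparison behind feature.available():

    from pkg_resources import parse_version

    def available(minimum_version, local_borg_version):
        # assumed implementation: a feature is available once the local Borg
        # version reaches the feature's minimum version
        return parse_version(minimum_version) <= parse_version(local_borg_version)

    print(available('2.0.0a2', '1.2.1'))    # False: keep "borg init" / repo::archive
    print(available('2.0.0a2', '2.0.0b3'))  # True: "borg rcreate" and --repo apply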
--- a/borgmatic/borg/flags.py
+++ b/borgmatic/borg/flags.py
@@ -1,5 +1,7 @@
 import itertools
 
+from borgmatic.borg import feature
+
 
 def make_flags(name, value):
     '''
@@ -29,3 +31,28 @@ def make_flags_from_arguments(arguments, excludes=()):
             if name not in excludes and not name.startswith('_')
         )
     )
+
+
+def make_repository_flags(repository, local_borg_version):
+    '''
+    Given the path of a Borg repository and the local Borg version, return Borg-version-appropriate
+    command-line flags (as a tuple) for selecting that repository.
+    '''
+    return (
+        ('--repo',)
+        if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
+        else ()
+    ) + (repository,)
+
+
+def make_repository_archive_flags(repository, archive, local_borg_version):
+    '''
+    Given the path of a Borg repository, an archive name or pattern, and the local Borg version,
+    return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository
+    and archive.
+    '''
+    return (
+        ('--repo', repository, archive)
+        if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
+        else (f'{repository}::{archive}',)
+    )
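Note: these two helpers are what let every action build Borg 1.x and Borg 2.x command lines from a
single code path; expected outputs (version strings are illustrative):

    from borgmatic.borg.flags import make_repository_flags, make_repository_archive_flags

    print(make_repository_flags('/repo', '1.2.1'))                       # ('/repo',)
    print(make_repository_flags('/repo', '2.0.0b3'))                     # ('--repo', '/repo')
    print(make_repository_archive_flags('/repo', 'nightly', '1.2.1'))    # ('/repo::nightly',)
    print(make_repository_archive_flags('/repo', 'nightly', '2.0.0b3'))  # ('--repo', '/repo', 'nightly')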
--- a/borgmatic/borg/info.py
+++ b/borgmatic/borg/info.py
@@ -1,19 +1,26 @@
 import logging
 
-from borgmatic.borg.flags import make_flags, make_flags_from_arguments
-from borgmatic.execute import execute_command
+import borgmatic.logger
+from borgmatic.borg import environment, feature, flags
+from borgmatic.execute import execute_command, execute_command_and_capture_output
 
 logger = logging.getLogger(__name__)
 
 
 def display_archives_info(
-    repository, storage_config, info_arguments, local_path='borg', remote_path=None
+    repository,
+    storage_config,
+    local_borg_version,
+    info_arguments,
+    local_path='borg',
+    remote_path=None,
 ):
     '''
-    Given a local or remote repository path, a storage config dict, and the arguments to the info
-    action, display summary information for Borg archives in the repository or return JSON summary
-    information.
+    Given a local or remote repository path, a storage config dict, the local Borg version, and the
+    arguments to the info action, display summary information for Borg archives in the repository or
+    return JSON summary information.
     '''
+    borgmatic.logger.add_custom_log_levels()
     lock_wait = storage_config.get('lock_wait', None)
 
     full_command = (
@@ -28,18 +35,36 @@ def display_archives_info(
             if logger.isEnabledFor(logging.DEBUG) and not info_arguments.json
             else ()
         )
-        + make_flags('remote-path', remote_path)
-        + make_flags('lock-wait', lock_wait)
-        + make_flags_from_arguments(info_arguments, excludes=('repository', 'archive'))
-        + (
-            '::'.join((repository, info_arguments.archive))
-            if info_arguments.archive
-            else repository,
-        )
+        + flags.make_flags('remote-path', remote_path)
+        + flags.make_flags('lock-wait', lock_wait)
+        + (
+            (
+                flags.make_flags('match-archives', f'sh:{info_arguments.prefix}*')
+                if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
+                else flags.make_flags('glob-archives', f'{info_arguments.prefix}*')
+            )
+            if info_arguments.prefix
+            else ()
+        )
+        + flags.make_flags_from_arguments(
+            info_arguments, excludes=('repository', 'archive', 'prefix')
+        )
+        + flags.make_repository_flags(repository, local_borg_version)
+        + (
+            flags.make_flags('match-archives', info_arguments.archive)
+            if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
+            else flags.make_flags('glob-archives', info_arguments.archive)
+        )
     )
 
-    return execute_command(
-        full_command,
-        output_log_level=None if info_arguments.json else logging.WARNING,
-        borg_local_path=local_path,
-    )
+    if info_arguments.json:
+        return execute_command_and_capture_output(
+            full_command, extra_environment=environment.make_environment(storage_config),
+        )
+    else:
+        execute_command(
+            full_command,
+            output_log_level=logging.ANSWER,
+            borg_local_path=local_path,
+            extra_environment=environment.make_environment(storage_config),
+        )
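Note: the old --prefix option now rides on Borg's archive matching; for a hypothetical prefix of
'app-', the flags produced by the branch above would be:

    from borgmatic.borg.flags import make_flags

    prefix = 'app-'
    print(make_flags('match-archives', f'sh:{prefix}*'))  # ('--match-archives', 'sh:app-*') on Borg 2.x
    print(make_flags('glob-archives', f'{prefix}*'))      # ('--glob-archives', 'app-*') on Borg 1.x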
--- a/borgmatic/borg/init.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import logging
-import subprocess
-
-from borgmatic.execute import DO_NOT_CAPTURE, execute_command
-
-logger = logging.getLogger(__name__)
-
-
-INFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2
-
-
-def initialize_repository(
-    repository,
-    storage_config,
-    encryption_mode,
-    append_only=None,
-    storage_quota=None,
-    local_path='borg',
-    remote_path=None,
-):
-    '''
-    Given a local or remote repository path, a storage configuration dict, a Borg encryption mode,
-    whether the repository should be append-only, and the storage quota to use, initialize the
-    repository. If the repository already exists, then log and skip initialization.
-    '''
-    info_command = (
-        (local_path, 'info')
-        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
-        + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
-        + (('--remote-path', remote_path) if remote_path else ())
-        + (repository,)
-    )
-    logger.debug(' '.join(info_command))
-
-    try:
-        execute_command(info_command, output_log_level=None)
-        logger.info('Repository already exists. Skipping initialization.')
-        return
-    except subprocess.CalledProcessError as error:
-        if error.returncode != INFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
-            raise
-
-    extra_borg_options = storage_config.get('extra_borg_options', {}).get('init', '')
-
-    init_command = (
-        (local_path, 'init')
-        + (('--encryption', encryption_mode) if encryption_mode else ())
-        + (('--append-only',) if append_only else ())
-        + (('--storage-quota', storage_quota) if storage_quota else ())
-        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
-        + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
-        + (('--remote-path', remote_path) if remote_path else ())
-        + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-        + (repository,)
-    )
-
-    # Do not capture output here, so as to support interactive prompts.
-    execute_command(init_command, output_file=DO_NOT_CAPTURE, borg_local_path=local_path)
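Note: the "does the repository already exist?" probe from the deleted init.py survives in the new
rcreate.py further down, just pointed at rinfo instead of info. A condensed sketch of the
exit-code pattern:

    import subprocess

    REPOSITORY_NOT_FOUND_EXIT_CODE = 2  # the sentinel both modules use

    def repository_exists(probe_command):
        try:
            subprocess.check_output(probe_command)
            return True
        except subprocess.CalledProcessError as error:
            if error.returncode != REPOSITORY_NOT_FOUND_EXIT_CODE:
                raise
            return False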
--- a/borgmatic/borg/list.py
+++ b/borgmatic/borg/list.py
@@ -1,63 +1,41 @@
+import argparse
+import copy
 import logging
+import re
 
-from borgmatic.borg.flags import make_flags, make_flags_from_arguments
-from borgmatic.execute import execute_command
+import borgmatic.logger
+from borgmatic.borg import environment, feature, flags, rlist
+from borgmatic.execute import execute_command, execute_command_and_capture_output
 
 logger = logging.getLogger(__name__)
 
 
-# A hack to convince Borg to exclude archives ending in ".checkpoint". This assumes that a
-# non-checkpoint archive name ends in a digit (e.g. from a timestamp).
-BORG_EXCLUDE_CHECKPOINTS_GLOB = '*[0123456789]'
+ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST = ('prefix', 'match_archives', 'sort_by', 'first', 'last')
+MAKE_FLAGS_EXCLUDES = (
+    'repository',
+    'archive',
+    'successful',
+    'paths',
+    'find_paths',
+) + ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST
 
 
-def resolve_archive_name(repository, archive, storage_config, local_path='borg', remote_path=None):
-    '''
-    Given a local or remote repository path, an archive name, a storage config dict, a local Borg
-    path, and a remote Borg path, simply return the archive name. But if the archive name is
-    "latest", then instead introspect the repository for the latest successful (non-checkpoint)
-    archive, and return its name.
-
-    Raise ValueError if "latest" is given but there are no archives in the repository.
-    '''
-    if archive != "latest":
-        return archive
-
-    lock_wait = storage_config.get('lock_wait', None)
-
-    full_command = (
-        (local_path, 'list')
-        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
-        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
-        + make_flags('remote-path', remote_path)
-        + make_flags('lock-wait', lock_wait)
-        + make_flags('glob-archives', BORG_EXCLUDE_CHECKPOINTS_GLOB)
-        + make_flags('last', 1)
-        + ('--short', repository)
-    )
-
-    output = execute_command(full_command, output_log_level=None, borg_local_path=local_path)
-    try:
-        latest_archive = output.strip().splitlines()[-1]
-    except IndexError:
-        raise ValueError('No archives found in the repository')
-
-    logger.debug('{}: Latest archive is {}'.format(repository, latest_archive))
-
-    return latest_archive
-
-
-def list_archives(repository, storage_config, list_arguments, local_path='borg', remote_path=None):
+def make_list_command(
+    repository,
+    storage_config,
+    local_borg_version,
+    list_arguments,
+    local_path='borg',
+    remote_path=None,
+):
     '''
-    Given a local or remote repository path, a storage config dict, and the arguments to the list
-    action, display the output of listing Borg archives in the repository or return JSON output. Or,
-    if an archive name is given, listing the files in that archive.
+    Given a local or remote repository path, a storage config dict, the arguments to the list
+    action, and local and remote Borg paths, return a command as a tuple to list archives or paths
+    within an archive.
     '''
     lock_wait = storage_config.get('lock_wait', None)
-    if list_arguments.successful:
-        list_arguments.glob_archives = BORG_EXCLUDE_CHECKPOINTS_GLOB
 
-    full_command = (
+    return (
         (local_path, 'list')
         + (
             ('--info',)
@@ -69,21 +47,154 @@ def list_archives(repository, storage_config, list_arguments, local_path='borg',
             if logger.isEnabledFor(logging.DEBUG) and not list_arguments.json
             else ()
         )
-        + make_flags('remote-path', remote_path)
-        + make_flags('lock-wait', lock_wait)
-        + make_flags_from_arguments(
-            list_arguments, excludes=('repository', 'archive', 'paths', 'successful')
-        )
-        + (
-            '::'.join((repository, list_arguments.archive))
-            if list_arguments.archive
-            else repository,
-        )
+        + flags.make_flags('remote-path', remote_path)
+        + flags.make_flags('lock-wait', lock_wait)
+        + flags.make_flags_from_arguments(list_arguments, excludes=MAKE_FLAGS_EXCLUDES)
+        + (
+            flags.make_repository_archive_flags(
+                repository, list_arguments.archive, local_borg_version
+            )
+            if list_arguments.archive
+            else flags.make_repository_flags(repository, local_borg_version)
+        )
         + (tuple(list_arguments.paths) if list_arguments.paths else ())
     )
 
-    return execute_command(
-        full_command,
-        output_log_level=None if list_arguments.json else logging.WARNING,
-        borg_local_path=local_path,
-    )
+
+def make_find_paths(find_paths):
+    '''
+    Given a sequence of path fragments or patterns as passed to `--find`, transform all path
+    fragments into glob patterns. Pass through existing patterns untouched.
+
+    For example, given find_paths of:
+
+      ['foo.txt', 'pp:root/somedir']
+
+    ... transform that into:
+
+      ['sh:**/*foo.txt*/**', 'pp:root/somedir']
+    '''
+    if not find_paths:
+        return ()
+
+    return tuple(
+        find_path
+        if re.compile(r'([-!+RrPp] )|(\w\w:)').match(find_path)
+        else f'sh:**/*{find_path}*/**'
+        for find_path in find_paths
+    )
+
+
+def list_archive(
+    repository,
+    storage_config,
+    local_borg_version,
+    list_arguments,
+    local_path='borg',
+    remote_path=None,
+):
+    '''
+    Given a local or remote repository path, a storage config dict, the local Borg version, the
+    arguments to the list action, and local and remote Borg paths, display the output of listing
+    the files of a Borg archive (or return JSON output). If list_arguments.find_paths are given,
+    list the files by searching across multiple archives. If neither find_paths nor archive name
+    are given, instead list the archives in the given repository.
+    '''
+    borgmatic.logger.add_custom_log_levels()
+
+    if not list_arguments.archive and not list_arguments.find_paths:
+        if feature.available(feature.Feature.RLIST, local_borg_version):
+            logger.warning(
+                'Omitting the --archive flag on the list action is deprecated when using Borg 2.x+. Use the rlist action instead.'
+            )
+
+        rlist_arguments = argparse.Namespace(
+            repository=repository,
+            short=list_arguments.short,
+            format=list_arguments.format,
+            json=list_arguments.json,
+            prefix=list_arguments.prefix,
+            match_archives=list_arguments.match_archives,
+            sort_by=list_arguments.sort_by,
+            first=list_arguments.first,
+            last=list_arguments.last,
+        )
+        return rlist.list_repository(
+            repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path
+        )
+
+    if list_arguments.archive:
+        for name in ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST:
+            if getattr(list_arguments, name, None):
+                logger.warning(
+                    f"The --{name.replace('_', '-')} flag on the list action is ignored when using the --archive flag."
+                )
+
+    if list_arguments.json:
+        raise ValueError(
+            'The --json flag on the list action is not supported when using the --archive/--find flags.'
+        )
+
+    borg_environment = environment.make_environment(storage_config)
+
+    # If there are any paths to find (and there's not a single archive already selected), start by
+    # getting a list of archives to search.
+    if list_arguments.find_paths and not list_arguments.archive:
+        rlist_arguments = argparse.Namespace(
+            repository=repository,
+            short=True,
+            format=None,
+            json=None,
+            prefix=list_arguments.prefix,
+            match_archives=list_arguments.match_archives,
+            sort_by=list_arguments.sort_by,
+            first=list_arguments.first,
+            last=list_arguments.last,
+        )
+
+        # Ask Borg to list archives. Capture its output for use below.
+        archive_lines = tuple(
+            execute_command_and_capture_output(
+                rlist.make_rlist_command(
+                    repository,
+                    storage_config,
+                    local_borg_version,
+                    rlist_arguments,
+                    local_path,
+                    remote_path,
+                ),
+                extra_environment=borg_environment,
+            )
+            .strip('\n')
+            .split('\n')
+        )
+    else:
+        archive_lines = (list_arguments.archive,)
+
+    # For each archive listed by Borg, run list on the contents of that archive.
+    for archive in archive_lines:
+        logger.answer(f'{repository}: Listing archive {archive}')
+
+        archive_arguments = copy.copy(list_arguments)
+        archive_arguments.archive = archive
+
+        # This list call is to show the files in a single archive, not list multiple archives. So
+        # blank out any archive filtering flags. They'll break anyway in Borg 2.
+        for name in ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST:
+            setattr(archive_arguments, name, None)
+
+        main_command = make_list_command(
+            repository,
+            storage_config,
+            local_borg_version,
+            archive_arguments,
+            local_path,
+            remote_path,
+        ) + make_find_paths(list_arguments.find_paths)
+
+        execute_command(
+            main_command,
+            output_log_level=logging.ANSWER,
+            borg_local_path=local_path,
+            extra_environment=borg_environment,
+        )
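Note: make_find_paths() is what backs the new --find behavior; straight from its docstring:

    from borgmatic.borg.list import make_find_paths

    print(make_find_paths(('foo.txt', 'pp:root/somedir')))
    # ('sh:**/*foo.txt*/**', 'pp:root/somedir')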
--- a/borgmatic/borg/mount.py
+++ b/borgmatic/borg/mount.py
@@ -1,5 +1,6 @@
 import logging
 
+from borgmatic.borg import environment, feature, flags
 from borgmatic.execute import DO_NOT_CAPTURE, execute_command
 
 logger = logging.getLogger(__name__)
@@ -13,13 +14,15 @@ def mount_archive(
     foreground,
     options,
     storage_config,
+    local_borg_version,
     local_path='borg',
     remote_path=None,
 ):
     '''
     Given a local or remote repository path, an optional archive name, a filesystem mount point,
     zero or more paths to mount from the archive, extra Borg mount options, a storage configuration
-    dict, and optional local and remote Borg paths, mount the archive onto the mount point.
+    dict, the local Borg version, and optional local and remote Borg paths, mount the archive onto
+    the mount point.
     '''
     umask = storage_config.get('umask', None)
     lock_wait = storage_config.get('lock_wait', None)
@@ -33,14 +36,36 @@ def mount_archive(
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + (('--foreground',) if foreground else ())
         + (('-o', options) if options else ())
-        + (('::'.join((repository, archive)),) if archive else (repository,))
+        + (
+            (
+                flags.make_repository_flags(repository, local_borg_version)
+                + (
+                    ('--match-archives', archive)
+                    if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
+                    else ('--glob-archives', archive)
+                )
+            )
+            if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
+            else (
+                flags.make_repository_archive_flags(repository, archive, local_borg_version)
+                if archive
+                else flags.make_repository_flags(repository, local_borg_version)
+            )
+        )
         + (mount_point,)
         + (tuple(paths) if paths else ())
     )
 
+    borg_environment = environment.make_environment(storage_config)
+
     # Don't capture the output when foreground mode is used so that ctrl-C can work properly.
     if foreground:
-        execute_command(full_command, output_file=DO_NOT_CAPTURE, borg_local_path=local_path)
+        execute_command(
+            full_command,
+            output_file=DO_NOT_CAPTURE,
+            borg_local_path=local_path,
+            extra_environment=borg_environment,
+        )
         return
 
-    execute_command(full_command, borg_local_path=local_path)
+    execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment)
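Note: a condensed sketch of the repository/archive portion of the mount command above, showing the
two shapes it takes (version strings are illustrative):

    from borgmatic.borg import feature, flags

    def repo_archive_part(repository, archive, local_borg_version):
        # Same branching as the conditional in the diff, boiled down.
        if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version):
            return flags.make_repository_flags(repository, local_borg_version) + (
                ('--match-archives', archive)
                if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
                else ('--glob-archives', archive)
            )
        return (
            flags.make_repository_archive_flags(repository, archive, local_borg_version)
            if archive
            else flags.make_repository_flags(repository, local_borg_version)
        )

    print(repo_archive_part('/repo', 'nightly', '1.2.1'))    # ('/repo::nightly',)
    print(repo_archive_part('/repo', 'nightly', '2.0.0b3'))  # ('--repo', '/repo', '--match-archives', 'nightly')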
@ -1,11 +1,13 @@
|
@@ -1,11 +1,13 @@
 import logging
 
+import borgmatic.logger
+from borgmatic.borg import environment, feature, flags
 from borgmatic.execute import execute_command
 
 logger = logging.getLogger(__name__)
 
 
-def _make_prune_flags(retention_config):
+def make_prune_flags(retention_config, local_borg_version):
     '''
     Given a retention config dict mapping from option name to value, transform it into an iterable of
     command-line name-value flag pairs.
@@ -22,11 +24,13 @@ def _make_prune_flags(retention_config):
     )
     '''
     config = retention_config.copy()
+    prefix = config.pop('prefix', '{hostname}-')
 
-    if 'prefix' not in config:
-        config['prefix'] = '{hostname}-'
-    elif not config['prefix']:
-        config.pop('prefix')
+    if prefix:
+        if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
+            config['match_archives'] = f'sh:{prefix}*'
+        else:
+            config['glob_archives'] = f'{prefix}*'
 
     return (
         ('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items()
@@ -38,38 +42,49 @@ def prune_archives(
     repository,
     storage_config,
     retention_config,
+    local_borg_version,
     local_path='borg',
     remote_path=None,
     stats=False,
-    files=False,
+    list_archives=False,
 ):
     '''
     Given dry-run flag, a local or remote repository path, a storage config dict, and a
     retention config dict, prune Borg archives according to the retention policy specified in that
     configuration.
     '''
+    borgmatic.logger.add_custom_log_levels()
     umask = storage_config.get('umask', None)
     lock_wait = storage_config.get('lock_wait', None)
    extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '')
 
     full_command = (
         (local_path, 'prune')
-        + tuple(element for pair in _make_prune_flags(retention_config) for element in pair)
+        + tuple(
+            element
+            for pair in make_prune_flags(retention_config, local_borg_version)
+            for element in pair
+        )
         + (('--remote-path', remote_path) if remote_path else ())
         + (('--umask', str(umask)) if umask else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())
         + (('--stats',) if stats and not dry_run else ())
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
-        + (('--list',) if files else ())
+        + (('--list',) if list_archives else ())
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
         + (('--dry-run',) if dry_run else ())
         + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
-        + (repository,)
+        + flags.make_repository_flags(repository, local_borg_version)
     )
 
-    if (stats or files) and logger.getEffectiveLevel() == logging.WARNING:
-        output_log_level = logging.WARNING
+    if stats or list_archives:
+        output_log_level = logging.ANSWER
     else:
         output_log_level = logging.INFO
 
-    execute_command(full_command, output_log_level=output_log_level, borg_local_path=local_path)
+    execute_command(
+        full_command,
+        output_log_level=output_log_level,
+        borg_local_path=local_path,
+        extra_environment=environment.make_environment(storage_config),
+    )
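This hunk (apparently the prune module) reroutes the configured archive-name prefix through Borg's newer `--match-archives sh:` syntax when the installed Borg supports it, falling back to `--glob-archives` otherwise. A standalone sketch of the transform, with the feature check stubbed out as a plain boolean so it runs without borgmatic installed:

```python
# Sketch of make_prune_flags() above; 'supports_match_archives' stands in for
# feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version).
def sketch_make_prune_flags(retention_config, supports_match_archives):
    config = retention_config.copy()
    prefix = config.pop('prefix', '{hostname}-')

    if prefix:
        if supports_match_archives:
            config['match_archives'] = f'sh:{prefix}*'
        else:
            config['glob_archives'] = f'{prefix}*'

    return tuple(
        ('--' + option_name.replace('_', '-'), str(value))
        for option_name, value in config.items()
    )

# Prints (('--keep-daily', '7'), ('--match-archives', 'sh:app-*')).
print(sketch_make_prune_flags({'keep_daily': 7, 'prefix': 'app-'}, True))
```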
@@ -0,0 +1,81 @@
+import argparse
+import logging
+import subprocess
+
+from borgmatic.borg import environment, feature, flags, rinfo
+from borgmatic.execute import DO_NOT_CAPTURE, execute_command
+
+logger = logging.getLogger(__name__)
+
+
+RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2
+
+
+def create_repository(
+    dry_run,
+    repository,
+    storage_config,
+    local_borg_version,
+    encryption_mode,
+    source_repository=None,
+    copy_crypt_key=False,
+    append_only=None,
+    storage_quota=None,
+    make_parent_dirs=False,
+    local_path='borg',
+    remote_path=None,
+):
+    '''
+    Given a dry-run flag, a local or remote repository path, a storage configuration dict, the local
+    Borg version, a Borg encryption mode, the path to another repo whose key material should be
+    reused, whether the repository should be append-only, and the storage quota to use, create the
+    repository. If the repository already exists, then log and skip creation.
+    '''
+    try:
+        rinfo.display_repository_info(
+            repository,
+            storage_config,
+            local_borg_version,
+            argparse.Namespace(json=True),
+            local_path,
+            remote_path,
+        )
+        logger.info(f'{repository}: Repository already exists. Skipping creation.')
+        return
+    except subprocess.CalledProcessError as error:
+        if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
+            raise
+
+    extra_borg_options = storage_config.get('extra_borg_options', {}).get('rcreate', '')
+
+    rcreate_command = (
+        (local_path,)
+        + (
+            ('rcreate',)
+            if feature.available(feature.Feature.RCREATE, local_borg_version)
+            else ('init',)
+        )
+        + (('--encryption', encryption_mode) if encryption_mode else ())
+        + (('--other-repo', source_repository) if source_repository else ())
+        + (('--copy-crypt-key',) if copy_crypt_key else ())
+        + (('--append-only',) if append_only else ())
+        + (('--storage-quota', storage_quota) if storage_quota else ())
+        + (('--make-parent-dirs',) if make_parent_dirs else ())
+        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+        + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
+        + (('--remote-path', remote_path) if remote_path else ())
+        + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+        + flags.make_repository_flags(repository, local_borg_version)
+    )
+
+    if dry_run:
+        logging.info(f'{repository}: Skipping repository creation (dry run)')
+        return
+
+    # Do not capture output here, so as to support interactive prompts.
+    execute_command(
+        rcreate_command,
+        output_file=DO_NOT_CAPTURE,
+        borg_local_path=local_path,
+        extra_environment=environment.make_environment(storage_config),
+    )
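This new module (the rcreate action) makes repository creation idempotent: it first probes the repository via the rinfo action, treats Borg's "repository not found" exit code 2 as a green light to create, and re-raises anything else so that, say, a connection failure is not mistaken for a missing repository. A rough standalone sketch of that probe, shelling out to a Borg 1.x binary directly rather than going through borgmatic's execute_command (the exact borg invocation here is illustrative, not lifted from the diff):

```python
import subprocess

REPOSITORY_NOT_FOUND_EXIT_CODE = 2  # Borg exits with 2 when the repo does not exist.

def repository_exists(repository_path):
    # Probe with 'borg info', mirroring create_repository()'s rinfo probe.
    try:
        subprocess.run(
            ('borg', 'info', '--json', repository_path),
            check=True,
            capture_output=True,
        )
        return True
    except subprocess.CalledProcessError as error:
        if error.returncode != REPOSITORY_NOT_FOUND_EXIT_CODE:
            raise  # Some other failure; don't misreport it as "absent".
        return False
```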
@@ -0,0 +1,61 @@
+import logging
+
+import borgmatic.logger
+from borgmatic.borg import environment, feature, flags
+from borgmatic.execute import execute_command, execute_command_and_capture_output
+
+logger = logging.getLogger(__name__)
+
+
+def display_repository_info(
+    repository,
+    storage_config,
+    local_borg_version,
+    rinfo_arguments,
+    local_path='borg',
+    remote_path=None,
+):
+    '''
+    Given a local or remote repository path, a storage config dict, the local Borg version, and the
+    arguments to the rinfo action, display summary information for the Borg repository or return
+    JSON summary information.
+    '''
+    borgmatic.logger.add_custom_log_levels()
+    lock_wait = storage_config.get('lock_wait', None)
+
+    full_command = (
+        (local_path,)
+        + (
+            ('rinfo',)
+            if feature.available(feature.Feature.RINFO, local_borg_version)
+            else ('info',)
+        )
+        + (
+            ('--info',)
+            if logger.getEffectiveLevel() == logging.INFO and not rinfo_arguments.json
+            else ()
+        )
+        + (
+            ('--debug', '--show-rc')
+            if logger.isEnabledFor(logging.DEBUG) and not rinfo_arguments.json
+            else ()
+        )
+        + flags.make_flags('remote-path', remote_path)
+        + flags.make_flags('lock-wait', lock_wait)
+        + (('--json',) if rinfo_arguments.json else ())
+        + flags.make_repository_flags(repository, local_borg_version)
+    )
+
+    extra_environment = environment.make_environment(storage_config)
+
+    if rinfo_arguments.json:
+        return execute_command_and_capture_output(
+            full_command, extra_environment=extra_environment,
+        )
+    else:
+        execute_command(
+            full_command,
+            output_log_level=logging.ANSWER,
+            borg_local_path=local_path,
+            extra_environment=extra_environment,
+        )
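In this companion module (the rinfo action), `--json` switches from logging the output at borgmatic's custom ANSWER level to capturing and returning it, which is what lets `create_repository()` above use it as an existence probe and lets other callers decode the result themselves. A hedged sketch of a caller decoding that return value; the JSON layout shown follows Borg's documented `info --json` output:

```python
import json

def repository_id(rinfo_json_output):
    # display_repository_info() returns the raw JSON string when json=True;
    # Borg reports the repository ID under the 'repository' key.
    info = json.loads(rinfo_json_output)
    return info['repository']['id']
```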
@@ -0,0 +1,127 @@
+import logging
+
+import borgmatic.logger
+from borgmatic.borg import environment, feature, flags
+from borgmatic.execute import execute_command, execute_command_and_capture_output
+
+logger = logging.getLogger(__name__)
+
+
+def resolve_archive_name(
+    repository, archive, storage_config, local_borg_version, local_path='borg', remote_path=None
+):
+    '''
+    Given a local or remote repository path, an archive name, a storage config dict, a local Borg
+    path, and a remote Borg path, simply return the archive name. But if the archive name is
+    "latest", then instead introspect the repository for the latest archive and return its name.
+
+    Raise ValueError if "latest" is given but there are no archives in the repository.
+    '''
+    if archive != "latest":
+        return archive
+
+    lock_wait = storage_config.get('lock_wait', None)
+
+    full_command = (
+        (
+            local_path,
+            'rlist' if feature.available(feature.Feature.RLIST, local_borg_version) else 'list',
+        )
+        + flags.make_flags('remote-path', remote_path)
+        + flags.make_flags('lock-wait', lock_wait)
+        + flags.make_flags('last', 1)
+        + ('--short',)
+        + flags.make_repository_flags(repository, local_borg_version)
+    )
+
+    output = execute_command_and_capture_output(
+        full_command, extra_environment=environment.make_environment(storage_config),
+    )
+    try:
+        latest_archive = output.strip().splitlines()[-1]
+    except IndexError:
+        raise ValueError('No archives found in the repository')
+
+    logger.debug('{}: Latest archive is {}'.format(repository, latest_archive))
+
+    return latest_archive
+
+
+MAKE_FLAGS_EXCLUDES = ('repository', 'prefix')
+
+
+def make_rlist_command(
+    repository,
+    storage_config,
+    local_borg_version,
+    rlist_arguments,
+    local_path='borg',
+    remote_path=None,
+):
+    '''
+    Given a local or remote repository path, a storage config dict, the local Borg version, the
+    arguments to the rlist action, and local and remote Borg paths, return a command as a tuple to
+    list archives with a repository.
+    '''
+    lock_wait = storage_config.get('lock_wait', None)
+
+    return (
+        (
+            local_path,
+            'rlist' if feature.available(feature.Feature.RLIST, local_borg_version) else 'list',
+        )
+        + (
+            ('--info',)
+            if logger.getEffectiveLevel() == logging.INFO and not rlist_arguments.json
+            else ()
+        )
+        + (
+            ('--debug', '--show-rc')
+            if logger.isEnabledFor(logging.DEBUG) and not rlist_arguments.json
+            else ()
+        )
+        + flags.make_flags('remote-path', remote_path)
+        + flags.make_flags('lock-wait', lock_wait)
+        + (
+            (
+                flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*')
+                if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
+                else flags.make_flags('glob-archives', f'{rlist_arguments.prefix}*')
+            )
+            if rlist_arguments.prefix
+            else ()
+        )
+        + flags.make_flags_from_arguments(rlist_arguments, excludes=MAKE_FLAGS_EXCLUDES)
+        + flags.make_repository_flags(repository, local_borg_version)
+    )
+
+
+def list_repository(
+    repository,
+    storage_config,
+    local_borg_version,
+    rlist_arguments,
+    local_path='borg',
+    remote_path=None,
+):
+    '''
+    Given a local or remote repository path, a storage config dict, the local Borg version, the
+    arguments to the list action, and local and remote Borg paths, display the output of listing
+    Borg archives in the given repository (or return JSON output).
+    '''
+    borgmatic.logger.add_custom_log_levels()
+    borg_environment = environment.make_environment(storage_config)
+
+    main_command = make_rlist_command(
+        repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path
+    )
+
+    if rlist_arguments.json:
+        return execute_command_and_capture_output(main_command, extra_environment=borg_environment,)
+    else:
+        execute_command(
+            main_command,
+            output_log_level=logging.ANSWER,
+            borg_local_path=local_path,
+            extra_environment=borg_environment,
+        )
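`resolve_archive_name()` implements the "latest" convenience by listing with `--last 1 --short` and taking the final output line. A minimal sketch against a Borg 1.x binary; the direct subprocess call is my own substitution, while the parsing mirrors the function above:

```python
import subprocess

def resolve_latest(repository):
    # 'borg list --last 1 --short REPO' prints at most one archive name.
    output = subprocess.check_output(
        ('borg', 'list', '--last', '1', '--short', repository), text=True
    )
    try:
        return output.strip().splitlines()[-1]
    except IndexError:
        raise ValueError('No archives found in the repository')
```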
@@ -0,0 +1 @@
+DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'
@@ -0,0 +1,50 @@
+import logging
+
+import borgmatic.logger
+from borgmatic.borg import environment, flags
+from borgmatic.execute import execute_command
+
+logger = logging.getLogger(__name__)
+
+
+def transfer_archives(
+    dry_run,
+    repository,
+    storage_config,
+    local_borg_version,
+    transfer_arguments,
+    local_path='borg',
+    remote_path=None,
+):
+    '''
+    Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg
+    version, and the arguments to the transfer action, transfer archives to the given repository.
+    '''
+    borgmatic.logger.add_custom_log_levels()
+
+    full_command = (
+        (local_path, 'transfer')
+        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+        + flags.make_flags('remote-path', remote_path)
+        + flags.make_flags('lock-wait', storage_config.get('lock_wait', None))
+        + (
+            flags.make_flags(
+                'match-archives', transfer_arguments.match_archives or transfer_arguments.archive
+            )
+        )
+        + flags.make_flags_from_arguments(
+            transfer_arguments,
+            excludes=('repository', 'source_repository', 'archive', 'match_archives'),
+        )
+        + flags.make_repository_flags(repository, local_borg_version)
+        + flags.make_flags('other-repo', transfer_arguments.source_repository)
+        + flags.make_flags('dry-run', dry_run)
+    )
+
+    return execute_command(
+        full_command,
+        output_log_level=logging.ANSWER,
+        borg_local_path=local_path,
+        extra_environment=environment.make_environment(storage_config),
+    )
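The whole transfer command rests on `flags.make_flags()`, which is why `make_flags('dry-run', dry_run)` appends `--dry-run` only on dry runs and `make_flags('other-repo', None)` vanishes entirely. Roughly what that helper does, judging by how it's used here (a sketch, not the module's actual source):

```python
# Falsy values disappear, True becomes a bare flag, and anything else becomes
# a ('--name', 'value') pair with underscores turned into dashes.
def make_flags(name, value):
    if not value:
        return ()
    flag = f"--{name.replace('_', '-')}"
    if value is True:
        return (flag,)
    return (flag, str(value))

assert make_flags('dry-run', True) == ('--dry-run',)       # bare flag
assert make_flags('lock-wait', 5) == ('--lock-wait', '5')  # name-value pair
assert make_flags('other-repo', None) == ()                # dropped entirely
```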
@@ -1,13 +1,14 @@
 import logging
 
-from borgmatic.execute import execute_command
+from borgmatic.borg import environment
+from borgmatic.execute import execute_command_and_capture_output
 
 logger = logging.getLogger(__name__)
 
 
-def local_borg_version(local_path='borg'):
+def local_borg_version(storage_config, local_path='borg'):
     '''
-    Given a local Borg binary path, return a version string for it.
+    Given a storage configuration dict and a local Borg binary path, return a version string for it.
 
     Raise OSError or CalledProcessError if there is a problem running Borg.
     Raise ValueError if the version cannot be parsed.
@@ -17,7 +18,9 @@ def local_borg_version(local_path='borg'):
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
     )
-    output = execute_command(full_command, output_log_level=None, borg_local_path=local_path)
+    output = execute_command_and_capture_output(
+        full_command, extra_environment=environment.make_environment(storage_config),
+    )
 
     try:
         return output.split(' ')[1].strip()
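The version lookup now routes through `execute_command_and_capture_output` with the Borg environment applied; the parse itself is unchanged, taking the second whitespace-separated token of output like `borg 1.2.0`. In miniature:

```python
def parse_borg_version(output):
    # 'borg --version' prints e.g. 'borg 1.2.0'; the second token is the version.
    try:
        return output.split(' ')[1].strip()
    except IndexError:
        raise ValueError('Could not parse Borg version string')

assert parse_borg_version('borg 1.2.0\n') == '1.2.0'
```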
@@ -4,18 +4,22 @@ from argparse import Action, ArgumentParser
 from borgmatic.config import collect
 
 SUBPARSER_ALIASES = {
-    'init': ['--init', '-I'],
-    'prune': ['--prune', '-p'],
+    'rcreate': ['init', '-I'],
+    'prune': ['-p'],
     'compact': [],
-    'create': ['--create', '-C'],
-    'check': ['--check', '-k'],
-    'extract': ['--extract', '-x'],
-    'export-tar': ['--export-tar'],
-    'mount': ['--mount', '-m'],
-    'umount': ['--umount', '-u'],
-    'restore': ['--restore', '-r'],
-    'list': ['--list', '-l'],
-    'info': ['--info', '-i'],
+    'create': ['-C'],
+    'check': ['-k'],
+    'extract': ['-x'],
+    'export-tar': [],
+    'mount': ['-m'],
+    'umount': ['-u'],
+    'restore': ['-r'],
+    'rlist': [],
+    'list': ['-l'],
+    'rinfo': [],
+    'info': ['-i'],
+    'transfer': [],
+    'break-lock': [],
     'borg': [],
 }
 
@@ -109,10 +113,9 @@ class Extend_action(Action):
         setattr(namespace, self.dest, list(values))
 
 
-def parse_arguments(*unparsed_arguments):
+def make_parsers():
     '''
-    Given command-line arguments with which this script was invoked, parse the arguments and return
-    them as a dict mapping from subparser name (or "global") to an argparse.Namespace instance.
+    Build a top-level parser and its subparsers and return them as a tuple.
     '''
     config_paths = collect.get_default_config_paths(expand_home=True)
     unexpanded_config_paths = collect.get_default_config_paths(expand_home=False)
@@ -189,6 +192,18 @@ def parse_arguments(*unparsed_arguments):
         action='extend',
         help='One or more configuration file options to override with specified values',
     )
+    global_group.add_argument(
+        '--no-environment-interpolation',
+        dest='resolve_env',
+        action='store_false',
+        help='Do not resolve environment variables in configuration file',
+    )
+    global_group.add_argument(
+        '--bash-completion',
+        default=False,
+        action='store_true',
+        help='Show bash completion script and exit',
+    )
     global_group.add_argument(
         '--version',
         dest='version',
@@ -211,33 +226,93 @@ def parse_arguments(*unparsed_arguments):
         metavar='',
         help='Specify zero or more actions. Defaults to prune, compact, create, and check. Use --help with action for details:',
     )
-    init_parser = subparsers.add_parser(
-        'init',
-        aliases=SUBPARSER_ALIASES['init'],
-        help='Initialize an empty Borg repository',
-        description='Initialize an empty Borg repository',
+    rcreate_parser = subparsers.add_parser(
+        'rcreate',
+        aliases=SUBPARSER_ALIASES['rcreate'],
+        help='Create a new, empty Borg repository',
+        description='Create a new, empty Borg repository',
         add_help=False,
     )
-    init_group = init_parser.add_argument_group('init arguments')
-    init_group.add_argument(
+    rcreate_group = rcreate_parser.add_argument_group('rcreate arguments')
+    rcreate_group.add_argument(
         '-e',
         '--encryption',
         dest='encryption_mode',
         help='Borg repository encryption mode',
         required=True,
     )
-    init_group.add_argument(
-        '--append-only',
-        dest='append_only',
+    rcreate_group.add_argument(
+        '--source-repository',
+        '--other-repo',
+        metavar='KEY_REPOSITORY',
+        help='Path to an existing Borg repository whose key material should be reused (Borg 2.x+ only)',
+    )
+    rcreate_group.add_argument(
+        '--copy-crypt-key',
         action='store_true',
-        help='Create an append-only repository',
+        help='Copy the crypt key used for authenticated encryption from the source repository, defaults to a new random key (Borg 2.x+ only)',
     )
-    init_group.add_argument(
-        '--storage-quota',
-        dest='storage_quota',
-        help='Create a repository with a fixed storage quota',
+    rcreate_group.add_argument(
+        '--append-only', action='store_true', help='Create an append-only repository',
+    )
+    rcreate_group.add_argument(
+        '--storage-quota', help='Create a repository with a fixed storage quota',
+    )
+    rcreate_group.add_argument(
+        '--make-parent-dirs',
+        action='store_true',
+        help='Create any missing parent directories of the repository directory',
+    )
+    rcreate_group.add_argument(
+        '-h', '--help', action='help', help='Show this help message and exit'
+    )
+
+    transfer_parser = subparsers.add_parser(
+        'transfer',
+        aliases=SUBPARSER_ALIASES['transfer'],
+        help='Transfer archives from one repository to another, optionally upgrading the transferred data (Borg 2.0+ only)',
+        description='Transfer archives from one repository to another, optionally upgrading the transferred data (Borg 2.0+ only)',
+        add_help=False,
+    )
+    transfer_group = transfer_parser.add_argument_group('transfer arguments')
+    transfer_group.add_argument(
+        '--repository',
+        help='Path of existing destination repository to transfer archives to, defaults to the configured repository if there is only one',
+    )
+    transfer_group.add_argument(
+        '--source-repository',
+        help='Path of existing source repository to transfer archives from',
+        required=True,
+    )
+    transfer_group.add_argument(
+        '--archive',
+        help='Name of single archive to transfer (or "latest"), defaults to transferring all archives',
+    )
+    transfer_group.add_argument(
+        '--upgrader',
+        help='Upgrader type used to convert the transferred data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion',
+    )
+    transfer_group.add_argument(
+        '-a',
+        '--match-archives',
+        '--glob-archives',
+        metavar='PATTERN',
+        help='Only transfer archives with names matching this pattern',
+    )
+    transfer_group.add_argument(
+        '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
+    )
+    transfer_group.add_argument(
+        '--first',
+        metavar='N',
+        help='Only transfer first N archives after other filters are applied',
+    )
+    transfer_group.add_argument(
+        '--last', metavar='N', help='Only transfer last N archives after other filters are applied'
+    )
+    transfer_group.add_argument(
+        '-h', '--help', action='help', help='Show this help message and exit'
     )
-    init_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
     prune_parser = subparsers.add_parser(
         'prune',
@@ -255,7 +330,7 @@ def parse_arguments(*unparsed_arguments):
         help='Display statistics of archive',
     )
     prune_group.add_argument(
-        '--files', dest='files', default=False, action='store_true', help='Show per-file details'
+        '--list', dest='list_archives', action='store_true', help='List archives kept/pruned'
     )
     prune_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
@@ -279,7 +354,7 @@ def parse_arguments(*unparsed_arguments):
         dest='cleanup_commits',
         default=False,
         action='store_true',
-        help='Cleanup commit-only 17-byte segment files left behind by Borg 1.1',
+        help='Cleanup commit-only 17-byte segment files left behind by Borg 1.1 (flag in Borg 1.2 only)',
     )
     compact_group.add_argument(
         '--threshold',
@@ -294,8 +369,8 @@ def parse_arguments(*unparsed_arguments):
     create_parser = subparsers.add_parser(
         'create',
         aliases=SUBPARSER_ALIASES['create'],
-        help='Create archives (actually perform backups)',
-        description='Create archives (actually perform backups)',
+        help='Create an archive (actually perform a backup)',
+        description='Create an archive (actually perform a backup)',
         add_help=False,
     )
     create_group = create_parser.add_argument_group('create arguments')
@@ -314,7 +389,7 @@ def parse_arguments(*unparsed_arguments):
         help='Display statistics of archive',
     )
     create_group.add_argument(
-        '--files', dest='files', default=False, action='store_true', help='Show per-file details'
+        '--list', '--files', dest='list_files', action='store_true', help='Show per-file details'
    )
     create_group.add_argument(
         '--json', dest='json', default=False, action='store_true', help='Output results as JSON'
@@ -341,7 +416,7 @@ def parse_arguments(*unparsed_arguments):
         dest='repair',
         default=False,
         action='store_true',
-        help='Attempt to repair any inconsistencies found (experimental and only for interactive use)',
+        help='Attempt to repair any inconsistencies found (for interactive use)',
     )
     check_group.add_argument(
         '--only',
@@ -349,7 +424,13 @@ def parse_arguments(*unparsed_arguments):
         choices=('repository', 'archives', 'data', 'extract'),
         dest='only',
         action='append',
-        help='Run a particular consistency check (repository, archives, data, or extract) instead of configured checks; can specify flag multiple times',
+        help='Run a particular consistency check (repository, archives, data, or extract) instead of configured checks (subject to configured frequency, can specify flag multiple times)',
+    )
+    check_group.add_argument(
+        '--force',
+        default=False,
+        action='store_true',
+        help='Ignore configured check frequencies and run checks unconditionally',
     )
     check_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
@@ -426,14 +507,14 @@ def parse_arguments(*unparsed_arguments):
         '--destination',
         metavar='PATH',
         dest='destination',
-        help='Path to destination export tar file, or "-" for stdout (but be careful about dirtying output with --verbosity or --files)',
+        help='Path to destination export tar file, or "-" for stdout (but be careful about dirtying output with --verbosity or --list)',
         required=True,
     )
     export_tar_group.add_argument(
         '--tar-filter', help='Name of filter program to pipe data through'
     )
     export_tar_group.add_argument(
-        '--files', default=False, action='store_true', help='Show per-file details'
+        '--list', '--files', dest='list_files', action='store_true', help='Show per-file details'
     )
     export_tar_group.add_argument(
         '--strip-components',
@@ -526,27 +607,74 @@ def parse_arguments(*unparsed_arguments):
         '-h', '--help', action='help', help='Show this help message and exit'
     )
 
+    rlist_parser = subparsers.add_parser(
+        'rlist',
+        aliases=SUBPARSER_ALIASES['rlist'],
+        help='List repository',
+        description='List the archives in a repository',
+        add_help=False,
+    )
+    rlist_group = rlist_parser.add_argument_group('rlist arguments')
+    rlist_group.add_argument(
+        '--repository', help='Path of repository to list, defaults to the configured repositories',
+    )
+    rlist_group.add_argument(
+        '--short', default=False, action='store_true', help='Output only archive names'
+    )
+    rlist_group.add_argument('--format', help='Format for archive listing')
+    rlist_group.add_argument(
+        '--json', default=False, action='store_true', help='Output results as JSON'
+    )
+    rlist_group.add_argument(
+        '-P', '--prefix', help='Only list archive names starting with this prefix'
+    )
+    rlist_group.add_argument(
+        '-a',
+        '--match-archives',
+        '--glob-archives',
+        metavar='PATTERN',
+        help='Only list archive names matching this pattern',
+    )
+    rlist_group.add_argument(
+        '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
+    )
+    rlist_group.add_argument(
+        '--first', metavar='N', help='List first N archives after other filters are applied'
+    )
+    rlist_group.add_argument(
+        '--last', metavar='N', help='List last N archives after other filters are applied'
+    )
+    rlist_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
+
     list_parser = subparsers.add_parser(
         'list',
         aliases=SUBPARSER_ALIASES['list'],
-        help='List archives',
-        description='List archives or the contents of an archive',
+        help='List archive',
+        description='List the files in an archive or search for a file across archives',
         add_help=False,
     )
     list_group = list_parser.add_argument_group('list arguments')
     list_group.add_argument(
-        '--repository', help='Path of repository to list, defaults to the configured repositories',
+        '--repository',
+        help='Path of repository containing archive to list, defaults to the configured repositories',
     )
-    list_group.add_argument('--archive', help='Name of archive to list (or "latest")')
+    list_group.add_argument('--archive', help='Name of the archive to list (or "latest")')
     list_group.add_argument(
         '--path',
         metavar='PATH',
         nargs='+',
         dest='paths',
-        help='Paths to list from archive, defaults to the entire archive',
+        help='Paths or patterns to list from a single selected archive (via "--archive"), defaults to listing the entire archive',
     )
     list_group.add_argument(
-        '--short', default=False, action='store_true', help='Output only archive or path names'
+        '--find',
+        metavar='PATH',
+        nargs='+',
+        dest='find_paths',
+        help='Partial paths or patterns to search for and list across multiple archives',
+    )
+    list_group.add_argument(
+        '--short', default=False, action='store_true', help='Output only path names'
     )
     list_group.add_argument('--format', help='Format for file listing')
     list_group.add_argument(
@@ -556,13 +684,17 @@ def parse_arguments(*unparsed_arguments):
         '-P', '--prefix', help='Only list archive names starting with this prefix'
     )
     list_group.add_argument(
-        '-a', '--glob-archives', metavar='GLOB', help='Only list archive names matching this glob'
+        '-a',
+        '--match-archives',
+        '--glob-archives',
+        metavar='PATTERN',
+        help='Only list archive names matching this pattern',
    )
     list_group.add_argument(
         '--successful',
-        default=False,
+        default=True,
         action='store_true',
-        help='Only list archive names of successful (non-checkpoint) backups',
+        help='Deprecated; no effect. Newer versions of Borg show successful (non-checkpoint) archives by default.',
     )
     list_group.add_argument(
         '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
@@ -587,17 +719,34 @@ def parse_arguments(*unparsed_arguments):
     )
     list_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
+    rinfo_parser = subparsers.add_parser(
+        'rinfo',
+        aliases=SUBPARSER_ALIASES['rinfo'],
+        help='Show repository summary information such as disk space used',
+        description='Show repository summary information such as disk space used',
+        add_help=False,
+    )
+    rinfo_group = rinfo_parser.add_argument_group('rinfo arguments')
+    rinfo_group.add_argument(
+        '--repository',
+        help='Path of repository to show info for, defaults to the configured repository if there is only one',
+    )
+    rinfo_group.add_argument(
+        '--json', dest='json', default=False, action='store_true', help='Output results as JSON'
+    )
+    rinfo_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
+
     info_parser = subparsers.add_parser(
         'info',
         aliases=SUBPARSER_ALIASES['info'],
-        help='Display summary information on archives',
-        description='Display summary information on archives',
+        help='Show archive summary information such as disk space used',
+        description='Show archive summary information such as disk space used',
         add_help=False,
     )
     info_group = info_parser.add_argument_group('info arguments')
     info_group.add_argument(
         '--repository',
-        help='Path of repository to show info for, defaults to the configured repository if there is only one',
+        help='Path of repository containing archive to show info for, defaults to the configured repository if there is only one',
     )
     info_group.add_argument('--archive', help='Name of archive to show info for (or "latest")')
     info_group.add_argument(
@@ -608,9 +757,10 @@ def parse_arguments(*unparsed_arguments):
     )
     info_group.add_argument(
         '-a',
+        '--match-archives',
         '--glob-archives',
-        metavar='GLOB',
-        help='Only show info for archive names matching this glob',
+        metavar='PATTERN',
+        help='Only show info for archive names matching this pattern',
     )
     info_group.add_argument(
         '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
@@ -625,6 +775,22 @@ def parse_arguments(*unparsed_arguments):
     )
     info_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
+    break_lock_parser = subparsers.add_parser(
+        'break-lock',
+        aliases=SUBPARSER_ALIASES['break-lock'],
+        help='Break the repository and cache locks left behind by Borg aborting',
+        description='Break Borg repository and cache locks left behind by Borg aborting',
+        add_help=False,
+    )
+    break_lock_group = break_lock_parser.add_argument_group('break-lock arguments')
+    break_lock_group.add_argument(
+        '--repository',
+        help='Path of repository to break the lock for, defaults to the configured repository if there is only one',
+    )
+    break_lock_group.add_argument(
+        '-h', '--help', action='help', help='Show this help message and exit'
+    )
+
     borg_parser = subparsers.add_parser(
         'borg',
         aliases=SUBPARSER_ALIASES['borg'],
@@ -647,6 +813,16 @@ def parse_arguments(*unparsed_arguments):
     )
     borg_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
+    return top_level_parser, subparsers
+
+
+def parse_arguments(*unparsed_arguments):
+    '''
+    Given command-line arguments with which this script was invoked, parse the arguments and return
+    them as a dict mapping from subparser name (or "global") to an argparse.Namespace instance.
+    '''
+    top_level_parser, subparsers = make_parsers()
+
     arguments, remaining_arguments = parse_subparser_arguments(
         unparsed_arguments, subparsers.choices
     )
@@ -654,21 +830,32 @@ def parse_arguments(*unparsed_arguments):
 
     if arguments['global'].excludes_filename:
         raise ValueError(
-            'The --excludes option has been replaced with exclude_patterns in configuration'
+            'The --excludes flag has been replaced with exclude_patterns in configuration.'
         )
 
-    if 'init' in arguments and arguments['global'].dry_run:
-        raise ValueError('The init action cannot be used with the --dry-run option')
-
-    if 'list' in arguments and arguments['list'].glob_archives and arguments['list'].successful:
-        raise ValueError('The --glob-archives and --successful options cannot be used together')
+    if (
+        ('list' in arguments and 'rinfo' in arguments and arguments['list'].json)
+        or ('list' in arguments and 'info' in arguments and arguments['list'].json)
+        or ('rinfo' in arguments and 'info' in arguments and arguments['rinfo'].json)
+    ):
+        raise ValueError('With the --json flag, multiple actions cannot be used together.')
 
     if (
-        'list' in arguments
-        and 'info' in arguments
-        and arguments['list'].json
-        and arguments['info'].json
+        'transfer' in arguments
+        and arguments['transfer'].archive
+        and arguments['transfer'].match_archives
     ):
-        raise ValueError('With the --json option, list and info actions cannot be used together')
+        raise ValueError(
+            'With the transfer action, only one of --archive and --glob-archives flags can be used.'
+        )
+
+    if 'info' in arguments and (
+        (arguments['info'].archive and arguments['info'].prefix)
+        or (arguments['info'].archive and arguments['info'].match_archives)
+        or (arguments['info'].prefix and arguments['info'].match_archives)
+    ):
+        raise ValueError(
+            'With the info action, only one of --archive, --prefix, or --match-archives flags can be used.'
+        )
 
     return arguments
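`parse_arguments()` keeps its public contract while delegating parser construction to the new `make_parsers()`: it still returns a dict mapping each requested action name (plus `'global'`) to an `argparse.Namespace`. A quick usage sketch, assuming borgmatic is importable:

```python
from borgmatic.commands.arguments import parse_arguments

# One Namespace per requested action, plus one for global flags.
arguments = parse_arguments('rcreate', '--encryption', 'repokey')

assert 'global' in arguments
assert arguments['rcreate'].encryption_mode == 'repokey'
```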
@ -11,25 +11,29 @@ from subprocess import CalledProcessError
|
||||||
import colorama
|
import colorama
|
||||||
import pkg_resources
|
import pkg_resources
|
||||||
|
|
||||||
|
import borgmatic.commands.completion
|
||||||
from borgmatic.borg import borg as borg_borg
|
from borgmatic.borg import borg as borg_borg
|
||||||
|
from borgmatic.borg import break_lock as borg_break_lock
|
||||||
from borgmatic.borg import check as borg_check
|
from borgmatic.borg import check as borg_check
|
||||||
from borgmatic.borg import compact as borg_compact
|
from borgmatic.borg import compact as borg_compact
|
||||||
from borgmatic.borg import create as borg_create
|
from borgmatic.borg import create as borg_create
|
||||||
from borgmatic.borg import environment as borg_environment
|
|
||||||
from borgmatic.borg import export_tar as borg_export_tar
|
from borgmatic.borg import export_tar as borg_export_tar
|
||||||
from borgmatic.borg import extract as borg_extract
|
from borgmatic.borg import extract as borg_extract
|
||||||
from borgmatic.borg import feature as borg_feature
|
from borgmatic.borg import feature as borg_feature
|
||||||
from borgmatic.borg import info as borg_info
|
from borgmatic.borg import info as borg_info
|
||||||
from borgmatic.borg import init as borg_init
|
|
||||||
from borgmatic.borg import list as borg_list
|
from borgmatic.borg import list as borg_list
|
||||||
from borgmatic.borg import mount as borg_mount
|
from borgmatic.borg import mount as borg_mount
|
||||||
from borgmatic.borg import prune as borg_prune
|
from borgmatic.borg import prune as borg_prune
|
||||||
|
from borgmatic.borg import rcreate as borg_rcreate
|
||||||
|
from borgmatic.borg import rinfo as borg_rinfo
|
||||||
|
from borgmatic.borg import rlist as borg_rlist
|
||||||
|
from borgmatic.borg import transfer as borg_transfer
|
||||||
from borgmatic.borg import umount as borg_umount
|
from borgmatic.borg import umount as borg_umount
|
||||||
from borgmatic.borg import version as borg_version
|
from borgmatic.borg import version as borg_version
|
||||||
from borgmatic.commands.arguments import parse_arguments
|
from borgmatic.commands.arguments import parse_arguments
|
||||||
from borgmatic.config import checks, collect, convert, validate
|
from borgmatic.config import checks, collect, convert, validate
|
||||||
from borgmatic.hooks import command, dispatch, dump, monitor
|
from borgmatic.hooks import command, dispatch, dump, monitor
|
||||||
from borgmatic.logger import configure_logging, should_do_markup
|
from borgmatic.logger import add_custom_log_levels, configure_logging, should_do_markup
|
||||||
from borgmatic.signals import configure_signals
|
from borgmatic.signals import configure_signals
|
||||||
from borgmatic.verbosity import verbosity_to_log_level
|
from borgmatic.verbosity import verbosity_to_log_level
|
||||||
|
|
||||||
|
@ -59,20 +63,15 @@ def run_configuration(config_filename, config, arguments):
|
||||||
remote_path = location.get('remote_path')
|
remote_path = location.get('remote_path')
|
||||||
retries = storage.get('retries', 0)
|
retries = storage.get('retries', 0)
|
||||||
retry_wait = storage.get('retry_wait', 0)
|
retry_wait = storage.get('retry_wait', 0)
|
||||||
borg_environment.initialize(storage)
|
|
||||||
encountered_error = None
|
encountered_error = None
|
||||||
error_repository = ''
|
error_repository = ''
|
||||||
using_primary_action = {'prune', 'compact', 'create', 'check'}.intersection(arguments)
|
using_primary_action = {'prune', 'compact', 'create', 'check'}.intersection(arguments)
|
||||||
monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)
|
monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)
|
||||||
|
|
||||||
hook_context = {
|
|
||||||
'repositories': ','.join(location['repositories']),
|
|
||||||
}
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
local_borg_version = borg_version.local_borg_version(local_path)
|
local_borg_version = borg_version.local_borg_version(storage, local_path)
|
||||||
except (OSError, CalledProcessError, ValueError) as error:
|
except (OSError, CalledProcessError, ValueError) as error:
|
||||||
yield from make_error_log_records(
|
yield from log_error_records(
|
||||||
'{}: Error getting local Borg version'.format(config_filename), error
|
'{}: Error getting local Borg version'.format(config_filename), error
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
@ -87,50 +86,6 @@ def run_configuration(config_filename, config, arguments):
|
||||||
monitoring_log_level,
|
monitoring_log_level,
|
||||||
global_arguments.dry_run,
|
global_arguments.dry_run,
|
||||||
)
|
)
|
||||||
if 'prune' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('before_prune'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'pre-prune',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
**hook_context,
|
|
||||||
)
|
|
||||||
if 'compact' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('before_compact'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'pre-compact',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
)
|
|
||||||
if 'create' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('before_backup'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'pre-backup',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
**hook_context,
|
|
||||||
)
|
|
||||||
if 'check' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('before_check'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'pre-check',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
**hook_context,
|
|
||||||
)
|
|
||||||
if 'extract' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('before_extract'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'pre-extract',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
**hook_context,
|
|
||||||
)
|
|
||||||
if using_primary_action:
|
if using_primary_action:
|
||||||
dispatch.call_hooks(
|
dispatch.call_hooks(
|
||||||
'ping_monitor',
|
'ping_monitor',
|
||||||
|
@ -146,9 +101,7 @@ def run_configuration(config_filename, config, arguments):
|
||||||
return
|
return
|
||||||
|
|
||||||
encountered_error = error
|
encountered_error = error
|
||||||
yield from make_error_log_records(
|
yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
|
||||||
'{}: Error running pre hook'.format(config_filename), error
|
|
||||||
)
|
|
||||||
|
|
||||||
if not encountered_error:
|
if not encountered_error:
|
||||||
repo_queue = Queue()
|
repo_queue = Queue()
|
||||||
|
@ -164,6 +117,7 @@ def run_configuration(config_filename, config, arguments):
|
||||||
try:
|
try:
|
||||||
yield from run_actions(
|
yield from run_actions(
|
||||||
arguments=arguments,
|
arguments=arguments,
|
||||||
|
config_filename=config_filename,
|
||||||
location=location,
|
location=location,
|
||||||
storage=storage,
|
storage=storage,
|
||||||
retention=retention,
|
retention=retention,
|
||||||
|
@ -175,72 +129,32 @@ def run_configuration(config_filename, config, arguments):
|
||||||
repository_path=repository_path,
|
repository_path=repository_path,
|
||||||
)
|
)
|
||||||
except (OSError, CalledProcessError, ValueError) as error:
|
except (OSError, CalledProcessError, ValueError) as error:
|
||||||
yield from make_error_log_records(
|
|
||||||
'{}: Error running actions for repository'.format(repository_path), error
|
|
||||||
)
|
|
||||||
if retry_num < retries:
|
if retry_num < retries:
|
||||||
repo_queue.put((repository_path, retry_num + 1),)
|
repo_queue.put((repository_path, retry_num + 1),)
|
||||||
|
tuple( # Consume the generator so as to trigger logging.
|
||||||
|
log_error_records(
|
||||||
|
'{}: Error running actions for repository'.format(repository_path),
|
||||||
|
error,
|
||||||
|
levelno=logging.WARNING,
|
||||||
|
log_command_error_output=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
logger.warning(
|
logger.warning(
|
||||||
f'{config_filename}: Retrying... attempt {retry_num + 1}/{retries}'
|
f'{config_filename}: Retrying... attempt {retry_num + 1}/{retries}'
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
if command.considered_soft_failure(config_filename, error):
|
||||||
|
return
|
||||||
|
|
||||||
|
yield from log_error_records(
|
||||||
|
'{}: Error running actions for repository'.format(repository_path), error
|
||||||
|
)
|
||||||
encountered_error = error
|
encountered_error = error
|
||||||
error_repository = repository_path
|
error_repository = repository_path
|
||||||
|
|
||||||
if not encountered_error:
|
if not encountered_error:
|
||||||
try:
|
try:
|
||||||
if 'prune' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('after_prune'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'post-prune',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
**hook_context,
|
|
||||||
)
|
|
||||||
if 'compact' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('after_compact'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'post-compact',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
)
|
|
||||||
if 'create' in arguments:
|
|
||||||
dispatch.call_hooks(
|
|
||||||
'remove_database_dumps',
|
|
||||||
hooks,
|
|
||||||
config_filename,
|
|
||||||
dump.DATABASE_HOOK_NAMES,
|
|
||||||
location,
|
|
||||||
global_arguments.dry_run,
|
|
||||||
)
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('after_backup'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'post-backup',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
**hook_context,
|
|
||||||
)
|
|
||||||
if 'check' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('after_check'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'post-check',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
**hook_context,
|
|
||||||
)
|
|
||||||
if 'extract' in arguments:
|
|
||||||
command.execute_hook(
|
|
||||||
hooks.get('after_extract'),
|
|
||||||
hooks.get('umask'),
|
|
||||||
config_filename,
|
|
||||||
'post-extract',
|
|
||||||
global_arguments.dry_run,
|
|
||||||
**hook_context,
|
|
||||||
)
|
|
||||||
if using_primary_action:
|
if using_primary_action:
|
||||||
dispatch.call_hooks(
|
dispatch.call_hooks(
|
||||||
'ping_monitor',
|
'ping_monitor',
|
||||||
|
@ -264,9 +178,7 @@ def run_configuration(config_filename, config, arguments):
|
||||||
return
|
return
|
||||||
|
|
||||||
encountered_error = error
|
encountered_error = error
|
||||||
yield from make_error_log_records(
|
yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
|
||||||
'{}: Error running post hook'.format(config_filename), error
|
|
||||||
)
|
|
||||||
|
|
||||||
if encountered_error and using_primary_action:
|
if encountered_error and using_primary_action:
|
||||||
try:
|
try:
|
||||||
|
@ -301,7 +213,7 @@ def run_configuration(config_filename, config, arguments):
|
||||||
if command.considered_soft_failure(config_filename, error):
|
if command.considered_soft_failure(config_filename, error):
|
||||||
return
|
return
|
||||||
|
|
||||||
yield from make_error_log_records(
|
yield from log_error_records(
|
||||||
'{}: Error running on-error hook'.format(config_filename), error
|
'{}: Error running on-error hook'.format(config_filename), error
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -309,6 +221,7 @@ def run_configuration(config_filename, config, arguments):
|
||||||
def run_actions(
|
def run_actions(
|
||||||
*,
|
*,
|
||||||
arguments,
|
arguments,
|
||||||
|
config_filename,
|
||||||
location,
|
location,
|
||||||
storage,
|
storage,
|
||||||
retention,
|
retention,
|
||||||
|
@ -318,63 +231,137 @@ def run_actions(
|
||||||
remote_path,
|
remote_path,
|
||||||
local_borg_version,
|
local_borg_version,
|
||||||
repository_path,
|
repository_path,
|
||||||
): # pragma: no cover
|
):
|
||||||
'''
|
'''
|
||||||
Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
|
Given parsed command-line arguments as an argparse.ArgumentParser instance, the configuration
|
||||||
configuration dicts, local and remote paths to Borg, a local Borg version string, and a
|
filename, several different configuration dicts, local and remote paths to Borg, a local Borg
|
||||||
repository name, run all actions from the command-line arguments on the given repository.
|
version string, and a repository name, run all actions from the command-line arguments on the
|
||||||
|
given repository.
|
||||||
|
|
||||||
Yield JSON output strings from executing any actions that produce JSON.
|
Yield JSON output strings from executing any actions that produce JSON.
|
||||||
|
|
||||||
Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
|
Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
|
||||||
action. Raise ValueError if the arguments or configuration passed to action are invalid.
|
action or a hook. Raise ValueError if the arguments or configuration passed to action are
|
||||||
|
invalid.
|
||||||
'''
|
'''
|
||||||
|
add_custom_log_levels()
|
||||||
repository = os.path.expanduser(repository_path)
|
repository = os.path.expanduser(repository_path)
|
||||||
global_arguments = arguments['global']
|
global_arguments = arguments['global']
|
||||||
dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
|
dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
|
||||||
if 'init' in arguments:
|
hook_context = {
|
||||||
logger.info('{}: Initializing repository'.format(repository))
|
'repository': repository_path,
|
||||||
borg_init.initialize_repository(
|
# Deprecated: For backwards compatibility with borgmatic < 1.6.0.
|
||||||
|
'repositories': ','.join(location['repositories']),
|
||||||
|
}
|
||||||
|
|
||||||
|
command.execute_hook(
|
||||||
|
hooks.get('before_actions'),
|
||||||
|
hooks.get('umask'),
|
||||||
|
config_filename,
|
||||||
|
'pre-actions',
|
||||||
|
global_arguments.dry_run,
|
||||||
|
**hook_context,
|
||||||
|
)
|
||||||
|
|
||||||
|
if 'rcreate' in arguments:
|
||||||
|
logger.info('{}: Creating repository'.format(repository))
|
||||||
|
borg_rcreate.create_repository(
|
||||||
|
global_arguments.dry_run,
|
||||||
repository,
|
repository,
|
||||||
storage,
|
storage,
|
||||||
arguments['init'].encryption_mode,
|
local_borg_version,
|
||||||
arguments['init'].append_only,
|
arguments['rcreate'].encryption_mode,
|
||||||
arguments['init'].storage_quota,
|
arguments['rcreate'].source_repository,
|
||||||
|
arguments['rcreate'].copy_crypt_key,
|
||||||
|
arguments['rcreate'].append_only,
|
||||||
|
arguments['rcreate'].storage_quota,
|
||||||
|
arguments['rcreate'].make_parent_dirs,
|
||||||
|
local_path=local_path,
|
||||||
|
remote_path=remote_path,
|
||||||
|
)
|
||||||
|
if 'transfer' in arguments:
|
||||||
|
logger.info(f'{repository}: Transferring archives to repository')
|
||||||
|
borg_transfer.transfer_archives(
|
||||||
|
global_arguments.dry_run,
|
||||||
|
repository,
|
||||||
|
storage,
|
||||||
|
local_borg_version,
|
||||||
|
transfer_arguments=arguments['transfer'],
|
||||||
local_path=local_path,
|
local_path=local_path,
|
||||||
remote_path=remote_path,
|
remote_path=remote_path,
|
||||||
)
|
)
|
||||||
if 'prune' in arguments:
|
if 'prune' in arguments:
|
||||||
|
command.execute_hook(
|
||||||
|
hooks.get('before_prune'),
|
||||||
|
hooks.get('umask'),
|
||||||
|
config_filename,
|
||||||
|
'pre-prune',
|
||||||
|
global_arguments.dry_run,
|
||||||
|
**hook_context,
|
||||||
|
)
|
||||||
logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
|
logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
|
||||||
borg_prune.prune_archives(
|
borg_prune.prune_archives(
|
||||||
global_arguments.dry_run,
|
global_arguments.dry_run,
|
||||||
repository,
|
repository,
|
||||||
storage,
|
storage,
|
||||||
retention,
|
retention,
|
||||||
|
local_borg_version,
|
||||||
local_path=local_path,
|
local_path=local_path,
|
||||||
remote_path=remote_path,
|
remote_path=remote_path,
|
||||||
stats=arguments['prune'].stats,
|
stats=arguments['prune'].stats,
|
||||||
files=arguments['prune'].files,
|
list_archives=arguments['prune'].list_archives,
|
||||||
|
)
|
||||||
|
command.execute_hook(
|
||||||
|
hooks.get('after_prune'),
|
||||||
|
hooks.get('umask'),
|
||||||
|
config_filename,
|
||||||
|
'post-prune',
|
||||||
|
global_arguments.dry_run,
|
||||||
|
**hook_context,
|
||||||
)
|
)
|
||||||
if 'compact' in arguments:
|
if 'compact' in arguments:
|
||||||
|
command.execute_hook(
|
||||||
|
hooks.get('before_compact'),
|
||||||
|
hooks.get('umask'),
|
||||||
|
config_filename,
|
||||||
|
'pre-compact',
|
||||||
|
global_arguments.dry_run,
|
||||||
|
)
|
||||||
if borg_feature.available(borg_feature.Feature.COMPACT, local_borg_version):
|
if borg_feature.available(borg_feature.Feature.COMPACT, local_borg_version):
|
||||||
logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
|
logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
|
||||||
borg_compact.compact_segments(
|
borg_compact.compact_segments(
|
||||||
global_arguments.dry_run,
|
global_arguments.dry_run,
|
||||||
repository,
|
repository,
|
||||||
storage,
|
storage,
|
||||||
|
local_borg_version,
|
||||||
local_path=local_path,
|
local_path=local_path,
|
||||||
remote_path=remote_path,
|
remote_path=remote_path,
|
||||||
progress=arguments['compact'].progress,
|
progress=arguments['compact'].progress,
|
||||||
cleanup_commits=arguments['compact'].cleanup_commits,
|
cleanup_commits=arguments['compact'].cleanup_commits,
|
||||||
threshold=arguments['compact'].threshold,
|
threshold=arguments['compact'].threshold,
|
||||||
)
|
)
|
||||||
else:
|
else: # pragma: nocover
|
||||||
logger.info(
|
logger.info(
|
||||||
'{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository)
|
'{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository)
|
||||||
)
|
)
|
||||||
|
command.execute_hook(
|
||||||
|
hooks.get('after_compact'),
|
||||||
|
hooks.get('umask'),
|
||||||
|
config_filename,
|
||||||
|
'post-compact',
|
||||||
|
global_arguments.dry_run,
|
||||||
|
)
|
||||||
if 'create' in arguments:
|
if 'create' in arguments:
|
||||||
|
command.execute_hook(
|
||||||
|
hooks.get('before_backup'),
|
||||||
|
hooks.get('umask'),
|
||||||
|
config_filename,
|
||||||
|
'pre-backup',
|
||||||
|
global_arguments.dry_run,
|
||||||
|
**hook_context,
|
||||||
|
)
|
||||||
logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
|
logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
|
||||||
dispatch.call_hooks(
|
dispatch.call_hooks_even_if_unconfigured(
|
||||||
'remove_database_dumps',
|
'remove_database_dumps',
|
||||||
hooks,
|
hooks,
|
||||||
repository,
|
repository,
|
||||||
|
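A quick annotation on the new hook_context dict above: it is what lets before/after hook commands reference repository details. A minimal sketch of the substitution, assuming (as borgmatic's execute_hook does) that hook commands are formatted with the context as named placeholders; the command string here is hypothetical:

```python
# Hypothetical hook command from a borgmatic config; '{repository}' is filled in
# from hook_context before the command runs.
hook_command = 'echo "starting actions for {repository}"'
hook_context = {
    'repository': '/var/local/backups/local.borg',
    # Deprecated alias kept for borgmatic < 1.6.0 configs:
    'repositories': '/var/local/backups/local.borg',
}
print(hook_command.format(**hook_context))
# echo "starting actions for /var/local/backups/local.borg"
```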
@@ -403,25 +390,69 @@ def run_actions(
             progress=arguments['create'].progress,
             stats=arguments['create'].stats,
             json=arguments['create'].json,
-            files=arguments['create'].files,
+            list_files=arguments['create'].list_files,
             stream_processes=stream_processes,
         )
-        if json_output:
+        if json_output:  # pragma: nocover
             yield json.loads(json_output)

+        dispatch.call_hooks_even_if_unconfigured(
+            'remove_database_dumps',
+            hooks,
+            config_filename,
+            dump.DATABASE_HOOK_NAMES,
+            location,
+            global_arguments.dry_run,
+        )
+        command.execute_hook(
+            hooks.get('after_backup'),
+            hooks.get('umask'),
+            config_filename,
+            'post-backup',
+            global_arguments.dry_run,
+            **hook_context,
+        )
+
     if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
+        command.execute_hook(
+            hooks.get('before_check'),
+            hooks.get('umask'),
+            config_filename,
+            'pre-check',
+            global_arguments.dry_run,
+            **hook_context,
+        )
         logger.info('{}: Running consistency checks'.format(repository))
         borg_check.check_archives(
             repository,
+            location,
             storage,
             consistency,
+            local_borg_version,
             local_path=local_path,
             remote_path=remote_path,
             progress=arguments['check'].progress,
             repair=arguments['check'].repair,
             only_checks=arguments['check'].only,
+            force=arguments['check'].force,
+        )
+        command.execute_hook(
+            hooks.get('after_check'),
+            hooks.get('umask'),
+            config_filename,
+            'post-check',
+            global_arguments.dry_run,
+            **hook_context,
         )
     if 'extract' in arguments:
+        command.execute_hook(
+            hooks.get('before_extract'),
+            hooks.get('umask'),
+            config_filename,
+            'pre-extract',
+            global_arguments.dry_run,
+            **hook_context,
+        )
         if arguments['extract'].repository is None or validate.repositories_match(
             repository, arguments['extract'].repository
         ):
@@ -431,8 +462,13 @@ def run_actions(
             borg_extract.extract_archive(
                 global_arguments.dry_run,
                 repository,
-                borg_list.resolve_archive_name(
-                    repository, arguments['extract'].archive, storage, local_path, remote_path
+                borg_rlist.resolve_archive_name(
+                    repository,
+                    arguments['extract'].archive,
+                    storage,
+                    local_borg_version,
+                    local_path,
+                    remote_path,
                 ),
                 arguments['extract'].paths,
                 location,
@@ -444,6 +480,14 @@ def run_actions(
                 strip_components=arguments['extract'].strip_components,
                 progress=arguments['extract'].progress,
             )
+        command.execute_hook(
+            hooks.get('after_extract'),
+            hooks.get('umask'),
+            config_filename,
+            'post-extract',
+            global_arguments.dry_run,
+            **hook_context,
+        )
     if 'export-tar' in arguments:
         if arguments['export-tar'].repository is None or validate.repositories_match(
             repository, arguments['export-tar'].repository
@@ -456,16 +500,22 @@ def run_actions(
             borg_export_tar.export_tar_archive(
                 global_arguments.dry_run,
                 repository,
-                borg_list.resolve_archive_name(
-                    repository, arguments['export-tar'].archive, storage, local_path, remote_path
+                borg_rlist.resolve_archive_name(
+                    repository,
+                    arguments['export-tar'].archive,
+                    storage,
+                    local_borg_version,
+                    local_path,
+                    remote_path,
                 ),
                 arguments['export-tar'].paths,
                 arguments['export-tar'].destination,
                 storage,
+                local_borg_version,
                 local_path=local_path,
                 remote_path=remote_path,
                 tar_filter=arguments['export-tar'].tar_filter,
-                files=arguments['export-tar'].files,
+                list_files=arguments['export-tar'].list_files,
                 strip_components=arguments['export-tar'].strip_components,
             )
     if 'mount' in arguments:
@@ -476,23 +526,29 @@ def run_actions(
             logger.info(
                 '{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
             )
-        else:
+        else:  # pragma: nocover
             logger.info('{}: Mounting repository'.format(repository))

         borg_mount.mount_archive(
             repository,
-            borg_list.resolve_archive_name(
-                repository, arguments['mount'].archive, storage, local_path, remote_path
+            borg_rlist.resolve_archive_name(
+                repository,
+                arguments['mount'].archive,
+                storage,
+                local_borg_version,
+                local_path,
+                remote_path,
             ),
             arguments['mount'].mount_point,
             arguments['mount'].paths,
             arguments['mount'].foreground,
             arguments['mount'].options,
             storage,
+            local_borg_version,
             local_path=local_path,
             remote_path=remote_path,
         )
-    if 'restore' in arguments:
+    if 'restore' in arguments:  # pragma: nocover
         if arguments['restore'].repository is None or validate.repositories_match(
             repository, arguments['restore'].repository
         ):
@@ -501,7 +557,7 @@ def run_actions(
                     repository, arguments['restore'].archive
                 )
             )
-            dispatch.call_hooks(
+            dispatch.call_hooks_even_if_unconfigured(
                 'remove_database_dumps',
                 hooks,
                 repository,
@@ -514,8 +570,13 @@ def run_actions(
            if 'all' in restore_names:
                restore_names = []

-            archive_name = borg_list.resolve_archive_name(
-                repository, arguments['restore'].archive, storage, local_path, remote_path
+            archive_name = borg_rlist.resolve_archive_name(
+                repository,
+                arguments['restore'].archive,
+                storage,
+                local_borg_version,
+                local_path,
+                remote_path,
             )
             found_names = set()

@@ -566,7 +627,7 @@ def run_actions(
                     extract_process,
                 )

-            dispatch.call_hooks(
+            dispatch.call_hooks_even_if_unconfigured(
                 'remove_database_dumps',
                 hooks,
                 repository,
@@ -585,64 +646,139 @@ def run_actions(
                         ', '.join(missing_names)
                     )
                 )
+    if 'rlist' in arguments:
+        if arguments['rlist'].repository is None or validate.repositories_match(
+            repository, arguments['rlist'].repository
+        ):
+            rlist_arguments = copy.copy(arguments['rlist'])
+            if not rlist_arguments.json:  # pragma: nocover
+                logger.answer('{}: Listing repository'.format(repository))
+            json_output = borg_rlist.list_repository(
+                repository,
+                storage,
+                local_borg_version,
+                rlist_arguments=rlist_arguments,
+                local_path=local_path,
+                remote_path=remote_path,
+            )
+            if json_output:  # pragma: nocover
+                yield json.loads(json_output)
     if 'list' in arguments:
         if arguments['list'].repository is None or validate.repositories_match(
             repository, arguments['list'].repository
         ):
             list_arguments = copy.copy(arguments['list'])
-            if not list_arguments.json:
-                logger.warning('{}: Listing archives'.format(repository))
-            list_arguments.archive = borg_list.resolve_archive_name(
-                repository, list_arguments.archive, storage, local_path, remote_path
+            if not list_arguments.json:  # pragma: nocover
+                if list_arguments.find_paths:
+                    logger.answer('{}: Searching archives'.format(repository))
+                elif not list_arguments.archive:
+                    logger.answer('{}: Listing archives'.format(repository))
+            list_arguments.archive = borg_rlist.resolve_archive_name(
+                repository,
+                list_arguments.archive,
+                storage,
+                local_borg_version,
+                local_path,
+                remote_path,
             )
-            json_output = borg_list.list_archives(
+            json_output = borg_list.list_archive(
                 repository,
                 storage,
+                local_borg_version,
                 list_arguments=list_arguments,
                 local_path=local_path,
                 remote_path=remote_path,
             )
-            if json_output:
+            if json_output:  # pragma: nocover
+                yield json.loads(json_output)
+    if 'rinfo' in arguments:
+        if arguments['rinfo'].repository is None or validate.repositories_match(
+            repository, arguments['rinfo'].repository
+        ):
+            rinfo_arguments = copy.copy(arguments['rinfo'])
+            if not rinfo_arguments.json:  # pragma: nocover
+                logger.answer('{}: Displaying repository summary information'.format(repository))
+            json_output = borg_rinfo.display_repository_info(
+                repository,
+                storage,
+                local_borg_version,
+                rinfo_arguments=rinfo_arguments,
+                local_path=local_path,
+                remote_path=remote_path,
+            )
+            if json_output:  # pragma: nocover
                 yield json.loads(json_output)
     if 'info' in arguments:
         if arguments['info'].repository is None or validate.repositories_match(
             repository, arguments['info'].repository
         ):
             info_arguments = copy.copy(arguments['info'])
-            if not info_arguments.json:
-                logger.warning('{}: Displaying summary info for archives'.format(repository))
-            info_arguments.archive = borg_list.resolve_archive_name(
-                repository, info_arguments.archive, storage, local_path, remote_path
+            if not info_arguments.json:  # pragma: nocover
+                logger.answer('{}: Displaying archive summary information'.format(repository))
+            info_arguments.archive = borg_rlist.resolve_archive_name(
+                repository,
+                info_arguments.archive,
+                storage,
+                local_borg_version,
+                local_path,
+                remote_path,
             )
             json_output = borg_info.display_archives_info(
                 repository,
                 storage,
+                local_borg_version,
                 info_arguments=info_arguments,
                 local_path=local_path,
                 remote_path=remote_path,
             )
-            if json_output:
+            if json_output:  # pragma: nocover
                 yield json.loads(json_output)
+    if 'break-lock' in arguments:
+        if arguments['break-lock'].repository is None or validate.repositories_match(
+            repository, arguments['break-lock'].repository
+        ):
+            logger.info(f'{repository}: Breaking repository and cache locks')
+            borg_break_lock.break_lock(
+                repository,
+                storage,
+                local_borg_version,
+                local_path=local_path,
+                remote_path=remote_path,
+            )
     if 'borg' in arguments:
         if arguments['borg'].repository is None or validate.repositories_match(
             repository, arguments['borg'].repository
         ):
-            logger.warning('{}: Running arbitrary Borg command'.format(repository))
-            archive_name = borg_list.resolve_archive_name(
-                repository, arguments['borg'].archive, storage, local_path, remote_path
+            logger.info('{}: Running arbitrary Borg command'.format(repository))
+            archive_name = borg_rlist.resolve_archive_name(
+                repository,
+                arguments['borg'].archive,
+                storage,
+                local_borg_version,
+                local_path,
+                remote_path,
             )
             borg_borg.run_arbitrary_borg(
                 repository,
                 storage,
+                local_borg_version,
                 options=arguments['borg'].options,
                 archive=archive_name,
                 local_path=local_path,
                 remote_path=remote_path,
             )

+    command.execute_hook(
+        hooks.get('after_actions'),
+        hooks.get('umask'),
+        config_filename,
+        'post-actions',
+        global_arguments.dry_run,
+        **hook_context,
+    )

-def load_configurations(config_filenames, overrides=None):
+
+def load_configurations(config_filenames, overrides=None, resolve_env=True):
     '''
     Given a sequence of configuration filenames, load and validate each configuration file. Return
     the results as a tuple of: dict of configuration filename to corresponding parsed configuration,
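One pattern in the rlist/list/rinfo/info branches above deserves a note: the parsed action arguments are shallow-copied with copy.copy() before the per-repository archive resolution mutates .archive, so one repository's resolved name doesn't bleed into the next iteration. A stripped-down sketch of that pattern, with a hypothetical resolve() standing in for borg_rlist.resolve_archive_name():

```python
import copy
from types import SimpleNamespace

def resolve(repository, archive):  # hypothetical stand-in for resolve_archive_name()
    return f'{repository}-latest' if archive == 'latest' else archive

list_arguments = SimpleNamespace(archive='latest', json=False)

for repository in ('repo-a.borg', 'repo-b.borg'):
    per_repo_arguments = copy.copy(list_arguments)  # don't mutate the shared parse result
    per_repo_arguments.archive = resolve(repository, per_repo_arguments.archive)
    print(repository, per_repo_arguments.archive)
```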
@@ -655,9 +791,10 @@ def load_configurations(config_filenames, overrides=None):
     # Parse and load each configuration file.
     for config_filename in config_filenames:
         try:
-            configs[config_filename] = validate.parse_configuration(
-                config_filename, validate.schema_filename(), overrides
+            configs[config_filename], parse_logs = validate.parse_configuration(
+                config_filename, validate.schema_filename(), overrides, resolve_env
             )
+            logs.extend(parse_logs)
         except PermissionError:
             logs.extend(
                 [
@@ -704,28 +841,39 @@ def log_record(suppress_log=False, **kwargs):
     return record


-def make_error_log_records(message, error=None):
+def log_error_records(
+    message, error=None, levelno=logging.CRITICAL, log_command_error_output=False
+):
     '''
-    Given error message text and an optional exception object, yield a series of logging.LogRecord
-    instances with error summary information. As a side effect, log each record.
+    Given error message text, an optional exception object, an optional log level, and whether to
+    log the error output of a CalledProcessError (if any), log error summary information and also
+    yield it as a series of logging.LogRecord instances.
+
+    Note that because the logs are yielded as a generator, logs won't get logged unless you consume
+    the generator output.
     '''
+    level_name = logging._levelToName[levelno]
+
     if not error:
-        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
+        yield log_record(levelno=levelno, levelname=level_name, msg=message)
         return

     try:
         raise error
     except CalledProcessError as error:
-        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
+        yield log_record(levelno=levelno, levelname=level_name, msg=message)
         if error.output:
             # Suppress these logs for now and save full error output for the log summary at the end.
             yield log_record(
-                levelno=logging.CRITICAL, levelname='CRITICAL', msg=error.output, suppress_log=True
+                levelno=levelno,
+                levelname=level_name,
+                msg=error.output,
+                suppress_log=not log_command_error_output,
             )
-        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
+        yield log_record(levelno=levelno, levelname=level_name, msg=error)
     except (ValueError, OSError) as error:
-        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=message)
-        yield log_record(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
+        yield log_record(levelno=levelno, levelname=level_name, msg=message)
+        yield log_record(levelno=levelno, levelname=level_name, msg=error)
     except:  # noqa: E722
         # Raising above only as a means of determining the error type. Swallow the exception here
         # because we don't want the exception to propagate out of this function.
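The note added to the docstring above is easy to trip over, so here's a minimal sketch of the caveat: because log_error_records() is a generator, nothing is logged (or yielded) until the caller iterates it.

```python
import logging

# With log_error_records as defined above:
records = log_error_records('Example failure', levelno=logging.WARNING)
# Nothing has been logged yet; the generator body hasn't run.

for record in records:  # consuming the generator produces (and logs) the records
    print(record.levelname, record.getMessage())
```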
@@ -751,24 +899,24 @@ def collect_configuration_run_summary_logs(configs, arguments):
     any, to stdout.
     '''
     # Run cross-file validation checks.
-    if 'extract' in arguments:
-        repository = arguments['extract'].repository
-    elif 'list' in arguments and arguments['list'].archive:
-        repository = arguments['list'].repository
-    elif 'mount' in arguments:
-        repository = arguments['mount'].repository
-    else:
-        repository = None
+    repository = None
+
+    for action_name, action_arguments in arguments.items():
+        if hasattr(action_arguments, 'repository'):
+            repository = getattr(action_arguments, 'repository')
+            break

-    if repository:
-        try:
-            validate.guard_configuration_contains_repository(repository, configs)
-        except ValueError as error:
-            yield from make_error_log_records(str(error))
-            return
+    try:
+        if 'extract' in arguments or 'mount' in arguments:
+            validate.guard_single_repository_selected(repository, configs)
+
+        validate.guard_configuration_contains_repository(repository, configs)
+    except ValueError as error:
+        yield from log_error_records(str(error))
+        return

     if not configs:
-        yield from make_error_log_records(
+        yield from log_error_records(
             '{}: No valid configuration files found'.format(
                 ' '.join(arguments['global'].config_paths)
             )
@@ -787,7 +935,7 @@ def collect_configuration_run_summary_logs(configs, arguments):
             arguments['global'].dry_run,
         )
     except (CalledProcessError, ValueError, OSError) as error:
-        yield from make_error_log_records('Error running pre-everything hook', error)
+        yield from log_error_records('Error running pre-everything hook', error)
         return

     # Execute the actions corresponding to each configuration file.
@@ -797,7 +945,7 @@ def collect_configuration_run_summary_logs(configs, arguments):
         error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))

         if error_logs:
-            yield from make_error_log_records(
+            yield from log_error_records(
                 '{}: Error running configuration file'.format(config_filename)
             )
             yield from error_logs
@@ -816,10 +964,10 @@ def collect_configuration_run_summary_logs(configs, arguments):
         logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point))
         try:
             borg_umount.unmount_archive(
-                mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs)
+                mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs),
             )
         except (CalledProcessError, OSError) as error:
-            yield from make_error_log_records('Error unmounting mount point', error)
+            yield from log_error_records('Error unmounting mount point', error)

     if json_results:
         sys.stdout.write(json.dumps(json_results))
@@ -836,7 +984,7 @@ def collect_configuration_run_summary_logs(configs, arguments):
             arguments['global'].dry_run,
         )
     except (CalledProcessError, ValueError, OSError) as error:
-        yield from make_error_log_records('Error running post-everything hook', error)
+        yield from log_error_records('Error running post-everything hook', error)


 def exit_with_help_link():  # pragma: no cover
@@ -868,9 +1016,14 @@ def main():  # pragma: no cover
     if global_arguments.version:
         print(pkg_resources.require('borgmatic')[0].version)
         sys.exit(0)
+    if global_arguments.bash_completion:
+        print(borgmatic.commands.completion.bash_completion())
+        sys.exit(0)

     config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths))
-    configs, parse_logs = load_configurations(config_filenames, global_arguments.overrides)
+    configs, parse_logs = load_configurations(
+        config_filenames, global_arguments.overrides, global_arguments.resolve_env
+    )

     any_json_flags = any(
         getattr(sub_arguments, 'json', False) for sub_arguments in arguments.values()
@@ -0,0 +1,57 @@
+from borgmatic.commands import arguments
+
+
+UPGRADE_MESSAGE = '''
+Your bash completions script is from a different version of borgmatic than is
+currently installed. Please upgrade your script so your completions match the
+command-line flags in your installed borgmatic! Try this to upgrade:
+
+    sudo sh -c "borgmatic --bash-completion > $BASH_SOURCE"
+    source $BASH_SOURCE
+'''
+
+
+def parser_flags(parser):
+    '''
+    Given an argparse.ArgumentParser instance, return its argument flags in a space-separated
+    string.
+    '''
+    return ' '.join(option for action in parser._actions for option in action.option_strings)
+
+
+def bash_completion():
+    '''
+    Return a bash completion script for the borgmatic command. Produce this by introspecting
+    borgmatic's command-line argument parsers.
+    '''
+    top_level_parser, subparsers = arguments.make_parsers()
+    global_flags = parser_flags(top_level_parser)
+    actions = ' '.join(subparsers.choices.keys())
+
+    # Avert your eyes.
+    return '\n'.join(
+        (
+            'check_version() {',
+            '    local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"',
+            '    local installed_script="$(borgmatic --bash-completion 2> /dev/null)"',
+            '    if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];'
+            '    then cat << EOF\n%s\nEOF' % UPGRADE_MESSAGE,
+            '    fi',
+            '}',
+            'complete_borgmatic() {',
+        )
+        + tuple(
+            '''    if [[ " ${COMP_WORDS[*]} " =~ " %s " ]]; then
+        COMPREPLY=($(compgen -W "%s %s %s" -- "${COMP_WORDS[COMP_CWORD]}"))
+        return 0
+    fi'''
+            % (action, parser_flags(subparser), actions, global_flags)
+            for action, subparser in subparsers.choices.items()
+        )
+        + (
+            '    COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))'
+            % (actions, global_flags),
+            '    (check_version &)',
+            '}',
+            '\ncomplete -o bashdefault -o default -F complete_borgmatic borgmatic',
+        )
+    )
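A quick way to exercise the new module above; a sketch, assuming borgmatic is installed so the import resolves:

```python
from borgmatic.commands import completion

script = completion.bash_completion()
print(script[:80])  # the generated script opens with the check_version() shell function
```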
@@ -23,10 +23,16 @@ def parse_arguments(*arguments):
         '--destination',
         dest='destination_filename',
         default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help='Destination YAML configuration file. Default: {}'.format(
+        help='Destination YAML configuration file, default: {}'.format(
             DEFAULT_DESTINATION_CONFIG_FILENAME
         ),
     )
+    parser.add_argument(
+        '--overwrite',
+        default=False,
+        action='store_true',
+        help='Whether to overwrite any existing destination file, defaults to false',
+    )

     return parser.parse_args(arguments)

@@ -36,7 +42,10 @@ def main():  # pragma: no cover
     args = parse_arguments(*sys.argv[1:])

     generate.generate_sample_configuration(
-        args.source_filename, args.destination_filename, validate.schema_filename()
+        args.source_filename,
+        args.destination_filename,
+        validate.schema_filename(),
+        overwrite=args.overwrite,
     )

     print('Generated a sample configuration file at {}.'.format(args.destination_filename))
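Tying the new flag together, a hedged sketch of the call path: passing --overwrite on the command line flows through to the keyword argument added above (the destination path here is hypothetical):

```python
from borgmatic.config import generate, validate

generate.generate_sample_configuration(
    None,                        # no source config to merge
    '/tmp/example-config.yaml',  # hypothetical destination
    validate.schema_filename(),
    overwrite=True,              # replace the file if it already exists
)
```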
@@ -51,8 +60,8 @@ def main():  # pragma: no cover
             '    diff --unified {} {}'.format(args.source_filename, args.destination_filename)
         )
         print()
-        print('Please edit the file to suit your needs. The values are representative.')
-        print('All fields are optional except where indicated.')
+        print('This includes all available configuration options with example values. The few')
+        print('required options are indicated. Please edit the file to suit your needs.')
         print()
         print('If you ever need help: https://torsion.org/borgmatic/#issues')
     except (ValueError, OSError) as error:
@@ -0,0 +1,42 @@
+import os
+import re
+
+_VARIABLE_PATTERN = re.compile(
+    r'(?P<escape>\\)?(?P<variable>\$\{(?P<name>[A-Za-z0-9_]+)((:?-)(?P<default>[^}]+))?\})'
+)
+
+
+def _resolve_string(matcher):
+    '''
+    Get the value from the environment given a matcher containing a name and an optional default
+    value. If the variable is not defined in the environment and no default value is provided,
+    raise an error.
+    '''
+    if matcher.group('escape') is not None:
+        # In the case of an escaped environment variable, unescape it.
+        return matcher.group('variable')
+
+    # Resolve the environment variable.
+    name, default = matcher.group('name'), matcher.group('default')
+    out = os.getenv(name, default=default)
+
+    if out is None:
+        raise ValueError('Cannot find variable ${name} in environment'.format(name=name))
+
+    return out
+
+
+def resolve_env_variables(item):
+    '''
+    Resolves variables like ${FOO} from the given configuration with values from the process
+    environment. Supported formats:
+
+     - ${FOO} will return the FOO environment variable
+     - ${FOO-bar} or ${FOO:-bar} will return the FOO environment variable if it exists, else "bar"
+
+    If any variable is missing from the environment and no default value is provided, an error is
+    raised.
+    '''
+    if isinstance(item, str):
+        return _VARIABLE_PATTERN.sub(_resolve_string, item)
+    if isinstance(item, list):
+        for i, subitem in enumerate(item):
+            item[i] = resolve_env_variables(subitem)
+    if isinstance(item, dict):
+        for key, value in item.items():
+            item[key] = resolve_env_variables(value)
+    return item
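A small usage sketch of the new module above, showing both a plain variable and a defaulted one (the variable names here are hypothetical):

```python
import os
from borgmatic.config.environment import resolve_env_variables

os.environ['MY_PASSPHRASE'] = 'hunter2'  # hypothetical variable
config = {
    'storage': {'encryption_passphrase': '${MY_PASSPHRASE}'},
    'location': {'repositories': ['${MY_REPO:-/var/local/backups/local.borg}']},
}
print(resolve_env_variables(config))
# {'storage': {'encryption_passphrase': 'hunter2'},
#  'location': {'repositories': ['/var/local/backups/local.borg']}}
```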
@@ -5,7 +5,7 @@ import re

 from ruamel import yaml

-from borgmatic.config import load
+from borgmatic.config import load, normalize

 INDENT = 4
 SEQUENCE_INDENT = 2
@@ -109,13 +109,18 @@ def render_configuration(config):
     return rendered.getvalue()


-def write_configuration(config_filename, rendered_config, mode=0o600):
+def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=False):
     '''
     Given a target config filename and rendered config YAML, write it out to file. Create any
-    containing directories as needed.
+    containing directories as needed. But if the file already exists and overwrite is False,
+    abort before writing anything.
     '''
-    if os.path.exists(config_filename):
-        raise FileExistsError('{} already exists. Aborting.'.format(config_filename))
+    if not overwrite and os.path.exists(config_filename):
+        raise FileExistsError(
+            '{} already exists. Aborting. Use --overwrite to replace the file.'.format(
+                config_filename
+            )
+        )

     try:
         os.makedirs(os.path.dirname(config_filename), mode=0o700)
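A quick sketch of the guard above (path hypothetical): the final call only succeeds because of the new flag.

```python
from borgmatic.config.generate import write_configuration

write_configuration('/tmp/demo-config.yaml', 'location:\n')

# A second write to the same path raises FileExistsError...
# write_configuration('/tmp/demo-config.yaml', 'location:\n')

# ...unless overwrite is requested:
write_configuration('/tmp/demo-config.yaml', 'location:\n', overwrite=True)
```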
@@ -263,18 +268,22 @@ def merge_source_configuration_into_destination(destination_config, source_config):
     return destination_config


-def generate_sample_configuration(source_filename, destination_filename, schema_filename):
+def generate_sample_configuration(
+    source_filename, destination_filename, schema_filename, overwrite=False
+):
     '''
     Given an optional source configuration filename, and a required destination configuration
-    filename, and the path to a schema filename in a YAML rendition of the JSON Schema format,
-    write out a sample configuration file based on that schema. If a source filename is provided,
-    merge the parsed contents of that configuration into the generated configuration.
+    filename, the path to a schema filename in a YAML rendition of the JSON Schema format, and
+    whether to overwrite a destination file, write out a sample configuration file based on that
+    schema. If a source filename is provided, merge the parsed contents of that configuration into
+    the generated configuration.
     '''
     schema = yaml.round_trip_load(open(schema_filename))
     source_config = None

     if source_filename:
         source_config = load.load_configuration(source_filename)
+        normalize.normalize(source_filename, source_config)

     destination_config = merge_source_configuration_into_destination(
         _schema_to_sample_configuration(schema), source_config
@@ -283,4 +292,5 @@ def generate_sample_configuration(source_filename, destination_filename, schema_filename):
     write_configuration(
         destination_filename,
         _comment_out_optional_configuration(render_configuration(destination_config)),
+        overwrite=overwrite,
     )
@@ -1,3 +1,4 @@
+import functools
 import logging
 import os

@@ -6,6 +7,77 @@ import ruamel.yaml
 logger = logging.getLogger(__name__)


+def include_configuration(loader, filename_node, include_directory):
+    '''
+    Given a ruamel.yaml.loader.Loader, a ruamel.yaml.serializer.ScalarNode containing the included
+    filename, and an include directory path to search for matching files, load the given YAML
+    filename (ignoring the given loader so we can use our own) and return its contents as a data
+    structure of nested dicts and lists. If the filename is relative, probe for it within 1. the
+    current working directory and 2. the given include directory.
+
+    Raise FileNotFoundError if an included file was not found.
+    '''
+    include_directories = [os.getcwd(), os.path.abspath(include_directory)]
+    include_filename = os.path.expanduser(filename_node.value)
+
+    if not os.path.isabs(include_filename):
+        candidate_filenames = [
+            os.path.join(directory, include_filename) for directory in include_directories
+        ]
+
+        for candidate_filename in candidate_filenames:
+            if os.path.exists(candidate_filename):
+                include_filename = candidate_filename
+                break
+        else:
+            raise FileNotFoundError(
+                f'Could not find include {filename_node.value} at {" or ".join(candidate_filenames)}'
+            )
+
+    return load_configuration(include_filename)
+
+
+class Include_constructor(ruamel.yaml.SafeConstructor):
+    '''
+    A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including
+    separate YAML configuration files. Example syntax: `retention: !include common.yaml`
+    '''
+
+    def __init__(self, preserve_quotes=None, loader=None, include_directory=None):
+        super(Include_constructor, self).__init__(preserve_quotes, loader)
+        self.add_constructor(
+            '!include',
+            functools.partial(include_configuration, include_directory=include_directory),
+        )
+
+    def flatten_mapping(self, node):
+        '''
+        Support the special case of deep merging included configuration into an existing mapping
+        using the YAML '<<' merge key. Example syntax:
+
+        ```
+        retention:
+            keep_daily: 1
+
+        <<: !include common.yaml
+        ```
+
+        These includes are deep merged into the current configuration file. For instance, in this
+        example, any "retention" options in common.yaml will get merged into the "retention" section
+        in the example configuration file.
+        '''
+        representer = ruamel.yaml.representer.SafeRepresenter()
+
+        for index, (key_node, value_node) in enumerate(node.value):
+            if key_node.tag == u'tag:yaml.org,2002:merge' and value_node.tag == '!include':
+                included_value = representer.represent_data(self.construct_object(value_node))
+                node.value[index] = (key_node, included_value)
+
+        super(Include_constructor, self).flatten_mapping(node)
+
+        node.value = deep_merge_nodes(node.value)
+
+
 def load_configuration(filename):
     '''
     Load the given configuration file and return its contents as a data structure of nested dicts
@@ -14,46 +86,131 @@ def load_configuration(filename):
     Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
     if there are too many recursive includes.
     '''
+    # Use an embedded derived class for the include constructor so as to capture the filename
+    # value. (functools.partial doesn't work for this use case because yaml.Constructor has to be
+    # an actual class.)
+    class Include_constructor_with_include_directory(Include_constructor):
+        def __init__(self, preserve_quotes=None, loader=None):
+            super(Include_constructor_with_include_directory, self).__init__(
+                preserve_quotes, loader, include_directory=os.path.dirname(filename)
+            )
+
     yaml = ruamel.yaml.YAML(typ='safe')
-    yaml.Constructor = Include_constructor
+    yaml.Constructor = Include_constructor_with_include_directory

     return yaml.load(open(filename))


-def include_configuration(loader, filename_node):
-    '''
-    Load the given YAML filename (ignoring the given loader so we can use our own), and return its
-    contents as a data structure of nested dicts and lists.
-    '''
-    return load_configuration(os.path.expanduser(filename_node.value))
-
-
-class Include_constructor(ruamel.yaml.SafeConstructor):
-    '''
-    A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including
-    separate YAML configuration files. Example syntax: `retention: !include common.yaml`
-    '''
-
-    def __init__(self, preserve_quotes=None, loader=None):
-        super(Include_constructor, self).__init__(preserve_quotes, loader)
-        self.add_constructor('!include', include_configuration)
-
-    def flatten_mapping(self, node):
-        '''
-        Support the special case of shallow merging included configuration into an existing mapping
-        using the YAML '<<' merge key. Example syntax:
-
-        ```
-        retention:
-            keep_daily: 1
-            <<: !include common.yaml
-        ```
-        '''
-        representer = ruamel.yaml.representer.SafeRepresenter()
-
-        for index, (key_node, value_node) in enumerate(node.value):
-            if key_node.tag == u'tag:yaml.org,2002:merge' and value_node.tag == '!include':
-                included_value = representer.represent_data(self.construct_object(value_node))
-                node.value[index] = (key_node, included_value)
-
-        super(Include_constructor, self).flatten_mapping(node)
+DELETED_NODE = object()
+
+
+def deep_merge_nodes(nodes):
+    '''
+    Given a nested borgmatic configuration data structure as a list of tuples in the form of:
+
+        (
+            ruamel.yaml.nodes.ScalarNode as a key,
+            ruamel.yaml.nodes.MappingNode or other Node as a value,
+        ),
+
+    ... deep merge any node values corresponding to duplicate keys and return the result. If
+    there are colliding keys with non-MappingNode values (e.g., integers or strings), the last
+    of the values wins.
+
+    For instance, given node values of:
+
+        [
+            (
+                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+                MappingNode(tag='tag:yaml.org,2002:map', value=[
+                    (
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_hourly'),
+                        ScalarNode(tag='tag:yaml.org,2002:int', value='24')
+                    ),
+                    (
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
+                        ScalarNode(tag='tag:yaml.org,2002:int', value='7')
+                    ),
+                ]),
+            ),
+            (
+                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+                MappingNode(tag='tag:yaml.org,2002:map', value=[
+                    (
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
+                        ScalarNode(tag='tag:yaml.org,2002:int', value='5')
+                    ),
+                ]),
+            ),
+        ]
+
+    ... the returned result would be:
+
+        [
+            (
+                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+                MappingNode(tag='tag:yaml.org,2002:map', value=[
+                    (
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_hourly'),
+                        ScalarNode(tag='tag:yaml.org,2002:int', value='24')
+                    ),
+                    (
+                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
+                        ScalarNode(tag='tag:yaml.org,2002:int', value='5')
+                    ),
+                ]),
+            ),
+        ]
+
+    The purpose of deep merging like this is to support, for instance, merging one borgmatic
+    configuration file into another for reuse, such that a configuration section ("retention",
+    etc.) does not completely replace the corresponding section in a merged file.
+    '''
+    # Map from original node key/value to the replacement merged node. DELETED_NODE as a
+    # replacement node indicates deletion.
+    replaced_nodes = {}
+
+    # To find nodes that require merging, compare each node with each other node.
+    for a_key, a_value in nodes:
+        for b_key, b_value in nodes:
+            # If we've already considered one of the nodes for merging, skip it.
+            if (a_key, a_value) in replaced_nodes or (b_key, b_value) in replaced_nodes:
+                continue
+
+            # If the keys match and the values are different, we need to merge these two A and B nodes.
+            if a_key.tag == b_key.tag and a_key.value == b_key.value and a_value != b_value:
+                # Since we're merging into the B node, consider the A node a duplicate and remove it.
+                replaced_nodes[(a_key, a_value)] = DELETED_NODE
+
+                # If we're dealing with MappingNodes, recurse and merge its values as well.
+                if isinstance(b_value, ruamel.yaml.nodes.MappingNode):
+                    replaced_nodes[(b_key, b_value)] = (
+                        b_key,
+                        ruamel.yaml.nodes.MappingNode(
+                            tag=b_value.tag,
+                            value=deep_merge_nodes(a_value.value + b_value.value),
+                            start_mark=b_value.start_mark,
+                            end_mark=b_value.end_mark,
+                            flow_style=b_value.flow_style,
+                            comment=b_value.comment,
+                            anchor=b_value.anchor,
+                        ),
+                    )
+                # If we're dealing with SequenceNodes, merge by appending one sequence to the other.
+                elif isinstance(b_value, ruamel.yaml.nodes.SequenceNode):
+                    replaced_nodes[(b_key, b_value)] = (
+                        b_key,
+                        ruamel.yaml.nodes.SequenceNode(
+                            tag=b_value.tag,
+                            value=a_value.value + b_value.value,
+                            start_mark=b_value.start_mark,
+                            end_mark=b_value.end_mark,
+                            flow_style=b_value.flow_style,
+                            comment=b_value.comment,
+                            anchor=b_value.anchor,
+                        ),
+                    )

+    return [
+        replaced_nodes.get(node, node) for node in nodes if replaced_nodes.get(node) != DELETED_NODE
+    ]
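To make the effect of flatten_mapping() plus deep_merge_nodes() concrete, here's the YAML-level behavior sketched as comments (the file contents are hypothetical, and the precedence shown assumes the local file's colliding scalar wins, as in the deep_merge_nodes() docstring example):

```python
# common.yaml:
#     retention:
#         keep_hourly: 24
#         keep_daily: 7
#
# config.yaml:
#     retention:
#         keep_daily: 5
#
#     <<: !include common.yaml
#
# Loading config.yaml now deep merges instead of shallow merging, so the result
# keeps both options rather than replacing the whole retention section:
expected = {'retention': {'keep_hourly': 24, 'keep_daily': 5}}
```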
@@ -1,10 +1,88 @@
-def normalize(config):
+import logging
+
+
+def normalize(config_filename, config):
     '''
-    Given a configuration dict, apply particular hard-coded rules to normalize its contents to
-    adhere to the configuration schema.
+    Given a configuration filename and a configuration dict of its loaded contents, apply particular
+    hard-coded rules to normalize the configuration to adhere to the current schema. Return any log
+    message warnings produced based on the normalization performed.
     '''
-    exclude_if_present = config.get('location', {}).get('exclude_if_present')
-
-    # "Upgrade" exclude_if_present from a string to a list.
+    logs = []
+    location = config.get('location') or {}
+    storage = config.get('storage') or {}
+    consistency = config.get('consistency') or {}
+    hooks = config.get('hooks') or {}
+
+    # Upgrade exclude_if_present from a string to a list.
+    exclude_if_present = location.get('exclude_if_present')
     if isinstance(exclude_if_present, str):
         config['location']['exclude_if_present'] = [exclude_if_present]
+
+    # Upgrade various monitoring hooks from a string to a dict.
+    healthchecks = hooks.get('healthchecks')
+    if isinstance(healthchecks, str):
+        config['hooks']['healthchecks'] = {'ping_url': healthchecks}
+
+    cronitor = hooks.get('cronitor')
+    if isinstance(cronitor, str):
+        config['hooks']['cronitor'] = {'ping_url': cronitor}
+
+    pagerduty = hooks.get('pagerduty')
+    if isinstance(pagerduty, str):
+        config['hooks']['pagerduty'] = {'integration_key': pagerduty}
+
+    cronhub = hooks.get('cronhub')
+    if isinstance(cronhub, str):
+        config['hooks']['cronhub'] = {'ping_url': cronhub}
+
+    # Upgrade consistency checks from a list of strings to a list of dicts.
+    checks = consistency.get('checks')
+    if isinstance(checks, list) and len(checks) and isinstance(checks[0], str):
+        config['consistency']['checks'] = [{'name': check_type} for check_type in checks]
+
+    # Rename various configuration options.
+    numeric_owner = location.pop('numeric_owner', None)
+    if numeric_owner is not None:
+        config['location']['numeric_ids'] = numeric_owner
+
+    bsd_flags = location.pop('bsd_flags', None)
+    if bsd_flags is not None:
+        config['location']['flags'] = bsd_flags
+
+    remote_rate_limit = storage.pop('remote_rate_limit', None)
+    if remote_rate_limit is not None:
+        config['storage']['upload_rate_limit'] = remote_rate_limit
+
+    # Upgrade remote repositories to ssh:// syntax, required in Borg 2.
+    repositories = location.get('repositories')
+    if repositories:
+        config['location']['repositories'] = []
+        for repository in repositories:
+            if '~' in repository:
+                logs.append(
+                    logging.makeLogRecord(
+                        dict(
+                            levelno=logging.WARNING,
+                            levelname='WARNING',
+                            msg=f'{config_filename}: Repository paths containing "~" are deprecated in borgmatic and no longer work in Borg 2.x+.',
+                        )
+                    )
+                )
+            if ':' in repository and not repository.startswith('ssh://'):
+                rewritten_repository = (
+                    f"ssh://{repository.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
+                )
+                logs.append(
+                    logging.makeLogRecord(
+                        dict(
+                            levelno=logging.WARNING,
+                            levelname='WARNING',
+                            msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated. Interpreting "{repository}" as "{rewritten_repository}"',
+                        )
+                    )
+                )
+                config['location']['repositories'].append(rewritten_repository)
+            else:
+                config['location']['repositories'].append(repository)
+
+    return logs
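As a concrete check of the repository rewriting above, a small sketch (the filename is hypothetical):

```python
config = {'location': {'repositories': ['user@backupserver:sourcehostname.borg']}}
logs = normalize('test.yaml', config)

print(config['location']['repositories'])
# ['ssh://user@backupserver/./sourcehostname.borg']
print(logs[0].levelname)  # WARNING, flagging the deprecated syntax
```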
@@ -52,22 +52,26 @@ def parse_overrides(raw_overrides):
     if not raw_overrides:
         return ()
 
-    try:
-        return tuple(
-            (tuple(raw_keys.split('.')), convert_value_type(value))
-            for raw_override in raw_overrides
-            for raw_keys, value in (raw_override.split('=', 1),)
-        )
-    except ValueError:
-        raise ValueError('Invalid override. Make sure you use the form: SECTION.OPTION=VALUE')
-    except ruamel.yaml.error.YAMLError as error:
-        raise ValueError(f'Invalid override value: {error}')
+    parsed_overrides = []
+
+    for raw_override in raw_overrides:
+        try:
+            raw_keys, value = raw_override.split('=', 1)
+            parsed_overrides.append((tuple(raw_keys.split('.')), convert_value_type(value),))
+        except ValueError:
+            raise ValueError(
+                f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE"
+            )
+        except ruamel.yaml.error.YAMLError as error:
+            raise ValueError(f"Invalid override '{raw_override}': {error.problem}")
+
+    return tuple(parsed_overrides)
 
 
 def apply_overrides(config, raw_overrides):
     '''
-    Given a sequence of configuration file override strings in the form of "section.option=value"
-    and a configuration dict, parse each override and set it the configuration dict.
+    Given a configuration dict and a sequence of configuration file override strings in the form of
+    "section.option=value", parse each override and set it into the configuration dict.
     '''
     overrides = parse_overrides(raw_overrides)
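A standalone sketch of the rewritten parser's behavior. Here convert_value_type stands in for the project's YAML-based value converter, so this block runs on its own:

import ruamel.yaml

def convert_value_type(value):
    # Parse the scalar with YAML, as the project's converter does.
    return ruamel.yaml.YAML(typ='safe').load(value)

def parse_overrides(raw_overrides):
    parsed_overrides = []
    for raw_override in raw_overrides:
        try:
            raw_keys, value = raw_override.split('=', 1)
            parsed_overrides.append((tuple(raw_keys.split('.')), convert_value_type(value)))
        except ValueError:
            raise ValueError(
                f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE"
            )
    return tuple(parsed_overrides)

# '7' is parsed by YAML, so it comes back as an integer.
assert parse_overrides(('retention.keep_daily=7',)) == ((('retention', 'keep_daily'), 7),)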
@@ -11,7 +11,6 @@ properties:
            https://borgbackup.readthedocs.io/en/stable/usage/create.html
            for details.
        required:
-            - source_directories
            - repositories
        additionalProperties: false
        properties:
@@ -20,8 +19,8 @@ properties:
                items:
                    type: string
                description: |
-                    List of source directories to backup (required). Globs and
-                    tildes are expanded. Do not backslash spaces in path names.
+                    List of source directories to backup. Globs and tildes are
+                    expanded. Do not backslash spaces in path names.
                example:
                    - /home
                    - /etc
@@ -40,8 +39,9 @@ properties:
                    is used, then add local repository paths in the systemd
                    service file to the ReadWritePaths list.
                example:
-                    - user@backupserver:sourcehostname.borg
-                    - "user@backupserver:{fqdn}"
+                    - ssh://user@backupserver/./sourcehostname.borg
+                    - ssh://user@backupserver/./{fqdn}
+                    - /var/local/backups/local.borg
            working_directory:
                type: string
                description: |
@@ -53,12 +53,12 @@ properties:
            one_file_system:
                type: boolean
                description: |
-                    Stay in same file system (do not cross mount points).
-                    Defaults to false. But when a database hook is used, the
-                    setting here is ignored and one_file_system is considered
-                    true.
+                    Stay in same file system: do not cross mount points beyond
+                    the given source directories. Defaults to false. But when a
+                    database hook is used, the setting here is ignored and
+                    one_file_system is considered true.
                example: true
-            numeric_owner:
+            numeric_ids:
                type: boolean
                description: |
                    Only store/extract numeric user and group identifiers.
@@ -90,10 +90,10 @@ properties:
                    used, the setting here is ignored and read_special is
                    considered true.
                example: false
-            bsd_flags:
+            flags:
                type: boolean
                description: |
-                    Record bsdflags (e.g. NODUMP, IMMUTABLE) in archive.
+                    Record filesystem flags (e.g. NODUMP, IMMUTABLE) in archive.
                    Defaults to true.
                example: true
            files_cache:
@@ -122,7 +122,8 @@ properties:
                    backups. Globs are expanded. (Tildes are not.) See the
                    output of "borg help patterns" for more details. Quote any
                    value if it contains leading punctuation, so it parses
-                    correctly.
+                    correctly. Note that only one of "patterns" and
+                    "source_directories" may be used.
                example:
                    - 'R /'
                    - '- /home/*/.cache'
@@ -145,10 +146,10 @@ properties:
                    type: string
                description: |
                    Any paths matching these patterns are excluded from backups.
-                    Globs and tildes are expanded. (Note however that a glob
-                    pattern must either start with a glob or be an absolute
-                    path.) Do not backslash spaces in path names. See the output
-                    of "borg help patterns" for more details.
+                    Globs and tildes are expanded. Note that a glob pattern must
+                    either start with a glob or be an absolute path. Do not
+                    backslash spaces in path names. See the output of "borg help
+                    patterns" for more details.
                example:
                    - '*.pyc'
                    - /home/*/.cache
@@ -255,7 +256,7 @@ properties:
                    http://borgbackup.readthedocs.io/en/stable/usage/create.html
                    for details. Defaults to "lz4".
                example: lz4
-            remote_rate_limit:
+            upload_rate_limit:
                type: integer
                description: |
                    Remote network upload rate limit in kiBytes/second. Defaults
@@ -332,10 +333,10 @@ properties:
                    Name of the archive. Borg placeholders can be used. See the
                    output of "borg help placeholders" for details. Defaults to
                    "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". If you specify this
-                    option, you must also specify a prefix in the retention
-                    section to avoid accidental pruning of archives with a
-                    different archive name format. And you should also specify a
-                    prefix in the consistency section as well.
+                    option, consider also specifying a prefix in the retention
+                    and consistency sections to avoid accidental
+                    pruning/checking of archives with different archive name
+                    formats.
                example: "{hostname}-documents-{now}"
            relocated_repo_access_is_ok:
                type: boolean
@@ -447,26 +448,45 @@ properties:
            checks:
                type: array
                items:
-                    type: string
-                    enum:
-                        - repository
-                        - archives
-                        - data
-                        - extract
-                        - disabled
-                    uniqueItems: true
+                    type: object
+                    required: ['name']
+                    additionalProperties: false
+                    properties:
+                        name:
+                            type: string
+                            enum:
+                                - repository
+                                - archives
+                                - data
+                                - extract
+                                - disabled
+                            description: |
+                                Name of consistency check to run: "repository",
+                                "archives", "data", and/or "extract". Set to
+                                "disabled" to disable all consistency checks.
+                                "repository" checks the consistency of the
+                                repository, "archives" checks all of the
+                                archives, "data" verifies the integrity of the
+                                data within the archives, and "extract" does an
+                                extraction dry-run of the most recent archive.
+                                Note that "data" implies "archives".
+                            example: repository
+                        frequency:
+                            type: string
+                            description: |
+                                How frequently to run this type of consistency
+                                check (as a best effort). The value is a number
+                                followed by a unit of time. E.g., "2 weeks" to
+                                run this consistency check no more than every
+                                two weeks for a given repository or "1 month" to
+                                run it no more than monthly. Defaults to
+                                "always": running this check every time checks
+                                are run.
+                            example: 2 weeks
                description: |
-                    List of one or more consistency checks to run: "repository",
-                    "archives", "data", and/or "extract". Defaults to
-                    "repository" and "archives". Set to "disabled" to disable
-                    all consistency checks. "repository" checks the consistency
-                    of the repository, "archives" checks all of the archives,
-                    "data" verifies the integrity of the data within the
-                    archives, and "extract" does an extraction dry-run of the
-                    most recent archive. Note that "data" implies "archives".
-                example:
-                    - repository
-                    - archives
+                    List of one or more consistency checks to run on a periodic
+                    basis (if "frequency" is set) or every time borgmatic runs
+                    checks (if "frequency" is omitted).
            check_repositories:
                type: array
                items:
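Under the new schema, each entry in "checks" is an object with a required "name" and an optional "frequency". An illustrative config fragment, parsed here with ruamel.yaml (which the project already depends on); the values are examples only:

import textwrap
import ruamel.yaml

fragment = textwrap.dedent(
    '''
    consistency:
        checks:
            - name: repository
              frequency: 2 weeks
            - name: archives
              frequency: 1 month
    '''
)
config = ruamel.yaml.YAML(typ='safe').load(fragment)
assert config['consistency']['checks'][0] == {'name': 'repository', 'frequency': '2 weeks'}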
@@ -519,13 +539,22 @@ properties:
                    prevent potential shell injection or privilege escalation.
                additionalProperties: false
                properties:
+                    before_actions:
+                        type: array
+                        items:
+                            type: string
+                        description: |
+                            List of one or more shell commands or scripts to execute
+                            before all the actions for each repository.
+                        example:
+                            - echo "Starting actions."
                    before_backup:
                        type: array
                        items:
                            type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            before creating a backup, run once per configuration file.
+                            before creating a backup, run once per repository.
                        example:
                            - echo "Starting a backup."
                    before_prune:
@@ -534,7 +563,7 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            before pruning, run once per configuration file.
+                            before pruning, run once per repository.
                        example:
                            - echo "Starting pruning."
                    before_compact:
@@ -543,7 +572,7 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            before compaction, run once per configuration file.
+                            before compaction, run once per repository.
                        example:
                            - echo "Starting compaction."
                    before_check:
@@ -552,7 +581,7 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            before consistency checks, run once per configuration file.
+                            before consistency checks, run once per repository.
                        example:
                            - echo "Starting checks."
                    before_extract:
@@ -561,7 +590,7 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            before extracting a backup, run once per configuration file.
+                            before extracting a backup, run once per repository.
                        example:
                            - echo "Starting extracting."
                    after_backup:
@@ -570,7 +599,7 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            after creating a backup, run once per configuration file.
+                            after creating a backup, run once per repository.
                        example:
                            - echo "Finished a backup."
                    after_compact:
@@ -579,7 +608,7 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            after compaction, run once per configuration file.
+                            after compaction, run once per repository.
                        example:
                            - echo "Finished compaction."
                    after_prune:
@@ -588,7 +617,7 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            after pruning, run once per configuration file.
+                            after pruning, run once per repository.
                        example:
                            - echo "Finished pruning."
                    after_check:
@@ -597,7 +626,7 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            after consistency checks, run once per configuration file.
+                            after consistency checks, run once per repository.
                        example:
                            - echo "Finished checks."
                    after_extract:
@@ -606,9 +635,18 @@ properties:
                        type: string
                        description: |
                            List of one or more shell commands or scripts to execute
-                            after extracting a backup, run once per configuration file.
+                            after extracting a backup, run once per repository.
                        example:
                            - echo "Finished extracting."
+                    after_actions:
+                        type: array
+                        items:
+                            type: string
+                        description: |
+                            List of one or more shell commands or scripts to execute
+                            after all actions for each repository.
+                        example:
+                            - echo "Finished actions."
                    on_error:
                        type: array
                        items:
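The per-repository semantics above pair with the new before_actions/after_actions hooks, which wrap the whole action sequence for each repository. An illustrative hooks fragment (the commands are placeholders):

import textwrap
import ruamel.yaml

fragment = textwrap.dedent(
    '''
    hooks:
        before_actions:
            - echo "Starting actions."
        after_actions:
            - echo "Finished actions."
    '''
)
hooks = ruamel.yaml.YAML(typ='safe').load(fragment)['hooks']
assert hooks['before_actions'] == ['echo "Starting actions."']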
@@ -653,10 +691,13 @@ properties:
                        type: string
                        description: |
                            Database name (required if using this hook). Or
-                            "all" to dump all databases on the host. Note
-                            that using this database hook implicitly enables
-                            both read_special and one_file_system (see
-                            above) to support dump and restore streaming.
+                            "all" to dump all databases on the host. (Also
+                            set the "format" to dump each database to a
+                            separate file instead of one combined file.)
+                            Note that using this database hook implicitly
+                            enables both read_special and one_file_system
+                            (see above) to support dump and restore
+                            streaming.
                        example: users
                    hostname:
                        type: string
@@ -691,9 +732,14 @@ properties:
                        description: |
                            Database dump output format. One of "plain",
                            "custom", "directory", or "tar". Defaults to
-                            "custom" (unlike raw pg_dump). See pg_dump
-                            documentation for details. Note that format is
-                            ignored when the database name is "all".
+                            "custom" (unlike raw pg_dump) for a single
+                            database. Or, when database name is "all" and
+                            format is blank, dumps all databases to a single
+                            file. But if a format is specified with an "all"
+                            database name, dumps each database to a separate
+                            file of that format, allowing more convenient
+                            restores of individual databases. See the
+                            pg_dump documentation for more about formats.
                        example: directory
                    ssl_mode:
                        type: string
@@ -726,6 +772,32 @@ properties:
                        description: |
                            Path to a certificate revocation list.
                        example: "/root/.postgresql/root.crl"
+                    pg_dump_command:
+                        type: string
+                        description: |
+                            Command to use instead of "pg_dump" or
+                            "pg_dumpall". This can be used to run a specific
+                            pg_dump version (e.g., one inside a running
+                            docker container). Defaults to "pg_dump" for
+                            single database dump or "pg_dumpall" to dump
+                            all databases.
+                        example: docker exec my_pg_container pg_dump
+                    pg_restore_command:
+                        type: string
+                        description: |
+                            Command to use instead of "pg_restore". This
+                            can be used to run a specific pg_restore
+                            version (e.g., one inside a running docker
+                            container). Defaults to "pg_restore".
+                        example: docker exec my_pg_container pg_restore
+                    psql_command:
+                        type: string
+                        description: |
+                            Command to use instead of "psql". This can be
+                            used to run a specific psql version (e.g.,
+                            one inside a running docker container).
+                            Defaults to "psql".
+                        example: docker exec my_pg_container psql
                    options:
                        type: string
                        description: |
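A rough sketch of how the new command options are meant to behave, going by the descriptions above. choose_dump_command here is a hypothetical helper for illustration, not the project's actual code:

def choose_dump_command(database):
    # Default per the schema: pg_dumpall for "all" with no format, else pg_dump.
    default = (
        'pg_dumpall'
        if database.get('name') == 'all' and not database.get('format')
        else 'pg_dump'
    )
    return database.get('pg_dump_command', default)

assert choose_dump_command({'name': 'users'}) == 'pg_dump'
assert choose_dump_command({'name': 'all'}) == 'pg_dumpall'
assert (
    choose_dump_command({'name': 'users', 'pg_dump_command': 'docker exec my_pg_container pg_dump'})
    == 'docker exec my_pg_container pg_dump'
)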
@@ -859,8 +931,8 @@ properties:
                        enum: ['archive', 'directory']
                        description: |
                            Database dump output format. One of "archive",
                            or "directory". Defaults to "archive". See
                            mongodump documentation for details. Note that
                            format is ignored when the database name is
                            "all".
                        example: directory
@@ -881,42 +953,207 @@ properties:
                    https://docs.mongodb.com/database-tools/mongodump/ and
                    https://docs.mongodb.com/database-tools/mongorestore/ for
                    details.
+            ntfy:
+                type: object
+                required: ['topic']
+                additionalProperties: false
+                properties:
+                    topic:
+                        type: string
+                        description: |
+                            The topic to publish to.
+                            (https://ntfy.sh/docs/publish/)
+                        example: topic
+                    server:
+                        type: string
+                        description: |
+                            The address of your self-hosted ntfy.sh instance.
+                        example: https://ntfy.your-domain.com
+                    start:
+                        type: object
+                        properties:
+                            title:
+                                type: string
+                                description: |
+                                    The title of the message.
+                                example: Ping!
+                            message:
+                                type: string
+                                description: |
+                                    The message body to publish.
+                                example: Your backups have failed.
+                            priority:
+                                type: string
+                                description: |
+                                    The priority to set.
+                                example: urgent
+                            tags:
+                                type: string
+                                description: |
+                                    Tags to attach to the message.
+                                example: incoming_envelope
+                    finish:
+                        type: object
+                        properties:
+                            title:
+                                type: string
+                                description: |
+                                    The title of the message.
+                                example: Ping!
+                            message:
+                                type: string
+                                description: |
+                                    The message body to publish.
+                                example: Your backups have failed.
+                            priority:
+                                type: string
+                                description: |
+                                    The priority to set.
+                                example: urgent
+                            tags:
+                                type: string
+                                description: |
+                                    Tags to attach to the message.
+                                example: incoming_envelope
+                    fail:
+                        type: object
+                        properties:
+                            title:
+                                type: string
+                                description: |
+                                    The title of the message.
+                                example: Ping!
+                            message:
+                                type: string
+                                description: |
+                                    The message body to publish.
+                                example: Your backups have failed.
+                            priority:
+                                type: string
+                                description: |
+                                    The priority to set.
+                                example: urgent
+                            tags:
+                                type: string
+                                description: |
+                                    Tags to attach to the message.
+                                example: incoming_envelope
+                    states:
+                        type: array
+                        items:
+                            type: string
+                            enum:
+                                - start
+                                - finish
+                                - fail
+                            uniqueItems: true
+                        description: |
+                            List of one or more monitoring states to ping for:
+                            "start", "finish", and/or "fail". Defaults to
+                            pinging for failure only.
+                        example:
+                            - start
+                            - finish
            healthchecks:
-                type: string
+                type: object
+                required: ['ping_url']
+                additionalProperties: false
+                properties:
+                    ping_url:
+                        type: string
+                        description: |
+                            Healthchecks ping URL or UUID to notify when a
+                            backup begins, ends, or errors.
+                        example: https://hc-ping.com/your-uuid-here
+                    verify_tls:
+                        type: boolean
+                        description: |
+                            Verify the TLS certificate of the ping URL host.
+                            Defaults to true.
+                        example: false
+                    send_logs:
+                        type: boolean
+                        description: |
+                            Send borgmatic logs to Healthchecks as part of the
+                            "finish" state. Defaults to true.
+                        example: false
+                    ping_body_limit:
+                        type: integer
+                        description: |
+                            Number of bytes of borgmatic logs to send to
+                            Healthchecks, ideally the same as PING_BODY_LIMIT
+                            configured on the Healthchecks server. Set to 0 to
+                            send all logs and disable this truncation. Defaults
+                            to 100000.
+                        example: 200000
+                    states:
+                        type: array
+                        items:
+                            type: string
+                            enum:
+                                - start
+                                - finish
+                                - fail
+                            uniqueItems: true
+                        description: |
+                            List of one or more monitoring states to ping for:
+                            "start", "finish", and/or "fail". Defaults to
+                            pinging for all states.
+                        example:
+                            - finish
                description: |
-                    Healthchecks ping URL or UUID to notify when a backup
-                    begins, ends, or errors. Create an account at
-                    https://healthchecks.io if you'd like to use this service.
-                    See borgmatic monitoring documentation for details.
-                example:
-                    https://hc-ping.com/your-uuid-here
-            cronitor:
-                type: string
-                description: |
-                    Cronitor ping URL to notify when a backup begins, ends, or
-                    errors. Create an account at https://cronitor.io if you'd
-                    like to use this service. See borgmatic monitoring
-                    documentation for details.
-                example:
-                    https://cronitor.link/d3x0c1
-            pagerduty:
-                type: string
-                description: |
-                    PagerDuty integration key used to notify PagerDuty when a
-                    backup errors. Create an account at
-                    https://www.pagerduty.com/ if you'd like to use this
+                    Configuration for a monitoring integration with
+                    Healthchecks. Create an account at https://healthchecks.io
+                    (or self-host Healthchecks) if you'd like to use this
                    service. See borgmatic monitoring documentation for details.
-                example:
-                    a177cad45bd374409f78906a810a3074
-            cronhub:
-                type: string
+            cronitor:
+                type: object
+                required: ['ping_url']
+                additionalProperties: false
+                properties:
+                    ping_url:
+                        type: string
+                        description: |
+                            Cronitor ping URL to notify when a backup begins,
+                            ends, or errors.
+                        example: https://cronitor.link/d3x0c1
                description: |
-                    Cronhub ping URL to notify when a backup begins, ends, or
-                    errors. Create an account at https://cronhub.io if you'd
+                    Configuration for a monitoring integration with Cronitor.
+                    Create an account at https://cronitor.io if you'd
                    like to use this service. See borgmatic monitoring
                    documentation for details.
-                example:
-                    https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d01
+            pagerduty:
+                type: object
+                required: ['integration_key']
+                additionalProperties: false
+                properties:
+                    integration_key:
+                        type: string
+                        description: |
+                            PagerDuty integration key used to notify PagerDuty
+                            when a backup errors.
+                        example: a177cad45bd374409f78906a810a3074
+                description: |
+                    Configuration for a monitoring integration with PagerDuty.
+                    Create an account at https://www.pagerduty.com/ if you'd
+                    like to use this service. See borgmatic monitoring
+                    documentation for details.
+            cronhub:
+                type: object
+                required: ['ping_url']
+                additionalProperties: false
+                properties:
+                    ping_url:
+                        type: string
+                        description: |
+                            Cronhub ping URL to notify when a backup begins,
+                            ends, or errors.
+                        example: https://cronhub.io/ping/1f5e3410-254c-5587
+                description: |
+                    Configuration for a monitoring integration with Cronhub.
+                    Create an account at https://cronhub.io if you'd like to
+                    use this service. See borgmatic monitoring documentation
+                    for details.
            umask:
                type: integer
                description: |
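Putting the new object-based monitoring hooks together, an illustrative healthchecks configuration that validates against the schema above (all values are examples only):

import textwrap
import ruamel.yaml

fragment = textwrap.dedent(
    '''
    hooks:
        healthchecks:
            ping_url: https://hc-ping.com/your-uuid-here
            verify_tls: true
            ping_body_limit: 200000
            states:
                - finish
    '''
)
healthchecks = ruamel.yaml.YAML(typ='safe').load(fragment)['hooks']['healthchecks']
assert healthchecks['states'] == ['finish']
assert healthchecks['verify_tls'] is True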
@@ -4,7 +4,7 @@ import jsonschema
 import pkg_resources
 import ruamel.yaml
 
-from borgmatic.config import load, normalize, override
+from borgmatic.config import environment, load, normalize, override
 
 
 def schema_filename():
@@ -65,15 +65,6 @@ def apply_logical_validation(config_filename, parsed_configuration):
     below), run through any additional logical validation checks. If there are any such validation
     problems, raise a Validation_error.
     '''
-    archive_name_format = parsed_configuration.get('storage', {}).get('archive_name_format')
-    prefix = parsed_configuration.get('retention', {}).get('prefix')
-
-    if archive_name_format and not prefix:
-        raise Validation_error(
-            config_filename,
-            ('If you provide an archive_name_format, you must also specify a retention prefix.',),
-        )
-
     location_repositories = parsed_configuration.get('location', {}).get('repositories')
     check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', [])
     for repository in check_repositories:
@@ -81,14 +72,14 @@ def apply_logical_validation(config_filename, parsed_configuration):
             raise Validation_error(
                 config_filename,
                 (
-                    'Unknown repository in the consistency section\'s check_repositories: {}'.format(
+                    'Unknown repository in the "consistency" section\'s "check_repositories": {}'.format(
                         repository
                    ),
                ),
            )
 
 
-def parse_configuration(config_filename, schema_filename, overrides=None):
+def parse_configuration(config_filename, schema_filename, overrides=None, resolve_env=True):
     '''
     Given the path to a config filename in YAML format, the path to a schema filename in a YAML
     rendition of JSON Schema format, a sequence of configuration file override strings in the form
@@ -98,6 +89,9 @@ def parse_configuration(config_filename, schema_filename, overrides=None):
     {'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'},
     'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}}
 
+    Also return a sequence of logging.LogRecord instances containing any warnings about the
+    configuration.
+
     Raise FileNotFoundError if the file does not exist, PermissionError if the user does not
     have permissions to read the file, or Validation_error if the config does not match the schema.
     '''
@@ -108,7 +102,9 @@ def parse_configuration(config_filename, schema_filename, overrides=None):
         raise Validation_error(config_filename, (str(error),))
 
     override.apply_overrides(config, overrides)
-    normalize.normalize(config)
+    logs = normalize.normalize(config_filename, config)
+    if resolve_env:
+        environment.resolve_env_variables(config)
 
     try:
         validator = jsonschema.Draft7Validator(schema)
@@ -123,7 +119,7 @@ def parse_configuration(config_filename, schema_filename, overrides=None):
 
     apply_logical_validation(config_filename, config)
 
-    return config
+    return config, logs
 
 
 def normalize_repository_path(repository):
@@ -147,27 +143,13 @@ def repositories_match(first, second):
 def guard_configuration_contains_repository(repository, configurations):
     '''
     Given a repository path and a dict mapping from config filename to corresponding parsed config
-    dict, ensure that the repository is declared exactly once in all of the configurations.
-
-    If no repository is given, then error if there are multiple configured repositories.
+    dict, ensure that the repository is declared exactly once in all of the configurations. If no
+    repository is given, skip this check.
 
     Raise ValueError if the repository is not found in a configuration, or is declared multiple
     times.
     '''
     if not repository:
-        count = len(
-            tuple(
-                config_repository
-                for config in configurations.values()
-                for config_repository in config['location']['repositories']
-            )
-        )
-
-        if count > 1:
-            raise ValueError(
-                'Can\'t determine which repository to use. Use --repository option to disambiguate'
-            )
-
         return
 
     count = len(
@@ -183,3 +165,26 @@ def guard_configuration_contains_repository(repository, configurations):
         raise ValueError('Repository {} not found in configuration files'.format(repository))
     if count > 1:
         raise ValueError('Repository {} found in multiple configuration files'.format(repository))
+
+
+def guard_single_repository_selected(repository, configurations):
+    '''
+    Given a repository path and a dict mapping from config filename to corresponding parsed config
+    dict, ensure either a single repository exists across all configuration files or a repository
+    path was given.
+    '''
+    if repository:
+        return
+
+    count = len(
+        tuple(
+            config_repository
+            for config in configurations.values()
+            for config_repository in config['location']['repositories']
+        )
+    )
+
+    if count != 1:
+        raise ValueError(
+            'Can\'t determine which repository to use. Use --repository to disambiguate'
+        )
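To see the new guard in action, here is a self-contained run of the function from the hunk above (docstring omitted; the configurations shape follows the docstrings):

def guard_single_repository_selected(repository, configurations):
    if repository:
        return
    count = len(
        tuple(
            config_repository
            for config in configurations.values()
            for config_repository in config['location']['repositories']
        )
    )
    if count != 1:
        raise ValueError(
            "Can't determine which repository to use. Use --repository to disambiguate"
        )

configurations = {'config.yaml': {'location': {'repositories': ['repo1.borg', 'repo2.borg']}}}
try:
    guard_single_repository_selected(None, configurations)
except ValueError as error:
    print(error)  # Two repositories configured and none selected, so this raises.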
@@ -51,6 +51,9 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
     process with the requested log level. Additionally, raise a CalledProcessError if a process
     exits with an error (or a warning for exit code 1, if that process matches the Borg local path).
 
+    If output log level is None, then instead of logging, capture output for each process and return
+    it as a dict from the process to its output.
+
     For simplicity, it's assumed that the output buffer for each process is its stdout. But if any
     stdouts are given to exclude, then for any matching processes, log from their stderr instead.
@@ -65,6 +68,8 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
         if process.stdout or process.stderr
     }
     output_buffers = list(process_for_output_buffer.keys())
+    captured_outputs = collections.defaultdict(list)
+    still_running = True
 
     # Log output for each process until they all exit.
     while True:
@@ -87,18 +92,25 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
                 # Add the process's output to output_buffers to ensure it'll get read.
                 output_buffers.append(other_process.stdout)
 
-            line = ready_buffer.readline().rstrip().decode()
-            if not line or not ready_process:
-                continue
+            while True:
+                line = ready_buffer.readline().rstrip().decode()
+                if not line or not ready_process:
+                    break
 
-            # Keep the last few lines of output in case the process errors, and we need the output for
-            # the exception below.
-            last_lines = buffer_last_lines[ready_buffer]
-            last_lines.append(line)
-            if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT:
-                last_lines.pop(0)
+                # Keep the last few lines of output in case the process errors, and we need the output for
+                # the exception below.
+                last_lines = buffer_last_lines[ready_buffer]
+                last_lines.append(line)
+                if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT:
+                    last_lines.pop(0)
 
-            logger.log(output_log_level, line)
+                if output_log_level is None:
+                    captured_outputs[ready_process].append(line)
+                else:
+                    logger.log(output_log_level, line)
+
+        if not still_running:
+            break
 
         still_running = False
@@ -129,26 +141,13 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
                     exit_code, command_for_process(process), '\n'.join(last_lines)
                )
 
-        if not still_running:
-            break
-
-    # Consume any remaining output that we missed (if any).
-    for process in processes:
-        output_buffer = output_buffer_for_process(process, exclude_stdouts)
-
-        if not output_buffer:
-            continue
-
-        while True:  # pragma: no cover
-            remaining_output = output_buffer.readline().rstrip().decode()
-
-            if not remaining_output:
-                break
-
-            logger.log(output_log_level, remaining_output)
+    if captured_outputs:
+        return {
+            process: '\n'.join(output_lines) for process, output_lines in captured_outputs.items()
+        }
 
 
-def log_command(full_command, input_file, output_file):
+def log_command(full_command, input_file=None, output_file=None):
     '''
     Log the given command (a sequence of command/argument strings), along with its input/output file
     paths.
@@ -179,15 +178,14 @@ def execute_command(
 ):
     '''
     Execute the given command (a sequence of command/argument strings) and log its output at the
-    given log level. If output log level is None, instead capture and return the output. (Implies
-    run_to_completion.) If an open output file object is given, then write stdout to the file and
-    only log stderr (but only if an output log level is set). If an open input file object is given,
-    then read stdin from the file. If shell is True, execute the command within a shell. If an extra
-    environment dict is given, then use it to augment the current environment, and pass the result
-    into the command. If a working directory is given, use that as the present working directory
-    when running the command. If a Borg local path is given, and the command matches it (regardless
-    of arguments), treat exit code 1 as a warning instead of an error. If run to completion is
-    False, then return the process for the command without executing it to completion.
+    given log level. If an open output file object is given, then write stdout to the file and only
+    log stderr. If an open input file object is given, then read stdin from the file. If shell is
+    True, execute the command within a shell. If an extra environment dict is given, then use it to
+    augment the current environment, and pass the result into the command. If a working directory is
+    given, use that as the present working directory when running the command. If a Borg local path
+    is given, and the command matches it (regardless of arguments), treat exit code 1 as a warning
+    instead of an error. If run to completion is False, then return the process for the command
+    without executing it to completion.
 
     Raise subprocesses.CalledProcessError if an error occurs while running the command.
     '''
@@ -196,12 +194,6 @@ def execute_command(
     do_not_capture = bool(output_file is DO_NOT_CAPTURE)
     command = ' '.join(full_command) if shell else full_command
 
-    if output_log_level is None:
-        output = subprocess.check_output(
-            command, shell=shell, env=environment, cwd=working_directory
-        )
-        return output.decode() if output is not None else None
-
     process = subprocess.Popen(
         command,
         stdin=input_file,
@@ -219,6 +211,33 @@ def execute_command(
     )
 
 
+def execute_command_and_capture_output(
+    full_command, capture_stderr=False, shell=False, extra_environment=None, working_directory=None,
+):
+    '''
+    Execute the given command (a sequence of command/argument strings), capturing and returning its
+    output (stdout). If capture stderr is True, then capture and return stderr in addition to
+    stdout. If shell is True, execute the command within a shell. If an extra environment dict is
+    given, then use it to augment the current environment, and pass the result into the command. If
+    a working directory is given, use that as the present working directory when running the command.
+
+    Raise subprocesses.CalledProcessError if an error occurs while running the command.
+    '''
+    log_command(full_command)
+    environment = {**os.environ, **extra_environment} if extra_environment else None
+    command = ' '.join(full_command) if shell else full_command
+
+    output = subprocess.check_output(
+        command,
+        stderr=subprocess.STDOUT if capture_stderr else None,
+        shell=shell,
+        env=environment,
+        cwd=working_directory,
+    )
+
+    return output.decode() if output is not None else None
+
+
 def execute_command_with_processes(
     full_command,
     processes,
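A standalone sketch of the new helper's core behavior, reduced to the subprocess call it wraps:

import subprocess

def capture_output(full_command, capture_stderr=False):
    # Fold stderr into stdout when requested, exactly as the helper above does.
    output = subprocess.check_output(
        full_command, stderr=subprocess.STDOUT if capture_stderr else None
    )
    return output.decode()

assert capture_output(('echo', 'hello')) == 'hello\n'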
@@ -236,13 +255,14 @@ def execute_command_with_processes(
     run as well. This is useful, for instance, for processes that are streaming output to a named
     pipe that the given command is consuming from.
 
-    If an open output file object is given, then write stdout to the file and only log stderr (but
-    only if an output log level is set). If an open input file object is given, then read stdin from
-    the file. If shell is True, execute the command within a shell. If an extra environment dict is
-    given, then use it to augment the current environment, and pass the result into the command. If
-    a working directory is given, use that as the present working directory when running the
-    command. If a Borg local path is given, then for any matching command or process (regardless of
-    arguments), treat exit code 1 as a warning instead of an error.
+    If an open output file object is given, then write stdout to the file and only log stderr. But
+    if output log level is None, instead suppress logging and return the captured output for (only)
+    the given command. If an open input file object is given, then read stdin from the file. If
+    shell is True, execute the command within a shell. If an extra environment dict is given, then
+    use it to augment the current environment, and pass the result into the command. If a working
+    directory is given, use that as the present working directory when running the command. If a
+    Borg local path is given, then for any matching command or process (regardless of arguments),
+    treat exit code 1 as a warning instead of an error.
 
     Raise subprocesses.CalledProcessError if an error occurs while running the command or in the
     upstream process.
@@ -273,9 +293,12 @@ def execute_command_with_processes(
             process.kill()
         raise
 
-    log_outputs(
+    captured_outputs = log_outputs(
         tuple(processes) + (command_process,),
         (input_file, output_file),
         output_log_level,
         borg_local_path=borg_local_path,
     )
+
+    if output_log_level is None:
+        return captured_outputs.get(command_process)
@@ -1,5 +1,6 @@
 import logging
 import os
+import re
 
 from borgmatic import execute
@@ -9,14 +10,19 @@ logger = logging.getLogger(__name__)
 SOFT_FAIL_EXIT_CODE = 75
 
 
-def interpolate_context(command, context):
+def interpolate_context(config_filename, hook_description, command, context):
     '''
-    Given a single hook command and a dict of context names/values, interpolate the values by
-    "{name}" into the command and return the result.
+    Given a config filename, a hook description, a single hook command, and a dict of context
+    names/values, interpolate the values by "{name}" into the command and return the result.
     '''
     for name, value in context.items():
         command = command.replace('{%s}' % name, str(value))
 
+    for unsupported_variable in re.findall(r'{\w+}', command):
+        logger.warning(
+            f"{config_filename}: Variable '{unsupported_variable}' is not supported in {hook_description} hook"
+        )
+
     return command
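A self-contained copy of interpolate_context as shown above, demonstrating the new warning for leftover placeholders:

import logging
import re

logger = logging.getLogger(__name__)

def interpolate_context(config_filename, hook_description, command, context):
    for name, value in context.items():
        command = command.replace('{%s}' % name, str(value))
    # Any "{name}" placeholder still present was not interpolated, so warn.
    for unsupported_variable in re.findall(r'{\w+}', command):
        logger.warning(
            f"{config_filename}: Variable '{unsupported_variable}' is not supported in {hook_description} hook"
        )
    return command

result = interpolate_context(
    'config.yaml', 'on_error', 'echo {configuration_filename} {bogus}',
    {'configuration_filename': 'config.yaml'},
)
assert result == 'echo config.yaml {bogus}'  # '{bogus}' triggers the warning.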
@@ -26,8 +32,7 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **context):
     a hook description, and whether this is a dry run, run the given commands. Or, don't run them
     if this is a dry run.
 
-    The context contains optional values interpolated by name into the hook commands. Currently,
-    this only applies to the on_error hook.
+    The context contains optional values interpolated by name into the hook commands.
 
     Raise ValueError if the umask cannot be parsed.
     Raise subprocesses.CalledProcessError if an error occurs in a hook.
@@ -39,7 +44,9 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **context):
     dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''
 
     context['configuration_filename'] = config_filename
-    commands = [interpolate_context(command, context) for command in commands]
+    commands = [
+        interpolate_context(config_filename, description, command, context) for command in commands
+    ]
 
     if len(commands) == 1:
         logger.info(
@@ -22,14 +22,18 @@ def initialize_monitor(
     pass
 
 
-def ping_monitor(ping_url, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
     '''
-    Ping the given Cronhub URL, modified with the monitor.State. Use the given configuration
+    Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration
     filename in any log entries. If this is a dry run, then don't actually ping anything.
     '''
     dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
     formatted_state = '/{}/'.format(MONITOR_STATE_TO_CRONHUB[state])
-    ping_url = ping_url.replace('/start/', formatted_state).replace('/ping/', formatted_state)
+    ping_url = (
+        hook_config['ping_url']
+        .replace('/start/', formatted_state)
+        .replace('/ping/', formatted_state)
+    )
 
     logger.info(
         '{}: Pinging Cronhub {}{}'.format(config_filename, state.name.lower(), dry_run_label)
@@ -38,7 +42,12 @@ def ping_monitor(ping_url, config_filename, state, monitoring_log_level, dry_run):
 
     if not dry_run:
         logging.getLogger('urllib3').setLevel(logging.ERROR)
-        requests.get(ping_url)
+        try:
+            response = requests.get(ping_url)
+            if not response.ok:
+                response.raise_for_status()
+        except requests.exceptions.RequestException as error:
+            logger.warning(f'{config_filename}: Cronhub error: {error}')
 
 
 def destroy_monitor(
@@ -22,13 +22,13 @@ def initialize_monitor(
     pass
 
 
-def ping_monitor(ping_url, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
     '''
-    Ping the given Cronitor URL, modified with the monitor.State. Use the given configuration
+    Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration
     filename in any log entries. If this is a dry run, then don't actually ping anything.
     '''
     dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
-    ping_url = '{}/{}'.format(ping_url, MONITOR_STATE_TO_CRONITOR[state])
+    ping_url = '{}/{}'.format(hook_config['ping_url'], MONITOR_STATE_TO_CRONITOR[state])
 
     logger.info(
         '{}: Pinging Cronitor {}{}'.format(config_filename, state.name.lower(), dry_run_label)
@@ -37,7 +37,12 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
 
     if not dry_run:
         logging.getLogger('urllib3').setLevel(logging.ERROR)
-        requests.get(ping_url)
+        try:
+            response = requests.get(ping_url)
+            if not response.ok:
+                response.raise_for_status()
+        except requests.exceptions.RequestException as error:
+            logger.warning(f'{config_filename}: Cronitor error: {error}')
 
 
 def destroy_monitor(
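Both monitors now take the whole hook config dict rather than a bare URL, and ping failures are logged as warnings instead of crashing the run. A standalone sketch of the Cronhub URL rewrite, where 'finish' stands in for MONITOR_STATE_TO_CRONHUB[state]:

hook_config = {'ping_url': 'https://cronhub.io/ping/1f5e3410-254c-5587'}
formatted_state = '/{}/'.format('finish')
ping_url = (
    hook_config['ping_url']
    .replace('/start/', formatted_state)
    .replace('/ping/', formatted_state)
)
assert ping_url == 'https://cronhub.io/finish/1f5e3410-254c-5587'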
@@ -1,17 +1,27 @@
 import logging
 
-from borgmatic.hooks import cronhub, cronitor, healthchecks, mongodb, mysql, pagerduty, postgresql
+from borgmatic.hooks import (
+    cronhub,
+    cronitor,
+    healthchecks,
+    mongodb,
+    mysql,
+    ntfy,
+    pagerduty,
+    postgresql,
+)
 
 logger = logging.getLogger(__name__)
 
 HOOK_NAME_TO_MODULE = {
-    'healthchecks': healthchecks,
-    'cronitor': cronitor,
     'cronhub': cronhub,
+    'cronitor': cronitor,
+    'healthchecks': healthchecks,
+    'mongodb_databases': mongodb,
+    'mysql_databases': mysql,
+    'ntfy': ntfy,
     'pagerduty': pagerduty,
     'postgresql_databases': postgresql,
-    'mysql_databases': mysql,
-    'mongodb_databases': mongodb,
 }
@@ -19,19 +29,14 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
     '''
     Given the hooks configuration dict and a prefix to use in log entries, call the requested
     function of the Python module corresponding to the given hook name. Supply that call with the
-    configuration for this hook, the log prefix, and any given args and kwargs. Return any return
-    value.
-
-    If the hook name is not present in the hooks configuration, then bail without calling anything.
+    configuration for this hook (if any), the log prefix, and any given args and kwargs. Return any
+    return value.
 
     Raise ValueError if the hook name is unknown.
     Raise AttributeError if the function name is not found in the module.
     Raise anything else that the called function raises.
     '''
-    config = hooks.get(hook_name)
-    if not config:
-        logger.debug('{}: No {} hook configured.'.format(log_prefix, hook_name))
-        return
+    config = hooks.get(hook_name, {})
 
     try:
         module = HOOK_NAME_TO_MODULE[hook_name]
@@ -49,7 +54,7 @@ def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
     configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
     values into a dict from hook name to return value.
 
-    If the hook name is not present in the hooks configuration, then don't call the function for it,
+    If the hook name is not present in the hooks configuration, then don't call the function for it
     and omit it from the return values.
 
     Raise ValueError if the hook name is unknown.
||||||
|
@ -61,3 +66,19 @@ def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
|
||||||
for hook_name in hook_names
|
for hook_name in hook_names
|
||||||
if hooks.get(hook_name)
|
if hooks.get(hook_name)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def call_hooks_even_if_unconfigured(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
|
||||||
|
'''
|
||||||
|
Given the hooks configuration dict and a prefix to use in log entries, call the requested
|
||||||
|
function of the Python module corresponding to each given hook name. Supply each call with the
|
||||||
|
configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
|
||||||
|
values into a dict from hook name to return value.
|
||||||
|
|
||||||
|
Raise AttributeError if the function name is not found in the module.
|
||||||
|
Raise anything else that a called function raises. An error stops calls to subsequent functions.
|
||||||
|
'''
|
||||||
|
return {
|
||||||
|
hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
|
||||||
|
for hook_name in hook_names
|
||||||
|
}
|
||||||
|
|
|
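For context, the dispatch module above maps a hook name to a Python module and then calls the named function on it with that hook's configuration. A toy, self-contained sketch of the same pattern (the `uppercase` module and `greet` function are invented for illustration, not part of borgmatic):

```python
# Minimal name-to-module dispatch, mirroring HOOK_NAME_TO_MODULE above.
import types

# Stand-in "module" with one hook function.
uppercase = types.SimpleNamespace(
    greet=lambda config, prefix: f'{prefix}: {config["word"].upper()}'
)

HOOK_NAME_TO_MODULE = {'uppercase': uppercase}


def call_hook(function_name, hooks, log_prefix, hook_name):
    config = hooks.get(hook_name, {})  # Missing config becomes an empty dict.
    module = HOOK_NAME_TO_MODULE[hook_name]  # Unknown hook name raises here.
    return getattr(module, function_name)(config, log_prefix)


print(call_hook('greet', {'uppercase': {'word': 'hi'}}, 'demo', 'uppercase'))  # demo: HI
```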
@@ -2,7 +2,7 @@ import logging
 import os
 import shutil

-from borgmatic.borg.create import DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+from borgmatic.borg.state import DEFAULT_BORGMATIC_SOURCE_DIRECTORY

 logger = logging.getLogger(__name__)

@@ -55,7 +55,7 @@ def remove_database_dumps(dump_path, database_type_name, log_prefix, dry_run):
     '''
     dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''

-    logger.info(
+    logger.debug(
         '{}: Removing {} database dumps{}'.format(log_prefix, database_type_name, dry_run_label)
     )
@@ -13,13 +13,14 @@ MONITOR_STATE_TO_HEALTHCHECKS = {
 }

 PAYLOAD_TRUNCATION_INDICATOR = '...\n'
-PAYLOAD_LIMIT_BYTES = 10 * 1024 - len(PAYLOAD_TRUNCATION_INDICATOR)
+DEFAULT_PING_BODY_LIMIT_BYTES = 100000


 class Forgetful_buffering_handler(logging.Handler):
     '''
     A buffering log handler that stores log messages in memory, and throws away messages (oldest
-    first) once a particular capacity in bytes is reached.
+    first) once a particular capacity in bytes is reached. But if the given byte capacity is zero,
+    don't throw away any messages.
     '''

     def __init__(self, byte_capacity, log_level):

@@ -36,6 +37,9 @@ class Forgetful_buffering_handler(logging.Handler):
         self.byte_count += len(message)
         self.buffer.append(message)

+        if not self.byte_capacity:
+            return
+
         while self.byte_count > self.byte_capacity and self.buffer:
             self.byte_count -= len(self.buffer[0])
             self.buffer.pop(0)

@@ -65,31 +69,45 @@ def format_buffered_logs_for_payload():
     return payload


-def initialize_monitor(
-    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
-):  # pragma: no cover
+def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
     '''
-    Add a handler to the root logger that stores in memory the most recent logs emitted. That
-    way, we can send them all to Healthchecks upon a finish or failure state.
+    Add a handler to the root logger that stores in memory the most recent logs emitted. That way,
+    we can send them all to Healthchecks upon a finish or failure state. But skip this if the
+    "send_logs" option is false.
     '''
+    if hook_config.get('send_logs') is False:
+        return
+
+    ping_body_limit = max(
+        hook_config.get('ping_body_limit', DEFAULT_PING_BODY_LIMIT_BYTES)
+        - len(PAYLOAD_TRUNCATION_INDICATOR),
+        0,
+    )
+
     logging.getLogger().addHandler(
-        Forgetful_buffering_handler(PAYLOAD_LIMIT_BYTES, monitoring_log_level)
+        Forgetful_buffering_handler(ping_body_limit, monitoring_log_level)
     )


-def ping_monitor(ping_url_or_uuid, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
     '''
-    Ping the given Healthchecks URL or UUID, modified with the monitor.State. Use the given
+    Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given
     configuration filename in any log entries, and log to Healthchecks with the giving log level.
     If this is a dry run, then don't actually ping anything.
     '''
     ping_url = (
-        ping_url_or_uuid
-        if ping_url_or_uuid.startswith('http')
-        else 'https://hc-ping.com/{}'.format(ping_url_or_uuid)
+        hook_config['ping_url']
+        if hook_config['ping_url'].startswith('http')
+        else 'https://hc-ping.com/{}'.format(hook_config['ping_url'])
     )
     dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''

+    if 'states' in hook_config and state.name.lower() not in hook_config['states']:
+        logger.info(
+            f'{config_filename}: Skipping Healthchecks {state.name.lower()} ping due to configured states'
+        )
+        return
+
     healthchecks_state = MONITOR_STATE_TO_HEALTHCHECKS.get(state)
     if healthchecks_state:
         ping_url = '{}/{}'.format(ping_url, healthchecks_state)

@@ -106,10 +124,17 @@ def ping_monitor(ping_url_or_uuid, config_filename, state, monitoring_log_level,

     if not dry_run:
         logging.getLogger('urllib3').setLevel(logging.ERROR)
-        requests.post(ping_url, data=payload.encode('utf-8'))
+        try:
+            response = requests.post(
+                ping_url, data=payload.encode('utf-8'), verify=hook_config.get('verify_tls', True)
+            )
+            if not response.ok:
+                response.raise_for_status()
+        except requests.exceptions.RequestException as error:
+            logger.warning(f'{config_filename}: Healthchecks error: {error}')


-def destroy_monitor(ping_url_or_uuid, config_filename, monitoring_log_level, dry_run):
+def destroy_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
     '''
     Remove the monitor handler that was added to the root logger. This prevents the handler from
     getting reused by other instances of this monitor.
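Worked example of the new ping body limit arithmetic above: a configured limit is shrunk by the truncation indicator's length and floored at zero, and (per the updated docstring) a zero byte capacity means the buffering handler never discards messages.

```python
# Values taken from the diff above; the loop inputs are made-up examples.
PAYLOAD_TRUNCATION_INDICATOR = '...\n'
DEFAULT_PING_BODY_LIMIT_BYTES = 100000

for configured in (DEFAULT_PING_BODY_LIMIT_BYTES, 4, 0):
    ping_body_limit = max(configured - len(PAYLOAD_TRUNCATION_INDICATOR), 0)
    print(configured, '->', ping_body_limit)
# 100000 -> 99996
# 4 -> 0       (exactly eaten by the indicator: unlimited buffering)
# 0 -> 0       (floored at zero: unlimited buffering)
```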
@@ -131,12 +131,13 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     if dry_run:
         return

+    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
+    # if the restore paths don't exist in the archive.
     execute_command_with_processes(
         restore_command,
         [extract_process] if extract_process else [],
         output_log_level=logging.DEBUG,
         input_file=extract_process.stdout if extract_process else None,
-        borg_local_path=location_config.get('local_path', 'borg'),
     )
@@ -1,6 +1,6 @@
 from enum import Enum

-MONITOR_HOOK_NAMES = ('healthchecks', 'cronitor', 'cronhub', 'pagerduty')
+MONITOR_HOOK_NAMES = ('healthchecks', 'cronitor', 'cronhub', 'pagerduty', 'ntfy')


 class State(Enum):
@@ -1,6 +1,10 @@
 import logging

-from borgmatic.execute import execute_command, execute_command_with_processes
+from borgmatic.execute import (
+    execute_command,
+    execute_command_and_capture_output,
+    execute_command_with_processes,
+)
 from borgmatic.hooks import dump

 logger = logging.getLogger(__name__)

@@ -20,7 +24,7 @@ SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 's

 def database_names_to_dump(database, extra_environment, log_prefix, dry_run_label):
     '''
-    Given a requested database name, return the corresponding sequence of database names to dump.
+    Given a requested database config, return the corresponding sequence of database names to dump.
     In the case of "all", query for the names of databases on the configured host and return them,
     excluding any system databases that will cause problems during restore.
     '''

@@ -42,8 +46,8 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run_labe
     logger.debug(
         '{}: Querying for "all" MySQL databases to dump{}'.format(log_prefix, dry_run_label)
     )
-    show_output = execute_command(
-        show_command, output_log_level=None, extra_environment=extra_environment
+    show_output = execute_command_and_capture_output(
+        show_command, extra_environment=extra_environment
     )

     return tuple(

@@ -166,11 +170,12 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     if dry_run:
         return

+    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
+    # if the restore paths don't exist in the archive.
     execute_command_with_processes(
         restore_command,
         [extract_process],
         output_log_level=logging.DEBUG,
         input_file=extract_process.stdout,
         extra_environment=extra_environment,
-        borg_local_path=location_config.get('local_path', 'borg'),
     )
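These hooks build their command lines by concatenating optional tuple fragments, so absent config keys simply contribute nothing. A standalone illustration of that pattern with made-up values:

```python
# Conditional tuple concatenation, as used throughout the database hooks above.
database = {'name': 'posts', 'hostname': 'database3.example.org', 'port': 3307}

command = (
    ('mysqldump',)
    + (('--host', database['hostname']) if 'hostname' in database else ())
    + (('--port', str(database['port'])) if 'port' in database else ())
    + (database['name'],)
)
print(command)
# ('mysqldump', '--host', 'database3.example.org', '--port', '3307', 'posts')
```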
@@ -0,0 +1,75 @@
+import logging
+
+import requests
+
+from borgmatic.hooks import monitor
+
+logger = logging.getLogger(__name__)
+
+MONITOR_STATE_TO_NTFY = {
+    monitor.State.START: None,
+    monitor.State.FINISH: None,
+    monitor.State.FAIL: None,
+}
+
+
+def initialize_monitor(
+    ping_url, config_filename, monitoring_log_level, dry_run
+):  # pragma: no cover
+    '''
+    No initialization is necessary for this monitor.
+    '''
+    pass
+
+
+def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
+    '''
+    Ping the configured Ntfy topic. Use the given configuration filename in any log entries.
+    If this is a dry run, then don't actually ping anything.
+    '''
+
+    run_states = hook_config.get('states', ['fail'])
+
+    if state.name.lower() in run_states:
+        dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
+
+        state_config = hook_config.get(
+            state.name.lower(),
+            {
+                'title': f'A Borgmatic {state.name} event happened',
+                'message': f'A Borgmatic {state.name} event happened',
+                'priority': 'default',
+                'tags': 'borgmatic',
+            },
+        )
+
+        base_url = hook_config.get('server', 'https://ntfy.sh')
+        topic = hook_config.get('topic')
+
+        logger.info(f'{config_filename}: Pinging ntfy topic {topic}{dry_run_label}')
+        logger.debug(f'{config_filename}: Using Ntfy ping URL {base_url}/{topic}')
+
+        headers = {
+            'X-Title': state_config.get('title'),
+            'X-Message': state_config.get('message'),
+            'X-Priority': state_config.get('priority'),
+            'X-Tags': state_config.get('tags'),
+        }
+
+        if not dry_run:
+            logging.getLogger('urllib3').setLevel(logging.ERROR)
+            try:
+                response = requests.post(f'{base_url}/{topic}', headers=headers)
+                if not response.ok:
+                    response.raise_for_status()
+            except requests.exceptions.RequestException as error:
+                logger.warning(f'{config_filename}: Ntfy error: {error}')
+
+
+def destroy_monitor(
+    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
+):  # pragma: no cover
+    '''
+    No destruction is necessary for this monitor.
+    '''
+    pass
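A quick dry-run exercise of the new ntfy hook, assuming this branch of borgmatic is installed and importable; the topic name here is hypothetical:

```python
# Dry run: logs the ping it would send to https://ntfy.sh/<topic> without posting.
import logging

from borgmatic.hooks import monitor, ntfy

logging.basicConfig(level=logging.DEBUG)

hook_config = {'topic': 'my-borgmatic-alerts', 'states': ['start', 'fail']}
ntfy.ping_monitor(
    hook_config, 'config.yaml', monitor.State.FAIL, logging.INFO, dry_run=True
)
```

Note the design choice visible in the diff: unlike the other monitor hooks, ntfy defaults to pinging only on `fail`, since a push notification per successful backup would be noisy.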
@@ -21,10 +21,10 @@ def initialize_monitor(
     pass


-def ping_monitor(integration_key, config_filename, state, monitoring_log_level, dry_run):
+def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
     '''
-    If this is an error state, create a PagerDuty event with the given integration key. Use the
-    given configuration filename in any log entries. If this is a dry run, then don't actually
+    If this is an error state, create a PagerDuty event with the configured integration key. Use
+    the given configuration filename in any log entries. If this is a dry run, then don't actually
     create an event.
     '''
     if state != monitor.State.FAIL:

@@ -47,7 +47,7 @@ def ping_monitor(integration_key, config_filename, state, monitoring_log_level,
     )
     payload = json.dumps(
         {
-            'routing_key': integration_key,
+            'routing_key': hook_config['integration_key'],
             'event_action': 'trigger',
             'payload': {
                 'summary': 'backup failed on {}'.format(hostname),

@@ -68,7 +68,12 @@ def ping_monitor(integration_key, config_filename, state, monitoring_log_level,
     logger.debug('{}: Using PagerDuty payload: {}'.format(config_filename, payload))

     logging.getLogger('urllib3').setLevel(logging.ERROR)
-    requests.post(EVENTS_API_URL, data=payload.encode('utf-8'))
+    try:
+        response = requests.post(EVENTS_API_URL, data=payload.encode('utf-8'))
+        if not response.ok:
+            response.raise_for_status()
+    except requests.exceptions.RequestException as error:
+        logger.warning(f'{config_filename}: PagerDuty error: {error}')


 def destroy_monitor(
@@ -1,6 +1,11 @@
+import csv
 import logging

-from borgmatic.execute import execute_command, execute_command_with_processes
+from borgmatic.execute import (
+    execute_command,
+    execute_command_and_capture_output,
+    execute_command_with_processes,
+)
 from borgmatic.hooks import dump

 logger = logging.getLogger(__name__)

@@ -34,6 +39,44 @@ def make_extra_environment(database):
     return extra


+EXCLUDED_DATABASE_NAMES = ('template0', 'template1')
+
+
+def database_names_to_dump(database, extra_environment, log_prefix, dry_run_label):
+    '''
+    Given a requested database config, return the corresponding sequence of database names to dump.
+    In the case of "all" when a database format is given, query for the names of databases on the
+    configured host and return them. For "all" without a database format, just return a sequence
+    containing "all".
+    '''
+    requested_name = database['name']
+
+    if requested_name != 'all':
+        return (requested_name,)
+    if not database.get('format'):
+        return ('all',)
+
+    list_command = (
+        ('psql', '--list', '--no-password', '--csv', '--tuples-only')
+        + (('--host', database['hostname']) if 'hostname' in database else ())
+        + (('--port', str(database['port'])) if 'port' in database else ())
+        + (('--username', database['username']) if 'username' in database else ())
+        + (tuple(database['options'].split(' ')) if 'options' in database else ())
+    )
+    logger.debug(
+        '{}: Querying for "all" PostgreSQL databases to dump{}'.format(log_prefix, dry_run_label)
+    )
+    list_output = execute_command_and_capture_output(
+        list_command, extra_environment=extra_environment
+    )
+
+    return tuple(
+        row[0]
+        for row in csv.reader(list_output.splitlines(), delimiter=',', quotechar='"')
+        if row[0] not in EXCLUDED_DATABASE_NAMES
+    )
+
+
 def dump_databases(databases, log_prefix, location_config, dry_run):
     '''
     Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of

@@ -43,6 +86,8 @@ def dump_databases(databases, log_prefix, location_config, dry_run):

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+
+    Raise ValueError if the databases to dump cannot be determined.
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []

@@ -50,51 +95,59 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label))

     for database in databases:
-        name = database['name']
-        dump_filename = dump.make_database_dump_filename(
-            make_dump_path(location_config), name, database.get('hostname')
-        )
-        all_databases = bool(name == 'all')
-        dump_format = database.get('format', 'custom')
-        command = (
-            (
-                'pg_dumpall' if all_databases else 'pg_dump',
-                '--no-password',
-                '--clean',
-                '--if-exists',
-            )
-            + (('--host', database['hostname']) if 'hostname' in database else ())
-            + (('--port', str(database['port'])) if 'port' in database else ())
-            + (('--username', database['username']) if 'username' in database else ())
-            + (() if all_databases else ('--format', dump_format))
-            + (('--file', dump_filename) if dump_format == 'directory' else ())
-            + (tuple(database['options'].split(' ')) if 'options' in database else ())
-            + (() if all_databases else (name,))
-            # Use shell redirection rather than the --file flag to sidestep synchronization issues
-            # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
-            # format in a particular, a named destination is required, and redirection doesn't work.
-            + (('>', dump_filename) if dump_format != 'directory' else ())
-        )
         extra_environment = make_extra_environment(database)
-        logger.debug(
-            '{}: Dumping PostgreSQL database {} to {}{}'.format(
-                log_prefix, name, dump_filename, dry_run_label
-            )
-        )
-        if dry_run:
-            continue
-
-        if dump_format == 'directory':
-            dump.create_parent_directory_for_dump(dump_filename)
-        else:
-            dump.create_named_pipe_for_dump(dump_filename)
-
-        processes.append(
-            execute_command(
-                command, shell=True, extra_environment=extra_environment, run_to_completion=False
-            )
-        )
+        dump_path = make_dump_path(location_config)
+        dump_database_names = database_names_to_dump(
+            database, extra_environment, log_prefix, dry_run_label
+        )
+
+        if not dump_database_names:
+            raise ValueError('Cannot find any PostgreSQL databases to dump.')
+
+        for database_name in dump_database_names:
+            dump_format = database.get('format', None if database_name == 'all' else 'custom')
+            default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
+            dump_command = database.get('pg_dump_command') or default_dump_command
+            dump_filename = dump.make_database_dump_filename(
+                dump_path, database_name, database.get('hostname')
+            )
+
+            command = (
+                (dump_command, '--no-password', '--clean', '--if-exists',)
+                + (('--host', database['hostname']) if 'hostname' in database else ())
+                + (('--port', str(database['port'])) if 'port' in database else ())
+                + (('--username', database['username']) if 'username' in database else ())
+                + (('--format', dump_format) if dump_format else ())
+                + (('--file', dump_filename) if dump_format == 'directory' else ())
+                + (tuple(database['options'].split(' ')) if 'options' in database else ())
+                + (() if database_name == 'all' else (database_name,))
+                # Use shell redirection rather than the --file flag to sidestep synchronization issues
+                # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
+                # format in a particular, a named destination is required, and redirection doesn't work.
+                + (('>', dump_filename) if dump_format != 'directory' else ())
+            )
+
+            logger.debug(
+                '{}: Dumping PostgreSQL database "{}" to {}{}'.format(
+                    log_prefix, database_name, dump_filename, dry_run_label
+                )
+            )
+            if dry_run:
+                continue
+
+            if dump_format == 'directory':
+                dump.create_parent_directory_for_dump(dump_filename)
+            else:
+                dump.create_named_pipe_for_dump(dump_filename)
+
+            processes.append(
+                execute_command(
+                    command,
+                    shell=True,
+                    extra_environment=extra_environment,
+                    run_to_completion=False,
+                )
+            )

     return processes

@@ -140,16 +193,18 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     dump_filename = dump.make_database_dump_filename(
         make_dump_path(location_config), database['name'], database.get('hostname')
     )
+    psql_command = database.get('psql_command') or 'psql'
     analyze_command = (
-        ('psql', '--no-password', '--quiet')
+        (psql_command, '--no-password', '--quiet')
         + (('--host', database['hostname']) if 'hostname' in database else ())
         + (('--port', str(database['port'])) if 'port' in database else ())
         + (('--username', database['username']) if 'username' in database else ())
         + (('--dbname', database['name']) if not all_databases else ())
         + ('--command', 'ANALYZE')
     )
+    pg_restore_command = database.get('pg_restore_command') or 'pg_restore'
     restore_command = (
-        ('psql' if all_databases else 'pg_restore', '--no-password')
+        (psql_command if all_databases else pg_restore_command, '--no-password')
         + (
             ('--if-exists', '--exit-on-error', '--clean', '--dbname', database['name'])
             if not all_databases

@@ -168,12 +223,13 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     if dry_run:
         return

+    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
+    # if the restore paths don't exist in the archive.
     execute_command_with_processes(
         restore_command,
         [extract_process] if extract_process else [],
         output_log_level=logging.DEBUG,
         input_file=extract_process.stdout if extract_process else None,
         extra_environment=extra_environment,
-        borg_local_path=location_config.get('local_path', 'borg'),
     )
     execute_command(analyze_command, extra_environment=extra_environment)
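A quick illustration of how the new `database_names_to_dump()` dispatches, assuming the function above is in scope (for example, imported from `borgmatic.hooks.postgresql`). Neither of these cases touches a real PostgreSQL server; only "all" with a `format` triggers the `psql --list` query:

```python
# A named database dumps as itself; "all" without a format stays one combined dump.
print(database_names_to_dump({'name': 'users'}, {}, 'demo', ''))  # ('users',)
print(database_names_to_dump({'name': 'all'}, {}, 'demo', ''))    # ('all',)
# {'name': 'all', 'format': 'custom'} would instead query the server for
# database names and dump each one to its own file.
```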
@@ -85,18 +85,19 @@ class Multi_stream_handler(logging.Handler):
             handler.setLevel(level)


-LOG_LEVEL_TO_COLOR = {
-    logging.CRITICAL: colorama.Fore.RED,
-    logging.ERROR: colorama.Fore.RED,
-    logging.WARN: colorama.Fore.YELLOW,
-    logging.INFO: colorama.Fore.GREEN,
-    logging.DEBUG: colorama.Fore.CYAN,
-}
-
-
 class Console_color_formatter(logging.Formatter):
     def format(self, record):
-        color = LOG_LEVEL_TO_COLOR.get(record.levelno)
+        add_custom_log_levels()
+
+        color = {
+            logging.CRITICAL: colorama.Fore.RED,
+            logging.ERROR: colorama.Fore.RED,
+            logging.WARN: colorama.Fore.YELLOW,
+            logging.ANSWER: colorama.Fore.MAGENTA,
+            logging.INFO: colorama.Fore.GREEN,
+            logging.DEBUG: colorama.Fore.CYAN,
+        }.get(record.levelno)
+
         return color_text(color, record.msg)


@@ -110,6 +111,45 @@ def color_text(color, message):
     return '{}{}{}'.format(color, message, colorama.Style.RESET_ALL)


+def add_logging_level(level_name, level_number):
+    '''
+    Globally add a custom logging level based on the given (all uppercase) level name and number.
+    Do this idempotently.
+
+    Inspired by https://stackoverflow.com/questions/2183233/how-to-add-a-custom-loglevel-to-pythons-logging-facility/35804945#35804945
+    '''
+    method_name = level_name.lower()
+
+    if not hasattr(logging, level_name):
+        logging.addLevelName(level_number, level_name)
+        setattr(logging, level_name, level_number)
+
+    if not hasattr(logging, method_name):
+
+        def log_for_level(self, message, *args, **kwargs):  # pragma: no cover
+            if self.isEnabledFor(level_number):
+                self._log(level_number, message, args, **kwargs)
+
+        setattr(logging.getLoggerClass(), method_name, log_for_level)
+
+    if not hasattr(logging.getLoggerClass(), method_name):
+
+        def log_to_root(message, *args, **kwargs):  # pragma: no cover
+            logging.log(level_number, message, *args, **kwargs)
+
+        setattr(logging, method_name, log_to_root)
+
+
+ANSWER = logging.WARN - 5
+
+
+def add_custom_log_levels():  # pragma: no cover
+    '''
+    Add a custom log level between WARN and INFO for user-requested answers.
+    '''
+    add_logging_level('ANSWER', ANSWER)
+
+
 def configure_logging(
     console_log_level,
     syslog_log_level=None,

@@ -130,6 +170,8 @@ def configure_logging(
     if monitoring_log_level is None:
         monitoring_log_level = console_log_level

+    add_custom_log_levels()
+
     # Log certain log levels to console stderr and others to stdout. This supports use cases like
     # grepping (non-error) output.
     console_error_handler = logging.StreamHandler(sys.stderr)

@@ -138,7 +180,8 @@ def configure_logging(
         {
             logging.CRITICAL: console_error_handler,
             logging.ERROR: console_error_handler,
-            logging.WARN: console_standard_handler,
+            logging.WARN: console_error_handler,
+            logging.ANSWER: console_standard_handler,
             logging.INFO: console_standard_handler,
             logging.DEBUG: console_standard_handler,
         }
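A small sanity check of the new ANSWER level machinery above, assuming `borgmatic.logger` from this branch is importable. After registration, ANSWER sits at `WARN - 5 == 25`, between WARNING (30) and INFO (20), and per-logger `.answer()` helpers exist:

```python
import logging

from borgmatic.logger import add_custom_log_levels

add_custom_log_levels()
logging.basicConfig(level=logging.ANSWER)  # ANSWER attribute added dynamically.
logging.getLogger('demo').answer('visible between WARNING and INFO')
print(logging.ANSWER)  # 25
```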
@@ -1,23 +1,34 @@
+import logging
 import os
 import signal
+import sys
+
+logger = logging.getLogger(__name__)
+
+EXIT_CODE_FROM_SIGNAL = 128
+

-def _handle_signal(signal_number, frame):  # pragma: no cover
+def handle_signal(signal_number, frame):
     '''
     Send the signal to all processes in borgmatic's process group, which includes child processes.
     '''
     # Prevent infinite signal handler recursion. If the parent frame is this very same handler
     # function, we know we're recursing.
-    if frame.f_back.f_code.co_name == _handle_signal.__name__:
+    if frame.f_back.f_code.co_name == handle_signal.__name__:
         return

     os.killpg(os.getpgrp(), signal_number)

+    if signal_number == signal.SIGTERM:
+        logger.critical('Exiting due to TERM signal')
+        sys.exit(EXIT_CODE_FROM_SIGNAL + signal.SIGTERM)
+

-def configure_signals():  # pragma: no cover
+def configure_signals():
     '''
     Configure borgmatic's signal handlers to pass relevant signals through to any child processes
     like Borg. Note that SIGINT gets passed through even without these changes.
     '''
     for signal_number in (signal.SIGHUP, signal.SIGTERM, signal.SIGUSR1, signal.SIGUSR2):
-        signal.signal(signal_number, handle_signal)
+        signal.signal(signal_number, handle_signal)
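Worked example of the exit-code convention introduced above: POSIX shells report a process killed by signal N as exiting with `128 + N`, so a SIGTERM exit becomes `128 + 15`:

```python
import signal

EXIT_CODE_FROM_SIGNAL = 128
print(EXIT_CODE_FROM_SIGNAL + signal.SIGTERM)  # 143 on POSIX platforms
```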
@@ -1,7 +1,9 @@
 import logging

+import borgmatic.logger
+
 VERBOSITY_ERROR = -1
-VERBOSITY_WARNING = 0
+VERBOSITY_ANSWER = 0
 VERBOSITY_SOME = 1
 VERBOSITY_LOTS = 2

@@ -10,9 +12,11 @@ def verbosity_to_log_level(verbosity):
     '''
     Given a borgmatic verbosity value, return the corresponding Python log level.
     '''
+    borgmatic.logger.add_custom_log_levels()
+
     return {
         VERBOSITY_ERROR: logging.ERROR,
-        VERBOSITY_WARNING: logging.WARNING,
+        VERBOSITY_ANSWER: logging.ANSWER,
         VERBOSITY_SOME: logging.INFO,
         VERBOSITY_LOTS: logging.DEBUG,
     }.get(verbosity, logging.WARNING)
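Quick check of the remapped verbosity table above, assuming this branch of borgmatic is importable. Verbosity 0 now maps to the custom ANSWER level rather than WARNING:

```python
import borgmatic.verbosity

print(borgmatic.verbosity.verbosity_to_log_level(1))  # 20 (logging.INFO)
print(borgmatic.verbosity.verbosity_to_log_level(0))  # 25 (logging.ANSWER)
```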
@@ -1,14 +1,14 @@
-FROM python:3.8-alpine3.13 as borgmatic
+FROM alpine:3.16.0 as borgmatic

 COPY . /app
-RUN apk add --no-cache py3-ruamel.yaml py3-ruamel.yaml.clib
+RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib
 RUN pip install --no-cache /app && generate-borgmatic-config && chmod +r /etc/borgmatic/config.yaml
 RUN borgmatic --help > /command-line.txt \
-    && for action in init prune compact create check extract export-tar mount umount restore list info borg; do \
+    && for action in rcreate transfer prune compact create check extract export-tar mount umount restore rlist list rinfo info break-lock borg; do \
        echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \
        && borgmatic "$action" --help >> /command-line.txt; done

-FROM node:15.2.1-alpine as html
+FROM node:18.4.0-alpine as html

 ARG ENVIRONMENT=production

@@ -27,7 +27,7 @@ COPY . /source
 RUN NODE_ENV=${ENVIRONMENT} npx eleventy --input=/source/docs --output=/output/docs \
     && mv /output/docs/index.html /output/index.html

-FROM nginx:1.19.4-alpine
+FROM nginx:1.22.0-alpine

 COPY --from=html /output /usr/share/nginx/html
 COPY --from=borgmatic /etc/borgmatic/config.yaml /usr/share/nginx/html/docs/reference/config.yaml
@@ -530,3 +530,11 @@ main .elv-toc + h1 .direct-link {
     display: none ;
   }
 }
+
+.header-anchor {
+  text-decoration: none;
+}
+
+.header-anchor:hover::after {
+  content: " 🔗";
+}
@@ -1,17 +1,18 @@
 ---
 title: How to add preparation and cleanup steps to backups
 eleventyNavigation:
-  key: Add preparation and cleanup steps
+  key: 🧹 Add preparation and cleanup steps
   parent: How-to guides
-  order: 8
+  order: 9
 ---
 ## Preparation and cleanup hooks

-If you find yourself performing prepraration tasks before your backup runs, or
+If you find yourself performing preparation tasks before your backup runs, or
 cleanup work afterwards, borgmatic hooks may be of interest. Hooks are shell
-commands that borgmatic executes for you at various points, and they're
-configured in the `hooks` section of your configuration file. But if you're
-looking to backup a database, it's probably easier to use the [database backup
+commands that borgmatic executes for you at various points as it runs, and
+they're configured in the `hooks` section of your configuration file. But if
+you're looking to backup a database, it's probably easier to use the [database
+backup
 feature](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/)
 instead.

@@ -27,15 +28,24 @@ hooks:
     - umount /some/filesystem
 ```

-The `before_backup` and `after_backup` hooks each run once per configuration
-file. `before_backup` hooks run prior to backups of all repositories in a
-configuration file, right before the `create` action. `after_backup` hooks run
-afterwards, but not if an error occurs in a previous hook or in the backups
-themselves.
+<span class="minilink minilink-addedin">New in version 1.6.0</span> The
+`before_backup` and `after_backup` hooks each run once per repository in a
+configuration file. `before_backup` hooks runs right before the `create`
+action for a particular repository, and `after_backup` hooks run afterwards,
+but not if an error occurs in a previous hook or in the backups themselves.
+(Prior to borgmatic 1.6.0, these hooks instead ran once per configuration file
+rather than once per repository.)

 There are additional hooks that run before/after other actions as well. For
-instance, `before_prune` runs before a `prune` action, while `after_prune`
-runs after it.
+instance, `before_prune` runs before a `prune` action for a repository, while
+`after_prune` runs after it.
+
+<span class="minilink minilink-addedin">New in version 1.7.0</span> The
+`before_actions` and `after_actions` hooks run before/after all the actions
+(like `create`, `prune`, etc.) for each repository. These hooks are a good
+place to run per-repository steps like mounting/unmounting a remote
+filesystem.

 ## Variable interpolation

@@ -46,24 +56,28 @@ separate shell script:
 ```yaml
 hooks:
     after_prune:
-        - record-prune.sh "{configuration_filename}" "{repositories}"
+        - record-prune.sh "{configuration_filename}" "{repository}"
 ```

 In this example, when the hook is triggered, borgmatic interpolates runtime
 values into the hook command: the borgmatic configuration filename and the
-paths of all configured repositories. Here's the full set of supported
+paths of the current Borg repository. Here's the full set of supported
 variables you can use here:

 * `configuration_filename`: borgmatic configuration filename in which the
   hook was defined
-* `repositories`: comma-separated paths of all repositories configured in the
-  current borgmatic configuration file
+* `repository`: path of the current repository as configured in the current
+  borgmatic configuration file
+
+Note that you can also interpolate in [arbitrary environment
+variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
+
+## Global hooks

 You can also use `before_everything` and `after_everything` hooks to perform
 global setup or cleanup:

-## Global hooks
-
 ```yaml
 hooks:
     before_everything:
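The interpolation the doc change above describes is plain placeholder substitution into the hook command string. A toy sketch of the assumed semantics (borgmatic's real implementation lives elsewhere in this diff set; the paths are made up):

```python
# Placeholder substitution into a configured hook command.
command = 'record-prune.sh "{configuration_filename}" "{repository}"'
print(
    command.format(
        configuration_filename='/etc/borgmatic/config.yaml',
        repository='ssh://me@buddys-server.org/./backup.borg',
    )
)
# record-prune.sh "/etc/borgmatic/config.yaml" "ssh://me@buddys-server.org/./backup.borg"
```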
@@ -1,9 +1,9 @@
 ---
 title: How to backup to a removable drive or an intermittent server
 eleventyNavigation:
-  key: Backup to a removable drive or server
+  key: 💾 Backup to a removable drive/server
   parent: How-to guides
-  order: 9
+  order: 10
 ---
 ## Occasional backups

@@ -76,7 +76,7 @@ location:
         - /home

     repositories:
-        - me@buddys-server.org:backup.borg
+        - ssh://me@buddys-server.org/./backup.borg

 hooks:
     before_backup:
@@ -1,9 +1,9 @@
 ---
 title: How to backup your databases
 eleventyNavigation:
-  key: Backup your databases
+  key: 🗄️ Backup your databases
   parent: How-to guides
-  order: 7
+  order: 8
 ---
 ## Database dump hooks

@@ -33,7 +33,8 @@ As part of each backup, borgmatic streams a database dump for each configured
 database directly to Borg, so it's included in the backup without consuming
 additional disk space. (The exceptions are the PostgreSQL/MongoDB "directory"
 dump formats, which can't stream and therefore do consume temporary disk
-space.)
+space. Additionally, prior to borgmatic 1.5.3, all database dumps consumed
+temporary disk space.)

 To support this, borgmatic creates temporary named pipes in `~/.borgmatic` by
 default. To customize this path, set the `borgmatic_source_directory` option

@@ -51,6 +52,8 @@ hooks:
     postgresql_databases:
         - name: users
           hostname: database1.example.org
+        - name: orders
+          hostname: database2.example.org
           port: 5433
           username: postgres
           password: trustsome1

@@ -58,14 +61,14 @@ hooks:
           options: "--role=someone"
     mysql_databases:
         - name: posts
-          hostname: database2.example.org
+          hostname: database3.example.org
           port: 3307
           username: root
           password: trustsome1
           options: "--skip-comments"
     mongodb_databases:
         - name: messages
-          hostname: database3.example.org
+          hostname: database4.example.org
           port: 27018
           username: dbuser
           password: trustsome1

@@ -73,6 +76,9 @@ hooks:
           options: "--ssl"
 ```

+
+### All databases
+
 If you want to dump all databases on a host, use `all` for the database name:

 ```yaml

@@ -88,8 +94,30 @@ hooks:
 Note that you may need to use a `username` of the `postgres` superuser for
 this to work with PostgreSQL.

-If you would like to backup databases only and not source directories, you can
-specify an empty `source_directories` value (as it is a mandatory field):
+<span class="minilink minilink-addedin">New in version 1.7.6</span> With
+PostgreSQL and MySQL, you can optionally dump "all" databases to separate
+files instead of one combined dump file, allowing more convenient restores of
+individual databases. Enable this by specifying your desired database dump
+`format`:
+
+```yaml
+hooks:
+    postgresql_databases:
+        - name: all
+          format: custom
+    mysql_databases:
+        - name: all
+          format: sql
+```
+
+### No source directories
+
+<span class="minilink minilink-addedin">New in version 1.7.1</span> If you
+would like to backup databases only and not source directories, you can omit
+`source_directories` entirely.
+
+In older versions of borgmatic, instead specify an empty `source_directories`
+value, as it is a mandatory option prior to version 1.7.1:

 ```yaml
 location:

@@ -100,6 +128,16 @@ hooks:
 ```

+
+### External passwords
+
+If you don't want to keep your database passwords in your borgmatic
+configuration file, you can instead pass them in via [environment
+variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/)
+or command-line [configuration
+overrides](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides).
+
+
 ### Configuration backups

 An important note about this database configuration: You'll need the

@@ -122,14 +160,13 @@ that you'd like supported.

 To restore a database dump from an archive, use the `borgmatic restore`
 action. But the first step is to figure out which archive to restore from. A
-good way to do that is to use the `list` action:
+good way to do that is to use the `rlist` action:

 ```bash
-borgmatic list
+borgmatic rlist
 ```

-(No borgmatic `list` action? Try the old-style `--list`, or upgrade
-borgmatic!)
+(No borgmatic `rlist` action? Try `list` instead or upgrade borgmatic!)

 That should yield output looking something like:

@@ -201,17 +238,29 @@ databases that share the exact same name on different hosts.
 setting to support dump and restore streaming, you'll need to ensure that any
 special files are excluded from backups (named pipes, block devices,
 character devices, and sockets) to prevent hanging. Try a command like
-`find /your/source/path -type c,b,p,s` to find such files. Common directories
-to exclude are `/dev` and `/run`, but that may not be exhaustive.
+`find /your/source/path -type b -or -type c -or -type p -or -type s` to find
+such files. Common directories to exclude are `/dev` and `/run`, but that may
+not be exhaustive. <span class="minilink minilink-addedin">New in version
+1.7.3</span> When database hooks are enabled, borgmatic automatically excludes
+special files that may cause Borg to hang, so you no longer need to manually
+exclude them. (This includes symlinks with special files as a destination.) You
+can override/prevent this behavior by explicitly setting `read_special` to true.

 ### Manual restoration

 If you prefer to restore a database without the help of borgmatic, first
 [extract](https://torsion.org/borgmatic/docs/how-to/extract-a-backup/) an
-archive containing a database dump, and then manually restore the dump file
-found within the extracted `~/.borgmatic/` path (e.g. with `pg_restore`,
-`mysql`, or `mongorestore`, commands).
+archive containing a database dump.
+
+borgmatic extracts the dump file into the *`username`*`/.borgmatic/` directory
+within the extraction destination path, where *`username`* is the user that
+created the backup. For example, if you created the backup with the `root`
+user and you're extracting to `/tmp`, then the dump will be in
+`/tmp/root/.borgmatic`.
+
+After extraction, you can manually restore the dump file using native database
+commands like `pg_restore`, `mysql`, `mongorestore` or similar.

 ## Preparation and cleanup hooks

@@ -251,3 +300,7 @@ Alternatively, if excluding special files is too onerous, you can create two
 separate borgmatic configuration files—one for your source files and a
 separate one for backing up databases. That way, the database `read_special`
 option will not be active when backing up special files.
+
+<span class="minilink minilink-addedin">New in version 1.7.3</span> See
+Limitations above about borgmatic's automatic exclusion of special files to
+prevent Borg hangs.
@ -1,9 +1,9 @@
|
||||||
---
|
---
|
||||||
title: How to deal with very large backups
|
title: How to deal with very large backups
|
||||||
eleventyNavigation:
|
eleventyNavigation:
|
||||||
key: Deal with very large backups
|
key: 📏 Deal with very large backups
|
||||||
parent: How-to guides
|
parent: How-to guides
|
||||||
order: 3
|
order: 4
|
||||||
---
|
---
|
||||||
## Biggish data
|
## Biggish data
|
||||||
|
|
||||||
|
@ -27,9 +27,6 @@ borgmatic create
|
||||||
borgmatic check
|
borgmatic check
|
||||||
```
|
```
|
||||||
|
|
||||||
(No borgmatic `prune`, `create`, or `check` actions? Try the old-style
|
|
||||||
`--prune`, `--create`, or `--check`. Or upgrade borgmatic!)
|
|
||||||
|
|
||||||
You can run with only one of these actions provided, or you can mix and match
|
You can run with only one of these actions provided, or you can mix and match
|
||||||
any number of them in a single borgmatic run. This supports approaches like
|
any number of them in a single borgmatic run. This supports approaches like
|
||||||
skipping certain actions while running others. For instance, this skips
|
skipping certain actions while running others. For instance, this skips
|
||||||
|
@@ -47,9 +44,11 @@ consistency checks with `check` on a much less frequent basis (e.g. with

### Consistency check configuration

Another option is to customize your consistency checks. By default, if you
omit consistency checks from configuration, borgmatic runs full-repository
checks (`repository`) and per-archive checks (`archives`) within each
repository, no more than once a month. This is equivalent to what `borg check`
does if run without options.

But if you find that archive checks are too slow, for example, you can
configure borgmatic to run repository checks only. Configure this in the
`consistency` section of configuration:
@@ -58,17 +57,62 @@

```yaml
consistency:
    checks:
        - name: repository
```

(Prior to borgmatic 1.6.2, `checks` was a plain list of strings without the
`name:` part.)

Here are the available checks, from fastest to slowest:

* `repository`: Checks the consistency of the repository itself.
* `archives`: Checks all of the archives in the repository.
* `extract`: Performs an extraction dry-run of the most recent archive.
* `data`: Verifies the data integrity of all archives' contents, decrypting
  and decompressing all data.

Note that the `data` check is a more thorough version of the `archives` check,
so enabling the `data` check implicitly enables the `archives` check as well.

See [Borg's check
documentation](https://borgbackup.readthedocs.io/en/stable/usage/check.html)
for more information.
### Check frequency

<span class="minilink minilink-addedin">New in version 1.6.2</span> You can
optionally configure checks to run on a periodic basis rather than every time
borgmatic runs checks. For instance:

```yaml
consistency:
    checks:
        - name: repository
          frequency: 2 weeks
        - name: archives
          frequency: 1 month
```

This tells borgmatic to run the `repository` consistency check at most once
every two weeks for a given repository and the `archives` check at most once a
month. The `frequency` value is a number followed by a unit of time, e.g. "3
days", "1 week", "2 months", etc. The `frequency` defaults to `always`, which
means to run this check every time checks run.

Unlike a real scheduler like cron, borgmatic only makes a best effort to run
checks on the configured frequency. It compares that frequency with how long
it's been since the last check for a given repository (as recorded in a file
within `~/.borgmatic/checks`). If it hasn't been long enough, the check is
skipped. And you still have to run `borgmatic check` (or `borgmatic` without
actions) in order for checks to run, even when a `frequency` is configured!

This also applies *across* configuration files that have the same repository
configured. Make sure you have the same check frequency configured in each
though—or the most frequently configured check will apply.

If you want to temporarily ignore your configured frequencies, you can invoke
`borgmatic check --force` to run checks unconditionally.
### Disabling checks

If that's still too slow, you can disable consistency checks entirely,
either for a single repository or for all repositories.
@@ -78,7 +122,7 @@
Disabling all consistency checks looks like this:

```yaml
consistency:
    checks:
        - name: disabled
```
Or, if you have multiple repositories in your borgmatic configuration file,

@@ -99,7 +143,8 @@

```bash
borgmatic check --only data --only extract
```

This is useful for running slow consistency checks on an infrequent basis,
separate from your regular checks. It is still subject to any configured
check frequencies unless the `--force` flag is used.
## Troubleshooting
@@ -1,9 +1,9 @@
---
title: How to develop on borgmatic
eleventyNavigation:
  key: 🏗️ Develop on borgmatic
  parent: How-to guides
  order: 13
---
## Source code
@@ -1,22 +1,21 @@
---
title: How to extract a backup
eleventyNavigation:
  key: 📤 Extract a backup
  parent: How-to guides
  order: 7
---
## Extract

When the worst happens—or you want to test your backups—the first step is
to figure out which archive to extract. A good way to do that is to use the
`rlist` action:

```bash
borgmatic rlist
```

(No borgmatic `rlist` action? Try `list` instead, or upgrade borgmatic!)

That should yield output looking something like:
@@ -32,10 +31,9 @@
and therefore the latest timestamp, run a command like:

```bash
borgmatic extract --archive host-2019-01-02T04:06:07.080910
```

(No borgmatic `extract` action? Upgrade borgmatic!)

Or simplify this to:

```bash
borgmatic extract --archive latest
```
@@ -43,7 +41,8 @@

The `--archive` value is the name of the archive to extract. This extracts the
entire contents of the archive to the current directory, so make sure you're
in the right place before running the command—or see below about the
`--destination` flag.

## Repository selection
@@ -65,13 +64,15 @@
everything from an archive. To do that, tack on one or more `--path` values.
For instance:

```bash
borgmatic extract --archive latest --path path/1 path/2
```

Note that the specified restore paths should not have a leading slash. Like a
whole-archive extract, this also extracts into the current directory by
default. So for example, if you happen to be in the directory `/var` and you
run the `extract` command above, borgmatic will extract `/var/path/1` and
`/var/path/2`.

## Extract to a particular destination
@@ -80,7 +81,7 @@
extract files to a particular destination directory, use the `--destination`
flag:

```bash
borgmatic extract --archive latest --destination /tmp
```

When using the `--destination` flag, be careful not to overwrite your system's
@@ -104,7 +105,7 @@
archive as a [FUSE](https://en.wikipedia.org/wiki/Filesystem_in_Userspace)
filesystem, you can use the `borgmatic mount` action. Here's an example:

```bash
borgmatic mount --archive latest --mount-point /mnt
```

This mounts the entire archive on the given mount point `/mnt`, so that you
@@ -116,7 +117,7 @@
Omit the `--archive` flag to mount all archives (lazy-loaded):

```bash
borgmatic mount --mount-point /mnt
```

Or use the "latest" value for the archive to mount the latest archive:

```bash
borgmatic mount --archive latest --mount-point /mnt
```
@@ -127,7 +128,7 @@
your archive, use the `--path` flag, similar to the `extract` action above.
For instance:

```bash
borgmatic mount --archive latest --mount-point /mnt --path var/lib
```

When you're all done exploring your files, unmount your mount point. No
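For reference, a minimal sketch of that cleanup step—recent borgmatic versions
provide an `umount` action for exactly this:

```bash
# Unmount the FUSE filesystem that the mount action created at /mnt.
borgmatic umount --mount-point /mnt
```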
@@ -1,9 +1,9 @@
---
title: How to inspect your backups
eleventyNavigation:
  key: 🔎 Inspect your backups
  parent: How-to guides
  order: 5
---
## Backup progress
@@ -37,18 +37,59 @@
borgmatic --stats
```

## Existing backups

borgmatic provides convenient actions for Borg's
[`list`](https://borgbackup.readthedocs.io/en/stable/usage/list.html) and
[`info`](https://borgbackup.readthedocs.io/en/stable/usage/info.html)
functionality:

```bash
borgmatic list
borgmatic info
```

You can change the output format of `borgmatic list` by specifying your own
with `--format`. Refer to the [borg list --format
documentation](https://borgbackup.readthedocs.io/en/stable/usage/list.html#the-format-specifier-syntax)
for available values.
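For instance, one hedged possibility—the format keys here come from Borg's
own specifier syntax, so check the linked documentation for the full set:

```bash
# List the files in the latest archive with mode, owner, size, and path.
borgmatic list --archive latest --format "{mode} {user} {size} {path}{NL}"
```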
*(No borgmatic `list` or `info` actions? Upgrade borgmatic!)*

<span class="minilink minilink-addedin">New in borgmatic version 1.7.0</span>
There are also `rlist` and `rinfo` actions for displaying repository
information with Borg 2.x:

```bash
borgmatic rlist
borgmatic rinfo
```

See the [borgmatic command-line
reference](https://torsion.org/borgmatic/docs/reference/command-line/) for
more information.

### Searching for a file

<span class="minilink minilink-addedin">New in version 1.6.3</span> Let's say
you've accidentally deleted a file and want to find the backup archive(s)
containing it. `borgmatic list` provides a `--find` flag for exactly this
purpose. For instance, if you're looking for a `foo.txt`:

```bash
borgmatic list --find foo.txt
```

This will list your archives and indicate those with files matching
`*foo.txt*` anywhere in the archive. The `--find` parameter can alternatively
be a [Borg
pattern](https://borgbackup.readthedocs.io/en/stable/usage/help.html#borg-patterns).

To limit the archives searched, use the standard `list` parameters for
filtering archives such as `--last`, `--archive`, `--match-archives`, etc. For
example, to search only the last five archives:

```bash
borgmatic list --find foo.txt --last 5
```

## Logging
@@ -1,9 +1,9 @@
---
title: How to make backups redundant
eleventyNavigation:
  key: ☁️ Make backups redundant
  parent: How-to guides
  order: 3
---
## Multiple repositories
@@ -20,8 +20,8 @@

```yaml
location:
    # Paths of local or remote repositories to backup to.
    repositories:
        - ssh://1234@usw-s001.rsync.net/./backups.borg
        - ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
        - /var/lib/backups/local.borg
```
@@ -42,3 +42,13 @@
See [Borg repository URLs
documentation](https://borgbackup.readthedocs.io/en/stable/usage/general.html#repository-urls)
for more information on how to specify local and remote repository paths.

### Different options per repository

What if you want borgmatic to back up to multiple repositories—while also
setting different options for each one? In that case, you'll need to use
[a separate borgmatic configuration file for each
repository](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/)
instead of the multiple repositories in one configuration file as described
above. That's because all of the repositories in a particular configuration
file get the same options applied.
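As a sketch of that approach—the file name, paths, and particular retention
values here are illustrative, not prescribed:

```yaml
# Hypothetical /etc/borgmatic.d/remote.yaml, with retention options that
# differ from a corresponding local-repository configuration file.
location:
    source_directories:
        - /home

    repositories:
        - ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo

retention:
    keep_daily: 7
    keep_monthly: 6
```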
@@ -1,20 +1,22 @@
---
title: How to make per-application backups
eleventyNavigation:
  key: 🔀 Make per-application backups
  parent: How-to guides
  order: 1
---
## Multiple backup configurations

You may find yourself wanting to create different backup policies for
different applications on your system or even for different backup
repositories. For instance, you might want one backup configuration for your
database data directory and a different configuration for your user home
directories. Or one backup configuration for your local backups with a
different configuration for your remote repository.

The way to accomplish that is pretty simple: Create multiple separate
configuration files and place each one in a `/etc/borgmatic.d/` directory. For
instance, for applications:

```bash
sudo mkdir /etc/borgmatic.d
sudo generate-borgmatic-config --destination /etc/borgmatic.d/app1.yaml
sudo generate-borgmatic-config --destination /etc/borgmatic.d/app2.yaml
```

Or, for repositories:

```bash
sudo mkdir /etc/borgmatic.d
sudo generate-borgmatic-config --destination /etc/borgmatic.d/repo1.yaml
sudo generate-borgmatic-config --destination /etc/borgmatic.d/repo2.yaml
```

When you set up multiple configuration files like this, borgmatic will run
each one in turn from a single borgmatic invocation. This includes, by
default, the traditional `/etc/borgmatic/config.yaml` as well.
@@ -29,13 +39,20 @@

Each configuration file is interpreted independently, as if you ran borgmatic
for each configuration file one at a time. In other words, borgmatic does not
perform any merging of configuration files by default. If you'd like borgmatic
to merge your configuration files, for instance to avoid duplication of
settings, see below about configuration includes.

Additionally, the `~/.config/borgmatic.d/` directory works the same way as
`/etc/borgmatic.d`.

If you need even more customizability, you can specify alternate configuration
paths on the command-line with borgmatic's `--config` flag. (See `borgmatic
--help` for more information.) For instance, if you want to schedule your
various borgmatic backups to run at different times, you'll need multiple
entries in your [scheduling software of
choice](https://torsion.org/borgmatic/docs/how-to/set-up-backups/#autopilot),
each entry using borgmatic's `--config` flag instead of relying on
`/etc/borgmatic.d`.
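With cron as the scheduler, that might look something like this sketch—the
file names, install path, and run times are all illustrative:

```bash
# Hypothetical /etc/cron.d/borgmatic entries, one per configuration file,
# each running at a different hour.
0 1 * * * root /root/.local/bin/borgmatic --config /etc/borgmatic.d/app1.yaml
0 3 * * * root /root/.local/bin/borgmatic --config /etc/borgmatic.d/app2.yaml
```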
## Configuration includes
@@ -69,6 +86,10 @@
themselves and complaining that they are not valid configuration files, you
should put them in a directory other than `/etc/borgmatic.d/`. (A subdirectory
is fine.)

When a configuration include is a relative path, borgmatic loads it from either
the current working directory or from the directory containing the file doing
the including.

Note that this form of include must be a YAML value rather than a key. For
example, this will not work:
@@ -80,44 +101,92 @@

```yaml
location:
    !include /etc/borgmatic/common_retention.yaml
```

But if you do want to merge in a YAML key *and* its values, keep reading!

## Include merging

If you need to get even fancier and merge in common configuration options, you
can perform a YAML merge of included configuration using the YAML `<<` key.
For instance, here's an example of a main configuration file that pulls in
retention and consistency options via a single include:

```yaml
<<: !include /etc/borgmatic/common.yaml

location:
    ...
```

This is what `common.yaml` might look like:

```yaml
retention:
    keep_hourly: 24
    keep_daily: 7

consistency:
    checks:
        - name: repository
```

Once this include gets merged in, the resulting configuration would have all
of the `location` options from the original configuration file *and* the
`retention` and `consistency` options from the include.

Prior to borgmatic version 1.6.0, when there's a section collision between the
local file and the merged include, the local file's section takes precedence.
So if the `retention` section appears in both the local file and the include
file, the included `retention` is ignored in favor of the local `retention`.
But see below about deep merge in version 1.6.0+.

Note that this `<<` include merging syntax is only for merging in mappings
(configuration options and their values). But if you'd like to include a
single value directly, please see the section above about standard includes.

Additionally, there is a limitation preventing multiple `<<` include merges
per section. So, for instance, you can do one `<<` merge at the global level,
another `<<` within each configuration section, and so on. (This is a YAML
limitation.)

### Deep merge

<span class="minilink minilink-addedin">New in version 1.6.0</span> borgmatic
performs a deep merge of merged include files, meaning that values are merged
at all levels in the two configuration files. This allows you to include
common configuration—up to full borgmatic configuration files—while overriding
only the parts you want to customize.

For instance, here's an example of a main configuration file that pulls in two
retention options via an include and then overrides one of them locally:

```yaml
<<: !include /etc/borgmatic/common.yaml

location:
    ...

retention:
    keep_daily: 5
```

This is what `common.yaml` might look like:

```yaml
retention:
    keep_hourly: 24
    keep_daily: 7
```

Once this include gets merged in, the resulting configuration would have a
`keep_hourly` value of `24` and an overridden `keep_daily` value of `5`.

When there's an option collision between the local file and the merged
include, the local file's option takes precedence.

<span class="minilink minilink-addedin">New in version 1.6.1</span> Colliding
list values are appended together.

## Configuration overrides
@@ -162,7 +231,14 @@

```bash
borgmatic create --override location.repositories=[test1.borg,test2.borg]
```

Or even a single list element:

```bash
borgmatic create --override location.repositories=[/root/test.borg]
```

If your override value contains special YAML characters like colons, then
you'll need quotes for it to parse correctly:

```bash
borgmatic create --override location.repositories="['user@server:test.borg']"
```

There is not currently a way to override a single element of a list without
@@ -177,3 +253,5 @@
indentation and a leading dash.)

Be sure to quote your overrides if they contain spaces or other characters
that your shell may interpret.

An alternative to command-line overrides is passing in your values via
[environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
@@ -1,9 +1,9 @@
---
title: How to monitor your backups
eleventyNavigation:
  key: 🚨 Monitor your backups
  parent: How-to guides
  order: 6
---

## Monitoring and alerting
@@ -38,17 +38,19 @@
below for how to configure this.

borgmatic integrates with monitoring services like
[Healthchecks](https://healthchecks.io/), [Cronitor](https://cronitor.io),
[Cronhub](https://cronhub.io), [PagerDuty](https://www.pagerduty.com/), and
[ntfy](https://ntfy.sh/) and pings these services whenever borgmatic runs.
That way, you'll receive an alert when something goes wrong or (for certain
hooks) the service doesn't hear from borgmatic for a configured interval. See
[Healthchecks
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#healthchecks-hook),
[Cronitor
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook),
[Cronhub
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook),
[PagerDuty
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pagerduty-hook),
and [ntfy hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook)
below for how to configure this.

While these services offer different features, you probably only need to use
@@ -59,8 +61,6 @@
one of them at most.

You can use traditional monitoring software to consume borgmatic JSON output
and track when the last successful backup occurred. See [scripting
borgmatic](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#scripting-borgmatic)
below for how to configure this.

### Borg hosting providers
@@ -136,7 +136,8 @@
URL" for your project. Here's an example:

```yaml
hooks:
    healthchecks:
        ping_url: https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a
```

With this hook in place, borgmatic pings your Healthchecks project when a
@@ -157,8 +158,11 @@
itself. But the logs are only included for errors that occur when a `prune`,
`compact`, `create`, or `check` action is run.

You can customize the verbosity of the logs that are sent to Healthchecks with
borgmatic's `--monitoring-verbosity` flag. The `--list` and `--stats` flags
may also be of use. See `borgmatic create --help` for more information.
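For instance—a sketch; the particular verbosity levels here are illustrative:

```bash
# Show per-file and summary output locally while sending more verbose logs
# along with the monitoring ping.
borgmatic create --verbosity 1 --monitoring-verbosity 1 --list --stats
```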
Additionally, see the [borgmatic configuration
file](https://torsion.org/borgmatic/docs/reference/configuration/) for
additional Healthchecks options.

You can configure Healthchecks to notify you by a [variety of
mechanisms](https://healthchecks.io/#welcome-integrations) when backups fail
@@ -176,7 +180,8 @@
API URL" for your monitor. Here's an example:

```yaml
hooks:
    cronitor:
        ping_url: https://cronitor.link/d3x0c1
```

With this hook in place, borgmatic pings your Cronitor monitor when a backup
@@ -204,7 +209,8 @@
URL" for your monitor. Here's an example:

```yaml
hooks:
    cronhub:
        ping_url: https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d031
```

With this hook in place, borgmatic pings your Cronhub monitor when a backup
@@ -246,7 +252,8 @@
Here's an example:

```yaml
hooks:
    pagerduty:
        integration_key: a177cad45bd374409f78906a810a3074
```

With this hook in place, borgmatic creates a PagerDuty event for your service
@@ -263,51 +270,69 @@
If you have any issues with the integration, [please contact
us](https://torsion.org/borgmatic/#support-and-contributing).

## ntfy hook

[ntfy](https://ntfy.sh) is a free, simple service (either hosted or
self-hosted) that offers pub/sub push notifications to multiple platforms,
including [web](https://ntfy.sh/stats),
[Android](https://play.google.com/store/apps/details?id=io.heckel.ntfy), and
[iOS](https://apps.apple.com/us/app/ntfy/id1625396347).

Since push notifications for regular events might soon become quite annoying,
by default this hook fires only on errors, in order to alert you to issues
instantly. The `states` list can override this.

As ntfy is unauthenticated, it isn't a suitable channel for private
information, so the default messages are intentionally generic. You can
override them, depending on your risk assessment. Each state can have its own
custom message, priority, and tags; if none are provided, the defaults are
used.

An example configuration is shown here, with all the available options,
including [priorities](https://ntfy.sh/docs/publish/#message-priority) and
[tags](https://ntfy.sh/docs/publish/#tags-emojis):

```yaml
hooks:
    ntfy:
        topic: my-unique-topic
        server: https://ntfy.my-domain.com
        start:
            title: A Borgmatic backup started
            message: Watch this space...
            tags: borgmatic
            priority: min
        finish:
            title: A Borgmatic backup completed successfully
            message: Nice!
            tags: borgmatic,+1
            priority: min
        fail:
            title: A Borgmatic backup failed
            message: You should probably fix it
            tags: borgmatic,-1,skull
            priority: max
        states:
            - start
            - finish
            - fail
```

## Scripting borgmatic
To consume the output of borgmatic in other software, you can include an
optional `--json` flag with `create`, `rlist`, `rinfo`, or `info` to get the
output formatted as JSON.

Note that when you specify the `--json` flag, Borg's other non-JSON output is
suppressed so as not to interfere with the captured JSON. Also note that JSON
output only shows up at the console, and not in syslog.
### Latest backups

All borgmatic actions that accept an `--archive` flag allow you to specify an
archive name of `latest`. This lets you get the latest archive without having
to first run `borgmatic rlist` manually, which can be handy in automated
scripts. Here's an example:

```bash
borgmatic info --archive latest
```
@@ -0,0 +1,90 @@
---
title: How to provide your passwords
eleventyNavigation:
  key: 🔒 Provide your passwords
  parent: How-to guides
  order: 2
---
## Environment variable interpolation

If you want to use a Borg repository passphrase or database passwords with
borgmatic, you can set them directly in your borgmatic configuration file,
treating those secrets like any other option value. But if you'd rather store
them outside of borgmatic, whether for convenience or security reasons, read
on.

<span class="minilink minilink-addedin">New in version 1.6.4</span> borgmatic
supports interpolating arbitrary environment variables directly into option
values in your configuration file. That means you can instruct borgmatic to
pull your repository passphrase, your database passwords, or any other option
values from environment variables. For instance:

```yaml
storage:
    encryption_passphrase: ${MY_PASSPHRASE}
```

This uses the `MY_PASSPHRASE` environment variable as your encryption
passphrase. Note that the `{` `}` brackets are required. `$MY_PASSPHRASE` by
itself will not work.

In the case of `encryption_passphrase` in particular, an alternate approach
is to use Borg's `BORG_PASSPHRASE` environment variable, which doesn't even
require setting an explicit `encryption_passphrase` value in borgmatic's
configuration file.

For [database
configuration](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/),
the same approach applies. For example:

```yaml
hooks:
    postgresql_databases:
        - name: users
          password: ${MY_DATABASE_PASSWORD}
```

This uses the `MY_DATABASE_PASSWORD` environment variable as your database
password.

### Interpolation defaults

If you'd like to set a default for your environment variables, you can do so
with the following syntax:

```yaml
storage:
    encryption_passphrase: ${MY_PASSPHRASE:-defaultpass}
```

Here, "`defaultpass`" is the default passphrase if the `MY_PASSPHRASE`
environment variable is not set. Without a default, if the environment
variable doesn't exist, borgmatic will error.

### Disabling interpolation

To disable this environment variable interpolation feature entirely, you can
pass the `--no-environment-interpolation` flag on the command-line.

Or if you'd like to disable interpolation within a single option value, you
can escape it with a backslash. For instance, if your password is literally
`${A}@!`:

```yaml
storage:
    encryption_passphrase: \${A}@!
```

### Related features

Another way to override particular options within a borgmatic configuration
file is to use a [configuration
override](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides)
on the command-line. But please be aware of the security implications of
specifying secrets on the command-line.

Additionally, borgmatic action hooks support their own [variable
interpolation](https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/#variable-interpolation),
although in that case it's for particular borgmatic runtime values rather than
(only) environment variables.
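Tying the interpolation feature above together, a sketch of how the variable
might actually reach borgmatic—`MY_PASSPHRASE` is the example name from
earlier, and `--preserve-env` assumes a reasonably recent sudo:

```bash
# Export the variable in the environment that runs borgmatic, and keep it
# when escalating with sudo.
export MY_PASSPHRASE='correct horse battery staple'
sudo --preserve-env=MY_PASSPHRASE borgmatic create --verbosity 1
```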
@@ -1,9 +1,9 @@
---
title: How to run arbitrary Borg commands
eleventyNavigation:
  key: 🔧 Run arbitrary Borg commands
  parent: How-to guides
  order: 11
---
## Running Borg with borgmatic
@@ -30,8 +30,8 @@
based on your borgmatic configuration files or command-line arguments:

### borg action

<span class="minilink minilink-addedin">New in version 1.5.15</span> The way
you run Borg with borgmatic is via the `borg` action. Here's a simple example:

```bash
borgmatic borg break-lock
```
@@ -46,12 +46,11 @@
options, as that part is provided by borgmatic.

You can also specify Borg options for relevant commands:

```bash
borgmatic borg rlist --short
```

This runs Borg's `rlist` command once on each configured borgmatic repository.
(The native `borgmatic rlist` action should be preferred for most use.)

What if you only want to run Borg on a single configured borgmatic repository
when you've got several configured? Not a problem.
@@ -63,7 +62,7 @@

```bash
borgmatic borg --repository repo.borg break-lock
```

And what about a single archive?

```bash
borgmatic borg --archive your-archive-name rlist
```
### Limitations

@@ -88,6 +87,9 @@ borgmatic's `borg` action is not without limitations:
borgmatic action. In this case, only the Borg command is run.
* Unlike normal borgmatic actions that support JSON, the `borg` action will
  not disable certain borgmatic logs to avoid interfering with JSON output.
* Unlike other borgmatic actions, the `borg` action captures (and logs) all
  output, so interactive prompts or flags like `--progress` will not work as
  expected.

In general, this `borgmatic borg` feature should be considered an escape
valve—a feature of second resort. In the long run, it's preferable to wrap
@@ -1,7 +1,7 @@
---
title: How to set up backups
eleventyNavigation:
  key: 📥 Set up backups
  parent: How-to guides
  order: 0
---
@@ -28,7 +28,7 @@
sudo pip3 install --user --upgrade borgmatic
```

This installs borgmatic and its commands at the `/root/.local/bin` path.

Your pip binary may have a different name than "pip3". Make sure you're using
Python 3.7+, as borgmatic does not support older versions of Python.

The next step is to ensure that borgmatic's commands are available on your
system `PATH`, so that you can run borgmatic:
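One way to do that—a sketch, assuming borgmatic landed in `/root/.local/bin`
as above:

```bash
# Append borgmatic's install directory to PATH for future shells, then
# reload the current one.
echo 'export PATH="$PATH:/root/.local/bin"' >> ~/.bashrc
source ~/.bashrc
```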
@@ -51,6 +51,11 @@
sudo borgmatic --version
```

If borgmatic is properly installed, that should output your borgmatic version.

As an alternative to adding the path to the `~/.bashrc` file, if you're using
sudo to run borgmatic, you can configure [sudo's `secure_path`
option](https://man.archlinux.org/man/sudoers.5) to include borgmatic's path.
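For instance, a sketch of what that sudoers line might look like—edit it via
`visudo`, and note that the exact paths should match your system and install
location:

```bash
# Hypothetical /etc/sudoers entry appending borgmatic's directory to sudo's
# command search path.
Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/root/.local/bin"
```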
### Global install option
@@ -78,7 +83,7 @@
Besides the approaches described above, there are several other options for
installing borgmatic:

* [Docker image with scheduled backups](https://hub.docker.com/r/b3vis/borgmatic/) (+ Docker Compose files)
* [Docker image with multi-arch and Docker CLI support](https://hub.docker.com/r/modem7/borgmatic-docker/)
* [Debian](https://tracker.debian.org/pkg/borgmatic)
* [Ubuntu](https://launchpad.net/ubuntu/+source/borgmatic)
* [Fedora official](https://bodhi.fedoraproject.org/updates/?search=borgmatic)
@@ -87,8 +92,8 @@
* [Alpine Linux](https://pkgs.alpinelinux.org/packages?name=borgmatic)
* [OpenBSD](http://ports.su/sysutils/borgmatic)
* [openSUSE](https://software.opensuse.org/package/borgmatic)
* [macOS (via Homebrew)](https://formulae.brew.sh/formula/borgmatic)
* [Ansible role](https://github.com/borgbase/ansible-role-borgbackup)
* [virtualenv](https://virtualenv.pypa.io/en/stable/)
@@ -107,6 +112,7 @@
Additionally, [rsync.net](https://www.rsync.net/products/borg.html) and
[Hetzner](https://www.hetzner.com/storage/storage-box) have compatible storage
offerings, but do not currently fund borgmatic development or hosting.

## Configuration

After you install borgmatic, generate a sample configuration file:
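The generation command elided here is presumably the same
`generate-borgmatic-config` used earlier in this document—shown without
`--destination`, so it writes to its default location:

```bash
sudo generate-borgmatic-config
```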
@@ -180,32 +186,39 @@
files via configuration management, or you want to double check that your hand
edits are valid.

## Repository creation

Before you can create backups with borgmatic, you first need to create a Borg
repository so you have a destination for your backup archives. (But skip this
step if you already have a Borg repository.) To create a repository, run a
command like the following with Borg 1.x:

```bash
sudo borgmatic init --encryption repokey
```

<span class="minilink minilink-addedin">New in borgmatic version 1.7.0</span>
Or, with Borg 2.x:

```bash
sudo borgmatic rcreate --encryption repokey-aes-ocb
```

(Note that `repokey-chacha20-poly1305` may be faster than `repokey-aes-ocb` on
certain platforms like ARM64.)

This uses the borgmatic configuration file you created above to determine
which local or remote repository to create, and encrypts it with the
encryption passphrase specified there if one is provided. Read about [Borg
encryption
modes](https://borgbackup.readthedocs.io/en/stable/usage/init.html#encryption-mode-tldr)
for the menu of available encryption modes.

Also, optionally check out the [Borg Quick
Start](https://borgbackup.readthedocs.org/en/stable/quickstart.html) for more
background about repository creation.

Note that borgmatic skips repository creation if the repository already
exists. This supports use cases like ensuring a repository exists prior to
performing a backup.
@@ -215,29 +228,42 @@ key-based SSH access to the desired user account on the remote host.
 
 ## Backups
 
-Now that you've configured borgmatic and initialized a repository, it's a
-good idea to test that borgmatic is working. So to run borgmatic and start a
-backup, you can invoke it like this:
+Now that you've configured borgmatic and created a repository, it's a good
+idea to test that borgmatic is working. So to run borgmatic and start a
+backup, you can invoke it like this:
 
 ```bash
-sudo borgmatic --verbosity 1 --files
+sudo borgmatic create --verbosity 1 --list --stats
 ```
 
-(No borgmatic `--files` flag? It's only present in newer versions of
-borgmatic. So try leaving it out, or upgrade borgmatic!)
+(No borgmatic `--list` flag? Try `--files` instead, leave it out, or upgrade
+borgmatic!)
 
-By default, this will also prune any old backups as per the configured
-retention policy, compact segments to free up space (with Borg 1.2+), and
-check backups for consistency problems due to things like file damage.
-
-The verbosity flag makes borgmatic show the steps it's performing. And the
-files flag lists each file that's new or changed since the last backup.
-Eyeball the list and see if it matches your expectations based on the
-configuration.
+The `--verbosity` flag makes borgmatic show the steps it's performing. The
+`--list` flag lists each file that's new or changed since the last backup. And
+`--stats` shows summary information about the created archive. All of these
+flags are optional.
+
+As the command runs, you should eyeball the output to see if it matches your
+expectations based on your configuration.
 
 If you'd like to specify an alternate configuration file path, use the
-`--config` flag. See `borgmatic --help` for more information.
+`--config` flag.
+
+See `borgmatic --help` and `borgmatic create --help` for more information.
+
+
+## Default actions
+
+If you omit `create` and other actions, borgmatic runs through a set of
+default actions: `prune` any old backups as per the configured retention
+policy, `compact` segments to free up space (with Borg 1.2+), `create` a
+backup, *and* `check` backups for consistency problems due to things like file
+damage. For instance:
+
+```bash
+sudo borgmatic --verbosity 1 --list --stats
+```
 
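The split between the default action chain and an explicit action can be seen
by running both forms back to back; a minimal sketch (the verbosity flag is
optional):

```bash
# Runs the whole default chain: prune, compact (Borg 1.2+), create, check.
sudo borgmatic --verbosity 1
# Runs only the create action from the section above.
sudo borgmatic create --verbosity 1
```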
 ## Autopilot
 
@@ -298,9 +324,43 @@ interested in an [unofficial work-around for Full Disk
 Access](https://projects.torsion.org/borgmatic-collective/borgmatic/issues/293).
 
 
-## Colored output
+## Niceties
+
+
+### Shell completion
+
+borgmatic includes a shell completion script (currently only for Bash) to
+support tab-completing borgmatic command-line actions and flags. Depending on
+how you installed borgmatic, this may be enabled by default. But if it's not,
+start by installing the `bash-completion` Linux package or the
+[`bash-completion@2`](https://formulae.brew.sh/formula/bash-completion@2)
+macOS Homebrew formula. Then, install the shell completion script globally:
+
+```bash
+sudo su -c "borgmatic --bash-completion > $(pkg-config --variable=completionsdir bash-completion)/borgmatic"
+```
+
+If you don't have `pkg-config` installed, you can try the following path
+instead:
+
+```bash
+sudo su -c "borgmatic --bash-completion > /usr/share/bash-completion/completions/borgmatic"
+```
+
+Or, if you'd like to install the script for only the current user:
+
+```bash
+mkdir --parents ~/.local/share/bash-completion/completions
+borgmatic --bash-completion > ~/.local/share/bash-completion/completions/borgmatic
+```
+
+Finally, restart your shell (`exit` and open a new shell) so the completions
+take effect.
+
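Once installed, you can sanity-check that the completion script is registered
in your current shell; a minimal sketch, assuming `bash-completion` is already
sourced:

```bash
# Lists registered completion specs and filters for borgmatic.
complete -p | grep borgmatic
```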
+
+### Colored output
 
-Borgmatic produces colored terminal output by default. It is disabled when a
+borgmatic produces colored terminal output by default. It is disabled when a
 non-interactive terminal is detected (like a cron job), or when you use the
 `--json` flag. Otherwise, you can disable it by passing the `--no-color` flag,
 setting the environment variable `PY_COLORS=False`, or setting the `color`
 
@@ -1,11 +1,11 @@
 ---
-title: How to upgrade borgmatic
+title: How to upgrade borgmatic and Borg
 eleventyNavigation:
-  key: Upgrade borgmatic
+  key: 📦 Upgrade borgmatic/Borg
   parent: How-to guides
-  order: 11
+  order: 12
 ---
-## Upgrading
+## Upgrading borgmatic
 
 In general, all you should need to do to upgrade borgmatic is run the
 following:
 
@@ -115,3 +115,89 @@ sudo pip3 install --user borgmatic
 
 That's it! borgmatic will continue using your /etc/borgmatic configuration
 files.
+
+
+## Upgrading Borg
+
+To upgrade to a new version of Borg, you can generally install a new version
+the same way you installed the previous version, paying attention to any
+instructions included with each Borg release changelog linked from the
+[releases page](https://github.com/borgbackup/borg/releases). Some more major
+Borg releases require additional steps that borgmatic can help with.
+
+
+### Borg 1.2 to 2.0
+
+<span class="minilink minilink-addedin">New in borgmatic version 1.7.0</span>
+Upgrading Borg from 1.2 to 2.0 requires manually upgrading your existing Borg
+1 repositories before use with Borg or borgmatic. Here's how you can
+accomplish that.
+
+Start by upgrading borgmatic as described above to at least version 1.7.0 and
+Borg to 2.0. Then, rename your repository in borgmatic's configuration file to
+a new repository path. The repository upgrade process does not occur
+in-place; you'll create a new repository with a copy of your old repository's
+data.
+
+Let's say your original borgmatic repository configuration file looks something
+like this:
+
+```yaml
+location:
+    repositories:
+        - original.borg
+```
+
+Change it to a new (not yet created) repository path:
+
+```yaml
+location:
+    repositories:
+        - upgraded.borg
+```
+
+Then, run the `rcreate` action (formerly `init`) to create that new Borg 2
+repository:
+
+```bash
+borgmatic rcreate --verbosity 1 --encryption repokey-blake2-aes-ocb \
+    --source-repository original.borg --repository upgraded.borg
+```
+
+This creates an empty repository and doesn't actually transfer any data yet.
+The `--source-repository` flag is necessary to reuse key material from your
+Borg 1 repository so that the subsequent data transfer can work.
+
+The `--encryption` value above selects the same chunk ID algorithm (`blake2`)
+used in Borg 1, thereby making deduplication work across transferred archives
+and new archives. Note that `repokey-blake2-chacha20-poly1305` may be faster
+than `repokey-blake2-aes-ocb` on certain platforms like ARM64. Read about
+[Borg encryption
+modes](https://borgbackup.readthedocs.io/en/2.0.0b4/usage/rcreate.html#encryption-mode-tldr)
+for the menu of available encryption modes.
+
+To transfer data from your original Borg 1 repository to your newly created
+Borg 2 repository:
+
+```bash
+borgmatic transfer --verbosity 1 --upgrader From12To20 --source-repository \
+    original.borg --repository upgraded.borg --dry-run
+borgmatic transfer --verbosity 1 --upgrader From12To20 --source-repository \
+    original.borg --repository upgraded.borg
+borgmatic transfer --verbosity 1 --upgrader From12To20 --source-repository \
+    original.borg --repository upgraded.borg --dry-run
+```
+
+The first command with `--dry-run` tells you what Borg is going to do during
+the transfer, the second command actually performs the transfer/upgrade (this
+might take a while), and the final command with `--dry-run` again provides
+confirmation of success—or tells you if something hasn't been transferred yet.
+
+Note that by omitting the `--upgrader` flag, you can also do archive transfers
+between related Borg 2 repositories without upgrading, even down to individual
+archives. For more on that functionality, see the [Borg transfer
+documentation](https://borgbackup.readthedocs.io/en/2.0.0b4/usage/transfer.html).
+
+That's it! Now you can use your new Borg 2 repository as normal with
+borgmatic. If you've got multiple repositories, repeat the above process for
+each.
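For that multiple-repository case, the same two-step sequence can be scripted.
A hypothetical sketch with illustrative path pairs, which still assumes you've
already pointed borgmatic's configuration at each new path as described above:

```bash
# For each old:new repository pair, create the Borg 2 repository reusing
# the old key material, then transfer the archives into it.
for pair in 'first.borg:first-upgraded.borg' 'second.borg:second-upgraded.borg'; do
    source_repo="${pair%%:*}"
    dest_repo="${pair##*:}"
    borgmatic rcreate --verbosity 1 --encryption repokey-blake2-aes-ocb \
        --source-repository "$source_repo" --repository "$dest_repo"
    borgmatic transfer --verbosity 1 --upgrader From12To20 \
        --source-repository "$source_repo" --repository "$dest_repo"
done
```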
@@ -1,7 +1,7 @@
 ---
 title: Command-line reference
 eleventyNavigation:
-  key: Command-line reference
+  key: ⌨️ Command-line reference
   parent: Reference guides
   order: 1
 ---
 
@@ -1,7 +1,7 @@
 ---
 title: Configuration reference
 eleventyNavigation:
-  key: Configuration reference
+  key: ⚙️ Configuration reference
   parent: Reference guides
   order: 0
 ---
 
Binary file not shown (image added: 10 KiB).
@@ -41,7 +41,7 @@ ProtectSystem=full
 # ReadOnlyPaths=-/var/lib/my_backup_source
 # This will mount a tmpfs on top of /root and pass through needed paths
 # ProtectHome=tmpfs
-# BindPaths=-/root/.cache/borg -/root/.cache/borg -/root/.borgmatic
+# BindPaths=-/root/.cache/borg -/root/.config/borg -/root/.borgmatic
 
 # May interfere with running external programs within borgmatic hooks.
 CapabilityBoundingSet=CAP_DAC_READ_SEARCH CAP_NET_RAW
 
@@ -61,4 +61,4 @@ LogRateLimitIntervalSec=0
 # Delay start to prevent backups running during boot. Note that systemd-inhibit requires dbus and
 # dbus-user-session to be installed.
 ExecStartPre=sleep 1m
-ExecStart=systemd-inhibit --who="borgmatic" --why="Prevent interrupting scheduled backup" /root/.local/bin/borgmatic --verbosity -1 --syslog-verbosity 1
+ExecStart=systemd-inhibit --who="borgmatic" --what="sleep:shutdown" --why="Prevent interrupting scheduled backup" /root/.local/bin/borgmatic --verbosity -1 --syslog-verbosity 1
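systemd-inhibit's default lock set is `idle:sleep:shutdown`, so the added
`--what="sleep:shutdown"` stops the service from also holding the idle lock:
the machine can still go idle during a long backup while sleep and shutdown
stay blocked. To see the lock while a backup runs:

```bash
# Lists active inhibitor locks, including the WHO/WHY/WHAT columns.
systemd-inhibit --list
```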
@@ -4,6 +4,7 @@ Description=Run borgmatic backup
 [Timer]
 OnCalendar=daily
 Persistent=true
+RandomizedDelaySec=3h
 
 [Install]
 WantedBy=timers.target
 
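`RandomizedDelaySec=3h` spreads each daily activation over a random window of
up to three hours, which helps keep many machines from hitting a shared backup
server at the same instant. After editing the unit, you can verify the effect;
a sketch assuming the unit is named `borgmatic.timer`:

```bash
# Reload units and inspect the next scheduled run; the NEXT column
# should reflect the randomized delay.
sudo systemctl daemon-reload
systemctl list-timers borgmatic.timer
```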
@@ -53,6 +53,7 @@ for sub_command in prune create check list info; do
         | grep -v '^--first' \
         | grep -v '^--format' \
         | grep -v '^--glob-archives' \
+        | grep -v '^--match-archives' \
         | grep -v '^--last' \
         | grep -v '^--format' \
         | grep -v '^--patterns-from' \
 
@@ -21,6 +21,12 @@ if [[ $version =~ .*dev* ]]; then
     exit 1
 fi
 
+if ! git diff-index --quiet HEAD -- ; then
+    echo "Refusing to release with local changes:"
+    git status --porcelain
+    exit 1
+fi
+
 git tag $version
 git push origin $version
 git push github $version
 
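The guard works because `git diff-index --quiet HEAD --` exits non-zero
whenever the working tree or index differs from HEAD; easy to confirm in any
repository:

```bash
# Prints 0 in a clean tree and 1 after an uncommitted change.
git diff-index --quiet HEAD -- ; echo "exit status: $?"
```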
@@ -31,8 +37,8 @@ python3 setup.py bdist_wheel
 python3 setup.py sdist
 gpg --detach-sign --armor dist/borgmatic-*.tar.gz
 gpg --detach-sign --armor dist/borgmatic-*-py3-none-any.whl
-twine upload -r pypi dist/borgmatic-*.tar.gz dist/borgmatic-*.tar.gz.asc
-twine upload -r pypi dist/borgmatic-*-py3-none-any.whl dist/borgmatic-*-py3-none-any.whl.asc
+twine upload -r pypi --username __token__ dist/borgmatic-*.tar.gz dist/borgmatic-*.tar.gz.asc
+twine upload -r pypi --username __token__ dist/borgmatic-*-py3-none-any.whl dist/borgmatic-*-py3-none-any.whl.asc
 
 # Set release changelogs on projects.torsion.org and GitHub.
 release_changelog="$(cat NEWS | sed '/^$/q' | grep -v '^\S')"
 
@@ -11,11 +11,11 @@
 set -e
 
 apk add --no-cache python3 py3-pip borgbackup postgresql-client mariadb-client mongodb-tools \
-    py3-ruamel.yaml py3-ruamel.yaml.clib
+    py3-ruamel.yaml py3-ruamel.yaml.clib bash
 # If certain dependencies of black are available in this version of Alpine, install them.
 apk add --no-cache py3-typed-ast py3-regex || true
-python3 -m pip install --no-cache --upgrade pip==22.0.3 setuptools==60.8.1
-pip3 install tox==3.24.5
+python3 -m pip install --no-cache --upgrade pip==22.2.2 setuptools==64.0.1
+pip3 install --ignore-installed tox==3.25.1
 export COVERAGE_FILE=/tmp/.coverage
 tox --workdir /tmp/.tox --sitepackages
 tox --workdir /tmp/.tox --sitepackages -e end-to-end
 
setup.py (6 lines changed)
@@ -1,6 +1,6 @@
 from setuptools import find_packages, setup
 
-VERSION = '1.5.24'
+VERSION = '1.7.6.dev0'
 
 
 setup(
 
@@ -30,12 +30,12 @@ setup(
     },
     obsoletes=['atticmatic'],
     install_requires=(
+        'colorama>=0.4.1,<0.5',
         'jsonschema',
         'requests',
         'ruamel.yaml>0.15.0,<0.18.0',
         'setuptools',
-        'colorama>=0.4.1,<0.5',
     ),
     include_package_data=True,
-    python_requires='>3.7.0',
+    python_requires='>=3.7',
 )
 
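The `python_requires` fix is more than cosmetic: `>3.7.0` excludes Python
3.7.0 itself, while `>=3.7` admits every 3.7.x release. A quick check with the
`packaging` library (assuming it's installed; it's the same specifier logic
pip uses):

```bash
# Prints "False True": 3.7.0 fails the old specifier but passes the new one.
python3 -c "from packaging.specifiers import SpecifierSet as S; print('3.7.0' in S('>3.7.0'), '3.7.0' in S('>=3.7'))"
```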
@@ -14,8 +14,8 @@ py==1.10.0
 pycodestyle==2.8.0
 pyflakes==2.4.0
 jsonschema==3.2.0
-pytest==6.2.5
-pytest-cov==3.0.0
+pytest==7.2.0
+pytest-cov==4.0.0
 regex; python_version >= '3.8'
 requests==2.25.0
 ruamel.yaml>0.15.0,<0.18.0
 
@@ -18,8 +18,9 @@ def generate_configuration(config_path, repository_path):
     config = (
         open(config_path)
         .read()
-        .replace('user@backupserver:sourcehostname.borg', repository_path)
-        .replace('- user@backupserver:{fqdn}', '')
+        .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path)
+        .replace('- ssh://user@backupserver/./{fqdn}', '')
+        .replace('- /var/local/backups/local.borg', '')
         .replace('- /home/user/path with spaces', '')
         .replace('- /home', '- {}'.format(config_path))
         .replace('- /etc', '')
 
@@ -0,0 +1,5 @@
+import subprocess
+
+
+def test_bash_completion_runs_without_error():
+    subprocess.check_call('borgmatic --bash-completion | bash', shell=True)
@@ -9,20 +9,24 @@ import pytest
 
 
 def write_configuration(
-    config_path, repository_path, borgmatic_source_directory, postgresql_dump_format='custom'
+    source_directory,
+    config_path,
+    repository_path,
+    borgmatic_source_directory,
+    postgresql_dump_format='custom',
 ):
     '''
     Write out borgmatic configuration into a file at the config path. Set the options so as to work
     for testing. This includes injecting the given repository path, borgmatic source directory for
     storing database dumps, dump format (for PostgreSQL), and encryption passphrase.
     '''
-    config = '''
+    config = f'''
 location:
     source_directories:
-        - {}
+        - {source_directory}
     repositories:
-        - {}
+        - {repository_path}
-    borgmatic_source_directory: {}
+    borgmatic_source_directory: {borgmatic_source_directory}
 
 storage:
     encryption_passphrase: "test"
 
@@ -33,7 +37,7 @@ hooks:
           hostname: postgresql
           username: postgres
           password: test
-          format: {}
+          format: {postgresql_dump_format}
         - name: all
           hostname: postgresql
           username: postgres
 
@@ -57,9 +61,7 @@ hooks:
           hostname: mongodb
           username: root
           password: test
-'''.format(
-        config_path, repository_path, borgmatic_source_directory, postgresql_dump_format
-    )
+'''
 
     with open(config_path, 'w') as config_file:
         config_file.write(config)
 
@@ -71,11 +73,16 @@ def test_database_dump_and_restore():
     repository_path = os.path.join(temporary_directory, 'test.borg')
     borgmatic_source_directory = os.path.join(temporary_directory, '.borgmatic')
 
+    # Write out a special file to ensure that it gets properly excluded and Borg doesn't hang on it.
+    os.mkfifo(os.path.join(temporary_directory, 'special_file'))
+
     original_working_directory = os.getcwd()
 
     try:
         config_path = os.path.join(temporary_directory, 'test.yaml')
-        write_configuration(config_path, repository_path, borgmatic_source_directory)
+        write_configuration(
+            temporary_directory, config_path, repository_path, borgmatic_source_directory
+        )
 
         subprocess.check_call(
             ['borgmatic', '-v', '2', '--config', config_path, 'init', '--encryption', 'repokey']
 
@@ -114,6 +121,7 @@ def test_database_dump_and_restore_with_directory_format():
     try:
         config_path = os.path.join(temporary_directory, 'test.yaml')
         write_configuration(
+            temporary_directory,
             config_path,
             repository_path,
             borgmatic_source_directory,
 
@@ -146,7 +154,9 @@ def test_database_dump_with_error_causes_borgmatic_to_exit():
 
     try:
         config_path = os.path.join(temporary_directory, 'test.yaml')
-        write_configuration(config_path, repository_path, borgmatic_source_directory)
+        write_configuration(
+            temporary_directory, config_path, repository_path, borgmatic_source_directory
+        )
 
         subprocess.check_call(
             ['borgmatic', '-v', '2', '--config', config_path, 'init', '--encryption', 'repokey']
 
@@ -0,0 +1,16 @@
+import os
+import subprocess
+import tempfile
+
+
+def test_generate_borgmatic_config_with_merging_succeeds():
+    with tempfile.TemporaryDirectory() as temporary_directory:
+        config_path = os.path.join(temporary_directory, 'test.yaml')
+        new_config_path = os.path.join(temporary_directory, 'new.yaml')
+
+        subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
+        subprocess.check_call(
+            f'generate-borgmatic-config --source {config_path} --destination {new_config_path}'.split(
+                ' '
+            )
+        )
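The same source-merging flow the test exercises can be run by hand to bring an
existing configuration file up to the current schema; file names here are
arbitrary:

```bash
# Generate a fresh config, then re-generate it with the old file merged in.
generate-borgmatic-config --destination test.yaml
generate-borgmatic-config --source test.yaml --destination new.yaml
```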
@@ -0,0 +1,56 @@
+import os
+import shutil
+import subprocess
+import tempfile
+
+
+def generate_configuration(config_path, repository_path):
+    '''
+    Generate borgmatic configuration into a file at the config path, and update the defaults so as
+    to work for testing (including injecting the given repository path and tacking on an encryption
+    passphrase).
+    '''
+    subprocess.check_call(
+        'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
+    )
+    config = (
+        open(config_path)
+        .read()
+        .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path)
+        .replace('- ssh://user@backupserver/./{fqdn}', '')
+        .replace('- /var/local/backups/local.borg', '')
+        .replace('- /home/user/path with spaces', '')
+        .replace('- /home', '- {}'.format(config_path))
+        .replace('- /etc', '')
+        .replace('- /var/log/syslog*', '')
+        + 'storage:\n encryption_passphrase: "test"'
+    )
+    config_file = open(config_path, 'w')
+    config_file.write(config)
+    config_file.close()
+
+
+def test_override_get_normalized():
+    temporary_directory = tempfile.mkdtemp()
+    repository_path = os.path.join(temporary_directory, 'test.borg')
+
+    original_working_directory = os.getcwd()
+
+    try:
+        config_path = os.path.join(temporary_directory, 'test.yaml')
+        generate_configuration(config_path, repository_path)
+
+        subprocess.check_call(
+            f'borgmatic -v 2 --config {config_path} init --encryption repokey'.split(' ')
+        )
+
+        # Run borgmatic with an override structured for an outdated config file format. If
+        # normalization is working, it should get normalized and shouldn't error.
+        subprocess.check_call(
+            f'borgmatic create --config {config_path} --override hooks.healthchecks=http://localhost:8888/someuuid'.split(
+                ' '
+            )
+        )
+    finally:
+        os.chdir(original_working_directory)
+        shutil.rmtree(temporary_directory)
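Outside the test harness, the override mechanism looks like this on the
command line; the healthchecks URL is a placeholder:

```bash
# Old-style dotted override path; borgmatic normalizes it to the current
# config format instead of erroring.
borgmatic create --override hooks.healthchecks=http://localhost:8888/someuuid
```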
@@ -107,13 +107,6 @@ def test_parse_arguments_with_list_json_overrides_default():
     assert arguments['list'].json is True
 
 
-def test_parse_arguments_with_dashed_list_json_overrides_default():
-    arguments = module.parse_arguments('--list', '--json')
-
-    assert 'list' in arguments
-    assert arguments['list'].json is True
-
-
 def test_parse_arguments_with_no_actions_defaults_to_all_actions_enabled():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
@@ -127,14 +120,14 @@ def test_parse_arguments_with_no_actions_defaults_to_all_actions_enabled():
 def test_parse_arguments_with_no_actions_passes_argument_to_relevant_actions():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
-    arguments = module.parse_arguments('--stats', '--files')
+    arguments = module.parse_arguments('--stats', '--list')
 
     assert 'prune' in arguments
     assert arguments['prune'].stats
-    assert arguments['prune'].files
+    assert arguments['prune'].list_archives
     assert 'create' in arguments
     assert arguments['create'].stats
-    assert arguments['create'].files
+    assert arguments['create'].list_files
     assert 'check' in arguments
 
|
@ -191,16 +184,6 @@ def test_parse_arguments_with_prune_action_leaves_other_actions_disabled():
|
||||||
assert 'check' not in arguments
|
assert 'check' not in arguments
|
||||||
|
|
||||||
|
|
||||||
def test_parse_arguments_with_dashed_prune_action_leaves_other_actions_disabled():
|
|
||||||
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
|
|
||||||
|
|
||||||
arguments = module.parse_arguments('--prune')
|
|
||||||
|
|
||||||
assert 'prune' in arguments
|
|
||||||
assert 'create' not in arguments
|
|
||||||
assert 'check' not in arguments
|
|
||||||
|
|
||||||
|
|
||||||
def test_parse_arguments_with_multiple_actions_leaves_other_action_disabled():
|
def test_parse_arguments_with_multiple_actions_leaves_other_action_disabled():
|
||||||
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
|
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
|
||||||
|
|
||||||
|
@@ -211,16 +194,6 @@ def test_parse_arguments_with_multiple_actions_leaves_other_action_disabled():
     assert 'check' in arguments
 
 
-def test_parse_arguments_with_multiple_dashed_actions_leaves_other_action_disabled():
-    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
-
-    arguments = module.parse_arguments('--create', '--check')
-
-    assert 'prune' not in arguments
-    assert 'create' in arguments
-    assert 'check' in arguments
-
-
 def test_parse_arguments_with_invalid_arguments_exits():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
@@ -248,12 +221,6 @@ def test_parse_arguments_allows_encryption_mode_with_init():
     module.parse_arguments('--config', 'myconfig', 'init', '--encryption', 'repokey')
 
 
-def test_parse_arguments_allows_encryption_mode_with_dashed_init():
-    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
-
-    module.parse_arguments('--config', 'myconfig', '--init', '--encryption', 'repokey')
-
-
 def test_parse_arguments_requires_encryption_mode_with_init():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
@@ -287,24 +254,6 @@ def test_parse_arguments_allows_init_and_create():
     module.parse_arguments('--config', 'myconfig', 'init', '--encryption', 'repokey', 'create')
 
 
-def test_parse_arguments_disallows_init_and_dry_run():
-    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
-
-    with pytest.raises(ValueError):
-        module.parse_arguments(
-            '--config', 'myconfig', 'init', '--encryption', 'repokey', '--dry-run'
-        )
-
-
-def test_parse_arguments_disallows_glob_archives_with_successful():
-    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
-
-    with pytest.raises(ValueError):
-        module.parse_arguments(
-            '--config', 'myconfig', 'list', '--glob-archives', '*glob*', '--successful'
-        )
-
-
 def test_parse_arguments_disallows_repository_unless_action_consumes_it():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
@@ -370,24 +319,12 @@ def test_parse_arguments_allows_archive_with_mount():
     )
 
 
-def test_parse_arguments_allows_archive_with_dashed_extract():
-    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
-
-    module.parse_arguments('--config', 'myconfig', '--extract', '--archive', 'test')
-
-
 def test_parse_arguments_allows_archive_with_restore():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
     module.parse_arguments('--config', 'myconfig', 'restore', '--archive', 'test')
 
 
-def test_parse_arguments_allows_archive_with_dashed_restore():
-    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
-
-    module.parse_arguments('--config', 'myconfig', '--restore', '--archive', 'test')
-
-
 def test_parse_arguments_allows_archive_with_list():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
@@ -466,23 +403,23 @@ def test_parse_arguments_with_stats_flag_but_no_create_or_prune_flag_raises_valu
         module.parse_arguments('--stats', 'list')
 
 
-def test_parse_arguments_with_files_and_create_flags_does_not_raise():
+def test_parse_arguments_with_list_and_create_flags_does_not_raise():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
-    module.parse_arguments('--files', 'create', 'list')
+    module.parse_arguments('--list', 'create')
 
 
-def test_parse_arguments_with_files_and_prune_flags_does_not_raise():
+def test_parse_arguments_with_list_and_prune_flags_does_not_raise():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
-    module.parse_arguments('--files', 'prune', 'list')
+    module.parse_arguments('--list', 'prune')
 
 
-def test_parse_arguments_with_files_flag_but_no_create_or_prune_or_restore_flag_raises_value_error():
+def test_parse_arguments_with_list_flag_but_no_relevant_action_raises_value_error():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
     with pytest.raises(SystemExit):
-        module.parse_arguments('--files', 'list')
+        module.parse_arguments('--list', 'rcreate')
 
 
 def test_parse_arguments_allows_json_with_list_or_info():
 
@@ -492,12 +429,6 @@ def test_parse_arguments_allows_json_with_list_or_info():
     module.parse_arguments('info', '--json')
 
 
-def test_parse_arguments_allows_json_with_dashed_info():
-    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
-
-    module.parse_arguments('--info', '--json')
-
-
 def test_parse_arguments_disallows_json_with_both_list_and_info():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
@@ -505,6 +436,56 @@ def test_parse_arguments_disallows_json_with_both_list_and_info():
         module.parse_arguments('list', 'info', '--json')
 
 
+def test_parse_arguments_disallows_json_with_both_list_and_rinfo():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(ValueError):
+        module.parse_arguments('list', 'rinfo', '--json')
+
+
+def test_parse_arguments_disallows_json_with_both_rinfo_and_info():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(ValueError):
+        module.parse_arguments('rinfo', 'info', '--json')
+
+
+def test_parse_arguments_disallows_transfer_with_both_archive_and_match_archives():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(ValueError):
+        module.parse_arguments(
+            'transfer',
+            '--source-repository',
+            'source.borg',
+            '--archive',
+            'foo',
+            '--match-archives',
+            'sh:*bar',
+        )
+
+
+def test_parse_arguments_disallows_info_with_both_archive_and_match_archives():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(ValueError):
+        module.parse_arguments('info', '--archive', 'foo', '--match-archives', 'sh:*bar')
+
+
+def test_parse_arguments_disallows_info_with_both_archive_and_prefix():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(ValueError):
+        module.parse_arguments('info', '--archive', 'foo', '--prefix', 'bar')
+
+
+def test_parse_arguments_disallows_info_with_both_prefix_and_match_archives():
+    flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
+
+    with pytest.raises(ValueError):
+        module.parse_arguments('info', '--prefix', 'foo', '--match-archives', 'sh:*bar')
+
+
 def test_parse_arguments_check_only_extract_does_not_raise_extract_subparser_error():
     flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
 
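The `--archive`/`--match-archives` exclusivity asserted above is user-visible;
both select archives, so combining them is rejected. For example, this is
expected to fail with a usage error (names are placeholders):

```bash
borgmatic info --archive foo --match-archives 'sh:*bar'
```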
@@ -0,0 +1,5 @@
+from borgmatic.commands import completion as module
+
+
+def test_bash_completion_does_not_raise():
+    assert module.bash_completion()
@@ -1,13 +1,25 @@
 from borgmatic.commands import generate_config as module
 
 
-def test_parse_arguments_with_no_arguments_uses_defaults():
+def test_parse_arguments_with_no_arguments_uses_default_destination():
     parser = module.parse_arguments()
 
     assert parser.destination_filename == module.DEFAULT_DESTINATION_CONFIG_FILENAME
 
 
-def test_parse_arguments_with_filename_argument_overrides_defaults():
+def test_parse_arguments_with_destination_argument_overrides_default():
     parser = module.parse_arguments('--destination', 'config.yaml')
 
     assert parser.destination_filename == 'config.yaml'
+
+
+def test_parse_arguments_parses_source():
+    parser = module.parse_arguments('--source', 'source.yaml', '--destination', 'config.yaml')
+
+    assert parser.source_filename == 'source.yaml'
+
+
+def test_parse_arguments_parses_overwrite():
+    parser = module.parse_arguments('--destination', 'config.yaml', '--overwrite')
+
+    assert parser.overwrite
@@ -87,7 +87,7 @@ location:
     assert module._comment_out_optional_configuration(config.strip()) == expected_config.strip()
 
 
-def testrender_configuration_converts_configuration_to_yaml_string():
+def test_render_configuration_converts_configuration_to_yaml_string():
     yaml_string = module.render_configuration({'foo': 'bar'})
 
     assert yaml_string == 'foo: bar\n'
 
@@ -110,6 +110,12 @@ def test_write_configuration_with_already_existing_file_raises():
         module.write_configuration('config.yaml', 'config: yaml')
 
 
+def test_write_configuration_with_already_existing_file_and_overwrite_does_not_raise():
+    flexmock(os.path).should_receive('exists').and_return(True)
+
+    module.write_configuration('/tmp/config.yaml', 'config: yaml', overwrite=True)
+
+
 def test_write_configuration_with_already_existing_directory_does_not_raise():
     flexmock(os.path).should_receive('exists').and_return(False)
     flexmock(os).should_receive('makedirs').and_raise(FileExistsError)
 
@@ -212,6 +218,7 @@ def test_generate_sample_configuration_with_source_filename_does_not_raise():
     builtins.should_receive('open').with_args('schema.yaml').and_return('')
     flexmock(module.yaml).should_receive('round_trip_load')
     flexmock(module.load).should_receive('load_configuration')
+    flexmock(module.normalize).should_receive('normalize')
     flexmock(module).should_receive('_schema_to_sample_configuration')
     flexmock(module).should_receive('merge_source_configuration_into_destination')
     flexmock(module).should_receive('render_configuration')
 
@@ -1,3 +1,4 @@
+import io
 import sys
 
 import pytest
 
@@ -14,49 +15,360 @@ def test_load_configuration_parses_contents():
     assert module.load_configuration('config.yaml') == {'key': 'value'}
 
 
-def test_load_configuration_inlines_include():
+def test_load_configuration_inlines_include_relative_to_current_directory():
     builtins = flexmock(sys.modules['builtins'])
-    builtins.should_receive('open').with_args('include.yaml').and_return('value')
-    builtins.should_receive('open').with_args('config.yaml').and_return(
-        'key: !include include.yaml'
-    )
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO('value')
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO('key: !include include.yaml')
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
 
     assert module.load_configuration('config.yaml') == {'key': 'value'}
 
 
+def test_load_configuration_inlines_include_relative_to_config_parent_directory():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').with_args('/etc').and_return(True)
+    flexmock(module.os.path).should_receive('isabs').with_args('/etc/config.yaml').and_return(True)
+    flexmock(module.os.path).should_receive('isabs').with_args('include.yaml').and_return(False)
+    flexmock(module.os.path).should_receive('exists').with_args('/tmp/include.yaml').and_return(
+        False
+    )
+    flexmock(module.os.path).should_receive('exists').with_args('/etc/include.yaml').and_return(
+        True
+    )
+    include_file = io.StringIO('value')
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/etc/include.yaml').and_return(include_file)
+    config_file = io.StringIO('key: !include include.yaml')
+    config_file.name = '/etc/config.yaml'
+    builtins.should_receive('open').with_args('/etc/config.yaml').and_return(config_file)
+
+    assert module.load_configuration('/etc/config.yaml') == {'key': 'value'}
+
+
+def test_load_configuration_raises_if_relative_include_does_not_exist():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').with_args('/etc').and_return(True)
+    flexmock(module.os.path).should_receive('isabs').with_args('/etc/config.yaml').and_return(True)
+    flexmock(module.os.path).should_receive('isabs').with_args('include.yaml').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(False)
+    config_file = io.StringIO('key: !include include.yaml')
+    config_file.name = '/etc/config.yaml'
+    builtins.should_receive('open').with_args('/etc/config.yaml').and_return(config_file)
+
+    with pytest.raises(FileNotFoundError):
+        module.load_configuration('/etc/config.yaml')
+
+
+def test_load_configuration_inlines_absolute_include():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(True)
+    flexmock(module.os.path).should_receive('exists').never()
+    include_file = io.StringIO('value')
+    include_file.name = '/root/include.yaml'
+    builtins.should_receive('open').with_args('/root/include.yaml').and_return(include_file)
+    config_file = io.StringIO('key: !include /root/include.yaml')
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    assert module.load_configuration('config.yaml') == {'key': 'value'}
+
+
+def test_load_configuration_raises_if_absolute_include_does_not_exist():
+    builtins = flexmock(sys.modules['builtins'])
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(True)
+    builtins.should_receive('open').with_args('/root/include.yaml').and_raise(FileNotFoundError)
+    config_file = io.StringIO('key: !include /root/include.yaml')
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
+
+    with pytest.raises(FileNotFoundError):
+        assert module.load_configuration('config.yaml')
+
+
 def test_load_configuration_merges_include():
     builtins = flexmock(sys.modules['builtins'])
-    builtins.should_receive('open').with_args('include.yaml').and_return(
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
         '''
 foo: bar
 baz: quux
 '''
     )
-    builtins.should_receive('open').with_args('config.yaml').and_return(
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
         '''
 foo: override
 <<: !include include.yaml
 '''
     )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
 
     assert module.load_configuration('config.yaml') == {'foo': 'override', 'baz': 'quux'}
 
 
 def test_load_configuration_does_not_merge_include_list():
     builtins = flexmock(sys.modules['builtins'])
-    builtins.should_receive('open').with_args('include.yaml').and_return(
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    include_file = io.StringIO(
         '''
 - one
 - two
 '''
     )
-    builtins.should_receive('open').with_args('config.yaml').and_return(
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
+    config_file = io.StringIO(
         '''
 foo: bar
 repositories:
     <<: !include include.yaml
 '''
     )
+    config_file.name = 'config.yaml'
+    builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
 
     with pytest.raises(ruamel.yaml.error.YAMLError):
         assert module.load_configuration('config.yaml')
 
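The probe order these tests pin down can be seen from the shell: a relative
`!include` is resolved against the current working directory first, then
against the configuration file's own directory. A sketch with illustrative
paths:

```bash
# If config.yaml contains "retention: !include retention.yaml", this run
# picks up /tmp/retention.yaml when it exists, else
# /etc/borgmatic/retention.yaml.
cd /tmp
validate-borgmatic-config --config /etc/borgmatic/config.yaml
```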
||||||
|
def test_deep_merge_nodes_replaces_colliding_scalar_values():
|
||||||
|
node_values = [
|
||||||
|
(
|
||||||
|
ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
|
||||||
|
ruamel.yaml.nodes.MappingNode(
|
||||||
|
tag='tag:yaml.org,2002:map',
|
||||||
|
value=[
|
||||||
|
(
|
||||||
|
ruamel.yaml.nodes.ScalarNode(
|
||||||
|
tag='tag:yaml.org,2002:str', value='keep_hourly'
|
||||||
|
),
|
||||||
|
ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='24'),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
ruamel.yaml.nodes.ScalarNode(
|
||||||
|
tag='tag:yaml.org,2002:str', value='keep_daily'
|
||||||
|
),
|
||||||
|
ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
|
||||||
|
ruamel.yaml.nodes.MappingNode(
|
||||||
|
tag='tag:yaml.org,2002:map',
|
||||||
|
value=[
|
||||||
|
(
|
||||||
|
ruamel.yaml.nodes.ScalarNode(
|
||||||
|
tag='tag:yaml.org,2002:str', value='keep_daily'
|
||||||
|
),
|
||||||
|
ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
result = module.deep_merge_nodes(node_values)
|
||||||
|
assert len(result) == 1
|
||||||
|
(section_key, section_value) = result[0]
|
||||||
|
assert section_key.value == 'retention'
|
||||||
|
options = section_value.value
|
||||||
|
assert len(options) == 2
|
||||||
|
assert options[0][0].value == 'keep_hourly'
|
||||||
|
assert options[0][1].value == '24'
|
||||||
|
assert options[1][0].value == 'keep_daily'
|
||||||
|
assert options[1][1].value == '5'
|
||||||
|
|
||||||
|
|
||||||
|
def test_deep_merge_nodes_keeps_non_colliding_scalar_values():
|
||||||
|
node_values = [
|
||||||
|
(
|
||||||
|
ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
|
||||||
|
ruamel.yaml.nodes.MappingNode(
|
||||||
|
tag='tag:yaml.org,2002:map',
|
||||||
|
value=[
|
||||||
|
(
|
||||||
|
ruamel.yaml.nodes.ScalarNode(
|
||||||
|
tag='tag:yaml.org,2002:str', value='keep_hourly'
|
||||||
|
),
|
||||||
|
+                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='24'),
+                    ),
+                    (
+                        ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_daily'
+                        ),
+                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'),
+                    ),
+                ],
+            ),
+        ),
+        (
+            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
+            ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='keep_minutely'
+                        ),
+                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='10'),
+                    ),
+                ],
+            ),
+        ),
+    ]
+
+    result = module.deep_merge_nodes(node_values)
+    assert len(result) == 1
+    (section_key, section_value) = result[0]
+    assert section_key.value == 'retention'
+    options = section_value.value
+    assert len(options) == 3
+    assert options[0][0].value == 'keep_hourly'
+    assert options[0][1].value == '24'
+    assert options[1][0].value == 'keep_daily'
+    assert options[1][1].value == '7'
+    assert options[2][0].value == 'keep_minutely'
+    assert options[2][1].value == '10'
+
+
+def test_deep_merge_nodes_keeps_deeply_nested_values():
+    node_values = [
+        (
+            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='storage'),
+            ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='lock_wait'
+                        ),
+                        ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'),
+                    ),
+                    (
+                        ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='extra_borg_options'
+                        ),
+                        ruamel.yaml.nodes.MappingNode(
+                            tag='tag:yaml.org,2002:map',
+                            value=[
+                                (
+                                    ruamel.yaml.nodes.ScalarNode(
+                                        tag='tag:yaml.org,2002:str', value='init'
+                                    ),
+                                    ruamel.yaml.nodes.ScalarNode(
+                                        tag='tag:yaml.org,2002:str', value='--init-option'
+                                    ),
+                                ),
+                            ],
+                        ),
+                    ),
+                ],
+            ),
+        ),
+        (
+            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='storage'),
+            ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='extra_borg_options'
+                        ),
+                        ruamel.yaml.nodes.MappingNode(
+                            tag='tag:yaml.org,2002:map',
+                            value=[
+                                (
+                                    ruamel.yaml.nodes.ScalarNode(
+                                        tag='tag:yaml.org,2002:str', value='prune'
+                                    ),
+                                    ruamel.yaml.nodes.ScalarNode(
+                                        tag='tag:yaml.org,2002:str', value='--prune-option'
+                                    ),
+                                ),
+                            ],
+                        ),
+                    ),
+                ],
+            ),
+        ),
+    ]
+
+    result = module.deep_merge_nodes(node_values)
+    assert len(result) == 1
+    (section_key, section_value) = result[0]
+    assert section_key.value == 'storage'
+    options = section_value.value
+    assert len(options) == 2
+    assert options[0][0].value == 'lock_wait'
+    assert options[0][1].value == '5'
+    assert options[1][0].value == 'extra_borg_options'
+    nested_options = options[1][1].value
+    assert len(nested_options) == 2
+    assert nested_options[0][0].value == 'init'
+    assert nested_options[0][1].value == '--init-option'
+    assert nested_options[1][0].value == 'prune'
+    assert nested_options[1][1].value == '--prune-option'
+
+
+def test_deep_merge_nodes_appends_colliding_sequence_values():
+    node_values = [
+        (
+            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
+            ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='before_backup'
+                        ),
+                        ruamel.yaml.nodes.SequenceNode(
+                            tag='tag:yaml.org,2002:int', value=['echo 1', 'echo 2']
+                        ),
+                    ),
+                ],
+            ),
+        ),
+        (
+            ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
+            ruamel.yaml.nodes.MappingNode(
+                tag='tag:yaml.org,2002:map',
+                value=[
+                    (
+                        ruamel.yaml.nodes.ScalarNode(
+                            tag='tag:yaml.org,2002:str', value='before_backup'
+                        ),
+                        ruamel.yaml.nodes.SequenceNode(
+                            tag='tag:yaml.org,2002:int', value=['echo 3', 'echo 4']
+                        ),
+                    ),
+                ],
+            ),
+        ),
+    ]
+
+    result = module.deep_merge_nodes(node_values)
+    assert len(result) == 1
+    (section_key, section_value) = result[0]
+    assert section_key.value == 'hooks'
+    options = section_value.value
+    assert len(options) == 1
+    assert options[0][0].value == 'before_backup'
+    assert options[0][1].value == ['echo 1', 'echo 2', 'echo 3', 'echo 4']
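Aside: the tests above pin down three merge behaviors for colliding configuration nodes: the latest value for a key wins, colliding mappings merge recursively, and colliding sequences append. A minimal sketch of that behavior, assuming only the ruamel.yaml node types used in the fixtures (an illustration, not borgmatic's actual deep_merge_nodes()):

import ruamel.yaml.nodes


def deep_merge_node_pairs(node_pairs):
    # Assumption: this mirrors the tested contract, not the real source. Track
    # each key's latest (key_node, value_node) pair, merging mappings
    # recursively and appending sequence values on collision.
    merged = {}

    for key_node, value_node in node_pairs:
        existing = merged.get(key_node.value)

        if existing and isinstance(existing[1], ruamel.yaml.nodes.MappingNode) and isinstance(
            value_node, ruamel.yaml.nodes.MappingNode
        ):
            value_node = ruamel.yaml.nodes.MappingNode(
                tag=value_node.tag,
                value=deep_merge_node_pairs(existing[1].value + value_node.value),
            )
        elif existing and isinstance(existing[1], ruamel.yaml.nodes.SequenceNode) and isinstance(
            value_node, ruamel.yaml.nodes.SequenceNode
        ):
            value_node = ruamel.yaml.nodes.SequenceNode(
                tag=value_node.tag, value=existing[1].value + value_node.value
            )

        merged[key_node.value] = (key_node, value_node)

    return list(merged.values())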
@@ -21,14 +21,20 @@ def mock_config_and_schema(config_yaml, schema_yaml=None):
     when parsing the configuration.
     '''
     config_stream = io.StringIO(config_yaml)
+    config_stream.name = 'config.yaml'
+
     if schema_yaml is None:
         schema_stream = open(module.schema_filename())
     else:
         schema_stream = io.StringIO(schema_yaml)
+    schema_stream.name = 'schema.yaml'
+
     builtins = flexmock(sys.modules['builtins'])
-    builtins.should_receive('open').with_args('config.yaml').and_return(config_stream)
-    builtins.should_receive('open').with_args('schema.yaml').and_return(schema_stream)
+    flexmock(module.os).should_receive('getcwd').and_return('/tmp')
+    flexmock(module.os.path).should_receive('isabs').and_return(False)
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    builtins.should_receive('open').with_args('/tmp/config.yaml').and_return(config_stream)
+    builtins.should_receive('open').with_args('/tmp/schema.yaml').and_return(schema_stream)


 def test_parse_configuration_transforms_file_into_mapping():
@@ -49,44 +55,44 @@ def test_parse_configuration_transforms_file_into_mapping():

         consistency:
             checks:
-                - repository
-                - archives
+                - name: repository
+                - name: archives
         '''
     )

-    result = module.parse_configuration('config.yaml', 'schema.yaml')
+    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

-    assert result == {
+    assert config == {
         'location': {'source_directories': ['/home', '/etc'], 'repositories': ['hostname.borg']},
         'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60},
-        'consistency': {'checks': ['repository', 'archives']},
+        'consistency': {'checks': [{'name': 'repository'}, {'name': 'archives'}]},
     }
+    assert logs == []


 def test_parse_configuration_passes_through_quoted_punctuation():
     escaped_punctuation = string.punctuation.replace('\\', r'\\').replace('"', r'\"')

     mock_config_and_schema(
-        '''
+        f'''
         location:
             source_directories:
-                - /home
+                - "/home/{escaped_punctuation}"

             repositories:
-                - "{}.borg"
-        '''.format(
-            escaped_punctuation
-        )
+                - test.borg
+        '''
     )

-    result = module.parse_configuration('config.yaml', 'schema.yaml')
+    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

-    assert result == {
+    assert config == {
         'location': {
-            'source_directories': ['/home'],
-            'repositories': ['{}.borg'.format(string.punctuation)],
+            'source_directories': [f'/home/{string.punctuation}'],
+            'repositories': ['test.borg'],
         }
     }
+    assert logs == []


 def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
@@ -115,7 +121,7 @@ def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
         ''',
     )

-    module.parse_configuration('config.yaml', 'schema.yaml')
+    module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')


 def test_parse_configuration_inlines_include():
@@ -133,19 +139,22 @@ def test_parse_configuration_inlines_include():
         '''
     )
     builtins = flexmock(sys.modules['builtins'])
-    builtins.should_receive('open').with_args('include.yaml').and_return(
+    include_file = io.StringIO(
         '''
         keep_daily: 7
         keep_hourly: 24
         '''
     )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)

-    result = module.parse_configuration('config.yaml', 'schema.yaml')
+    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

-    assert result == {
+    assert config == {
         'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']},
         'retention': {'keep_daily': 7, 'keep_hourly': 24},
     }
+    assert logs == []


 def test_parse_configuration_merges_include():
@@ -164,40 +173,43 @@ def test_parse_configuration_merges_include():
         '''
     )
     builtins = flexmock(sys.modules['builtins'])
-    builtins.should_receive('open').with_args('include.yaml').and_return(
+    include_file = io.StringIO(
         '''
         keep_daily: 7
         keep_hourly: 24
         '''
     )
+    include_file.name = 'include.yaml'
+    builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)

-    result = module.parse_configuration('config.yaml', 'schema.yaml')
+    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

-    assert result == {
+    assert config == {
         'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']},
         'retention': {'keep_daily': 1, 'keep_hourly': 24},
     }
+    assert logs == []


 def test_parse_configuration_raises_for_missing_config_file():
     with pytest.raises(FileNotFoundError):
-        module.parse_configuration('config.yaml', 'schema.yaml')
+        module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')


 def test_parse_configuration_raises_for_missing_schema_file():
     mock_config_and_schema('')
     builtins = flexmock(sys.modules['builtins'])
-    builtins.should_receive('open').with_args('schema.yaml').and_raise(FileNotFoundError)
+    builtins.should_receive('open').with_args('/tmp/schema.yaml').and_raise(FileNotFoundError)

     with pytest.raises(FileNotFoundError):
-        module.parse_configuration('config.yaml', 'schema.yaml')
+        module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')


 def test_parse_configuration_raises_for_syntax_error():
     mock_config_and_schema('foo:\nbar')

     with pytest.raises(ValueError):
-        module.parse_configuration('config.yaml', 'schema.yaml')
+        module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')


 def test_parse_configuration_raises_for_validation_error():
@@ -211,7 +223,7 @@ def test_parse_configuration_raises_for_validation_error():
     )

     with pytest.raises(module.Validation_error):
-        module.parse_configuration('config.yaml', 'schema.yaml')
+        module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')


 def test_parse_configuration_applies_overrides():
@@ -228,17 +240,18 @@ def test_parse_configuration_applies_overrides():
         '''
     )

-    result = module.parse_configuration(
-        'config.yaml', 'schema.yaml', overrides=['location.local_path=borg2']
+    config, logs = module.parse_configuration(
+        '/tmp/config.yaml', '/tmp/schema.yaml', overrides=['location.local_path=borg2']
     )

-    assert result == {
+    assert config == {
         'location': {
             'source_directories': ['/home'],
             'repositories': ['hostname.borg'],
             'local_path': 'borg2',
         }
     }
+    assert logs == []


 def test_parse_configuration_applies_normalization():
@@ -255,12 +268,13 @@ def test_parse_configuration_applies_normalization():
         '''
     )

-    result = module.parse_configuration('config.yaml', 'schema.yaml')
+    config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')

-    assert result == {
+    assert config == {
         'location': {
             'source_directories': ['/home'],
             'repositories': ['hostname.borg'],
             'exclude_if_present': ['.nobackup'],
         }
     }
+    assert logs == []
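Aside: the reworked mock_config_and_schema() reflects two apparent behavior changes in parse_configuration(): it now returns a (config, logs) pair rather than just the config, and it seems to resolve relative config and schema paths against the current working directory before opening them (hence the mocked os.getcwd(), os.path.isabs(), and /tmp-prefixed open() expectations). A rough sketch of that path handling under those assumptions; the helper name here is hypothetical:

import os


def resolve_relative_path(path):
    # Assumption based on the mocks above: anchor a relative path at the
    # current working directory; absolute paths pass through untouched.
    return path if os.path.isabs(path) else os.path.join(os.getcwd(), path)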
@@ -54,6 +54,30 @@ def test_log_outputs_skips_logs_for_process_with_none_stdout():
     )


+def test_log_outputs_returns_output_without_logging_for_output_log_level_none():
+    flexmock(module.logger).should_receive('log').never()
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(False)
+
+    hi_process = subprocess.Popen(['echo', 'hi'], stdout=subprocess.PIPE)
+    flexmock(module).should_receive('output_buffer_for_process').with_args(
+        hi_process, ()
+    ).and_return(hi_process.stdout)
+
+    there_process = subprocess.Popen(['echo', 'there'], stdout=subprocess.PIPE)
+    flexmock(module).should_receive('output_buffer_for_process').with_args(
+        there_process, ()
+    ).and_return(there_process.stdout)
+
+    captured_outputs = module.log_outputs(
+        (hi_process, there_process),
+        exclude_stdouts=(),
+        output_log_level=None,
+        borg_local_path='borg',
+    )
+
+    assert captured_outputs == {hi_process: 'hi', there_process: 'there'}
+
+
 def test_log_outputs_includes_error_output_in_exception():
     flexmock(module.logger).should_receive('log')
     flexmock(module).should_receive('exit_code_indicates_error').and_return(True)

@@ -70,6 +94,27 @@ def test_log_outputs_includes_error_output_in_exception():
     assert error.value.output


+def test_log_outputs_logs_multiline_error_output():
+    '''
+    Make sure that all error output lines get logged, not just (for instance) the first few lines
+    of a process' traceback.
+    '''
+    flexmock(module.logger).should_receive('log')
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(True)
+    flexmock(module).should_receive('command_for_process').and_return('grep')
+
+    process = subprocess.Popen(
+        ['python', '-c', 'foopydoo'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+    )
+    flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout)
+    flexmock(module.logger).should_call('log').at_least().times(3)
+
+    with pytest.raises(subprocess.CalledProcessError):
+        module.log_outputs(
+            (process,), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg'
+        )
+
+
 def test_log_outputs_skips_error_output_in_exception_for_process_with_none_stdout():
     flexmock(module.logger).should_receive('log')
     flexmock(module).should_receive('exit_code_indicates_error').and_return(True)

@@ -233,7 +278,7 @@ def test_log_outputs_with_unfinished_process_re_polls():
     flexmock(module).should_receive('exit_code_indicates_error').and_return(False)

     process = subprocess.Popen(['true'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    flexmock(process).should_receive('poll').and_return(None).and_return(0).twice()
+    flexmock(process).should_receive('poll').and_return(None).and_return(0).times(3)
     flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout)

     module.log_outputs(
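Aside: the first new test above fixes the contract for output_log_level=None: instead of logging each line, log_outputs() captures per-process output and returns it to the caller. A hedged sketch of that branch, following the names used in the test rather than the actual implementation:

def handle_output_line(logger, captured_outputs, process, line, output_log_level):
    # Assumption from the asserted behavior: with no log level, collect the
    # line for the caller; otherwise pass it straight to the logger.
    if output_log_level is None:
        captured_outputs.setdefault(process, []).append(line)
    else:
        logger.log(output_log_level, line)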
@@ -8,116 +8,288 @@ from ..test_verbosity import insert_logging_mock


 def test_run_arbitrary_borg_calls_borg_with_parameters():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'break-lock', 'repo'), output_log_level=logging.WARNING, borg_local_path='borg'
+        ('borg', 'break-lock', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
     )

     module.run_arbitrary_borg(
-        repository='repo', storage_config={}, options=['break-lock'],
+        repository='repo', storage_config={}, local_borg_version='1.2.3', options=['break-lock'],
     )


 def test_run_arbitrary_borg_with_log_info_calls_borg_with_info_parameter():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'break-lock', 'repo', '--info'),
-        output_log_level=logging.WARNING,
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )
     insert_logging_mock(logging.INFO)

     module.run_arbitrary_borg(
-        repository='repo', storage_config={}, options=['break-lock'],
+        repository='repo', storage_config={}, local_borg_version='1.2.3', options=['break-lock'],
     )


 def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_parameter():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'break-lock', 'repo', '--debug', '--show-rc'),
-        output_log_level=logging.WARNING,
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )
     insert_logging_mock(logging.DEBUG)

     module.run_arbitrary_borg(
-        repository='repo', storage_config={}, options=['break-lock'],
+        repository='repo', storage_config={}, local_borg_version='1.2.3', options=['break-lock'],
     )


 def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_parameters():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     storage_config = {'lock_wait': 5}
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(()).and_return(
+        ('--lock-wait', '5')
+    )
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'break-lock', 'repo', '--lock-wait', '5'),
-        output_log_level=logging.WARNING,
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )

     module.run_arbitrary_borg(
-        repository='repo', storage_config=storage_config, options=['break-lock'],
+        repository='repo',
+        storage_config=storage_config,
+        local_borg_version='1.2.3',
+        options=['break-lock'],
     )


 def test_run_arbitrary_borg_with_archive_calls_borg_with_archive_parameter():
-    storage_config = {}
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'break-lock', 'repo::archive'),
-        output_log_level=logging.WARNING,
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )

     module.run_arbitrary_borg(
-        repository='repo', storage_config=storage_config, options=['break-lock'], archive='archive',
+        repository='repo',
+        storage_config={},
+        local_borg_version='1.2.3',
+        options=['break-lock'],
+        archive='archive',
     )


 def test_run_arbitrary_borg_with_local_path_calls_borg_via_local_path():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg1', 'break-lock', 'repo'), output_log_level=logging.WARNING, borg_local_path='borg1'
+        ('borg1', 'break-lock', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg1',
+        extra_environment=None,
     )

     module.run_arbitrary_borg(
-        repository='repo', storage_config={}, options=['break-lock'], local_path='borg1',
+        repository='repo',
+        storage_config={},
+        local_borg_version='1.2.3',
+        options=['break-lock'],
+        local_path='borg1',
     )


 def test_run_arbitrary_borg_with_remote_path_calls_borg_with_remote_path_parameters():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(
+        ('--remote-path', 'borg1')
+    ).and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'break-lock', 'repo', '--remote-path', 'borg1'),
-        output_log_level=logging.WARNING,
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )

     module.run_arbitrary_borg(
-        repository='repo', storage_config={}, options=['break-lock'], remote_path='borg1',
+        repository='repo',
+        storage_config={},
+        local_borg_version='1.2.3',
+        options=['break-lock'],
+        remote_path='borg1',
     )


 def test_run_arbitrary_borg_passes_borg_specific_parameters_to_borg():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'list', 'repo', '--progress'),
-        output_log_level=logging.WARNING,
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )

     module.run_arbitrary_borg(
-        repository='repo', storage_config={}, options=['list', '--progress'],
+        repository='repo',
+        storage_config={},
+        local_borg_version='1.2.3',
+        options=['list', '--progress'],
     )


 def test_run_arbitrary_borg_omits_dash_dash_in_parameters_passed_to_borg():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'break-lock', 'repo'), output_log_level=logging.WARNING, borg_local_path='borg',
+        ('borg', 'break-lock', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
     )

     module.run_arbitrary_borg(
-        repository='repo', storage_config={}, options=['--', 'break-lock'],
+        repository='repo',
+        storage_config={},
+        local_borg_version='1.2.3',
+        options=['--', 'break-lock'],
     )


 def test_run_arbitrary_borg_without_borg_specific_parameters_does_not_raise():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').never()
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg',), output_log_level=logging.WARNING, borg_local_path='borg',
+        ('borg',),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
     )

     module.run_arbitrary_borg(
-        repository='repo', storage_config={}, options=[],
+        repository='repo', storage_config={}, local_borg_version='1.2.3', options=[],
+    )
+
+
+def test_run_arbitrary_borg_passes_key_sub_command_to_borg_before_repository():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command').with_args(
+        ('borg', 'key', 'export', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
+    )
+
+    module.run_arbitrary_borg(
+        repository='repo', storage_config={}, local_borg_version='1.2.3', options=['key', 'export'],
+    )
+
+
+def test_run_arbitrary_borg_passes_debug_sub_command_to_borg_before_repository():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command').with_args(
+        ('borg', 'debug', 'dump-manifest', 'repo', 'path'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
+    )
+
+    module.run_arbitrary_borg(
+        repository='repo',
+        storage_config={},
+        local_borg_version='1.2.3',
+        options=['debug', 'dump-manifest', 'path'],
+    )
+
+
+def test_run_arbitrary_borg_with_debug_info_command_does_not_pass_borg_repository():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').never()
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command').with_args(
+        ('borg', 'debug', 'info'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
+    )
+
+    module.run_arbitrary_borg(
+        repository='repo', storage_config={}, local_borg_version='1.2.3', options=['debug', 'info'],
+    )
+
+
+def test_run_arbitrary_borg_with_debug_convert_profile_command_does_not_pass_borg_repository():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_repository_flags').never()
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command').with_args(
+        ('borg', 'debug', 'convert-profile', 'in', 'out'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
+    )
+
+    module.run_arbitrary_borg(
+        repository='repo',
+        storage_config={},
+        local_borg_version='1.2.3',
+        options=['debug', 'convert-profile', 'in', 'out'],
     )
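Aside: the new tests at the end of this hunk encode where run_arbitrary_borg() splices in the repository flags: after two-word sub-commands like "borg key export" and "borg debug dump-manifest", and not at all for "borg debug info" or "borg debug convert-profile". A simplified sketch of that ordering, inferred from the mocked expectations (the constant and function names here are invented for illustration):

# Assumption: these names and this structure approximate the tested behavior.
BORG_SUBCOMMANDS_WITH_SUBCOMMAND = {'key', 'debug'}
BORG_SUBCOMMANDS_WITHOUT_REPOSITORY = {('debug', 'info'), ('debug', 'convert-profile')}


def arrange_borg_command(options, repository_flags):
    # The tests also say any "--" end-of-options marker gets dropped.
    options = [option for option in options if option != '--']

    if not options:
        return ('borg',)
    if tuple(options[:2]) in BORG_SUBCOMMANDS_WITHOUT_REPOSITORY:
        return ('borg', *options)
    if options[0] in BORG_SUBCOMMANDS_WITH_SUBCOMMAND:
        # Repository goes after the full two-word sub-command.
        return ('borg', *options[:2], *repository_flags, *options[2:])

    # Default: repository goes right after the first sub-command word.
    return ('borg', options[0], *repository_flags, *options[1:])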
@@ -0,0 +1,70 @@
+import logging
+
+from flexmock import flexmock
+
+from borgmatic.borg import break_lock as module
+
+from ..test_verbosity import insert_logging_mock
+
+
+def insert_execute_command_mock(command):
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command').with_args(
+        command, borg_local_path='borg', extra_environment=None,
+    ).once()
+
+
+def test_break_lock_calls_borg_with_required_flags():
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    insert_execute_command_mock(('borg', 'break-lock', 'repo'))
+
+    module.break_lock(
+        repository='repo', storage_config={}, local_borg_version='1.2.3',
+    )
+
+
+def test_break_lock_calls_borg_with_remote_path_flags():
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    insert_execute_command_mock(('borg', 'break-lock', '--remote-path', 'borg1', 'repo'))
+
+    module.break_lock(
+        repository='repo', storage_config={}, local_borg_version='1.2.3', remote_path='borg1',
+    )
+
+
+def test_break_lock_calls_borg_with_umask_flags():
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    insert_execute_command_mock(('borg', 'break-lock', '--umask', '0770', 'repo'))
+
+    module.break_lock(
+        repository='repo', storage_config={'umask': '0770'}, local_borg_version='1.2.3',
+    )
+
+
+def test_break_lock_calls_borg_with_lock_wait_flags():
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    insert_execute_command_mock(('borg', 'break-lock', '--lock-wait', '5', 'repo'))
+
+    module.break_lock(
+        repository='repo', storage_config={'lock_wait': '5'}, local_borg_version='1.2.3',
+    )
+
+
+def test_break_lock_with_log_info_calls_borg_with_info_parameter():
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    insert_execute_command_mock(('borg', 'break-lock', '--info', 'repo'))
+    insert_logging_mock(logging.INFO)
+
+    module.break_lock(
+        repository='repo', storage_config={}, local_borg_version='1.2.3',
+    )
+
+
+def test_break_lock_with_log_debug_calls_borg_with_debug_flags():
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
+    insert_execute_command_mock(('borg', 'break-lock', '--debug', '--show-rc', 'repo'))
+    insert_logging_mock(logging.DEBUG)
+
+    module.break_lock(
+        repository='repo', storage_config={}, local_borg_version='1.2.3',
+    )
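Aside: from the mocked expectations in this new test file, the break_lock module plausibly assembles its command line in the order: remote path, umask, lock wait, verbosity flags, then the repository. A sketch of that assembly, hedged as an approximation of what the tests require rather than the verbatim source:

import logging


def build_break_lock_command(repository, storage_config, local_path='borg', remote_path=None):
    # Assumption: flag order and the --info/--debug split mirror the tests.
    umask = storage_config.get('umask')
    lock_wait = storage_config.get('lock_wait')
    debug_enabled = logging.getLogger().isEnabledFor(logging.DEBUG)
    info_enabled = logging.getLogger().isEnabledFor(logging.INFO)

    return (
        (local_path, 'break-lock')
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--info',) if info_enabled and not debug_enabled else ())
        + (('--debug', '--show-rc') if debug_enabled else ())
        + (repository,)
    )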
@@ -9,7 +9,10 @@ from ..test_verbosity import insert_logging_mock


 def insert_execute_command_mock(command):
-    flexmock(module).should_receive('execute_command').with_args(command).once()
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command').with_args(
+        command, extra_environment=None
+    ).once()


 def insert_execute_command_never():
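Aside: the extra_environment=None expectation threaded through these mocks suggests execute_command() now takes environment variables (produced by environment.make_environment()) to overlay onto the process environment. A hypothetical illustration of that call shape, not the actual borgmatic function:

import os
import subprocess


def execute_command(command, extra_environment=None):
    # Assumption: merge any Borg-specific variables (e.g. passphrase settings)
    # over the inherited environment before spawning the command.
    environment = {**os.environ, **extra_environment} if extra_environment else None
    return subprocess.run(command, env=environment, check=True)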
@ -17,172 +20,392 @@ def insert_execute_command_never():
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_returns_them_as_tuple():
|
def test_parse_checks_returns_them_as_tuple():
|
||||||
checks = module._parse_checks({'checks': ['foo', 'disabled', 'bar']})
|
checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'bar'}]})
|
||||||
|
|
||||||
assert checks == ('foo', 'bar')
|
assert checks == ('foo', 'bar')
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_with_missing_value_returns_defaults():
|
def test_parse_checks_with_missing_value_returns_defaults():
|
||||||
checks = module._parse_checks({})
|
checks = module.parse_checks({})
|
||||||
|
|
||||||
assert checks == module.DEFAULT_CHECKS
|
assert checks == ('repository', 'archives')
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_with_blank_value_returns_defaults():
|
def test_parse_checks_with_empty_list_returns_defaults():
|
||||||
checks = module._parse_checks({'checks': []})
|
checks = module.parse_checks({'checks': []})
|
||||||
|
|
||||||
assert checks == module.DEFAULT_CHECKS
|
assert checks == ('repository', 'archives')
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_with_none_value_returns_defaults():
|
def test_parse_checks_with_none_value_returns_defaults():
|
||||||
checks = module._parse_checks({'checks': None})
|
checks = module.parse_checks({'checks': None})
|
||||||
|
|
||||||
assert checks == module.DEFAULT_CHECKS
|
assert checks == ('repository', 'archives')
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_with_disabled_returns_no_checks():
|
def test_parse_checks_with_disabled_returns_no_checks():
|
||||||
checks = module._parse_checks({'checks': ['disabled']})
|
checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'disabled'}]})
|
||||||
|
|
||||||
assert checks == ()
|
assert checks == ()
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_with_data_check_also_injects_archives():
|
|
||||||
checks = module._parse_checks({'checks': ['data']})
|
|
||||||
|
|
||||||
assert checks == ('data', 'archives')
|
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_with_data_check_passes_through_archives():
|
|
||||||
checks = module._parse_checks({'checks': ['data', 'archives']})
|
|
||||||
|
|
||||||
assert checks == ('data', 'archives')
|
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_prefers_override_checks_to_configured_checks():
|
def test_parse_checks_prefers_override_checks_to_configured_checks():
|
||||||
checks = module._parse_checks({'checks': ['archives']}, only_checks=['repository', 'extract'])
|
checks = module.parse_checks(
|
||||||
|
{'checks': [{'name': 'archives'}]}, only_checks=['repository', 'extract']
|
||||||
|
)
|
||||||
|
|
||||||
assert checks == ('repository', 'extract')
|
assert checks == ('repository', 'extract')
|
||||||
|
|
||||||
|
|
||||||
def test_parse_checks_with_override_data_check_also_injects_archives():
|
@pytest.mark.parametrize(
|
||||||
checks = module._parse_checks({'checks': ['extract']}, only_checks=['data'])
|
'frequency,expected_result',
|
||||||
|
(
|
||||||
|
(None, None),
|
||||||
|
('always', None),
|
||||||
|
('1 hour', module.datetime.timedelta(hours=1)),
|
||||||
|
('2 hours', module.datetime.timedelta(hours=2)),
|
||||||
|
('1 day', module.datetime.timedelta(days=1)),
|
||||||
|
('2 days', module.datetime.timedelta(days=2)),
|
||||||
|
('1 week', module.datetime.timedelta(weeks=1)),
|
||||||
|
('2 weeks', module.datetime.timedelta(weeks=2)),
|
||||||
|
('1 month', module.datetime.timedelta(days=30)),
|
||||||
|
('2 months', module.datetime.timedelta(days=60)),
|
||||||
|
('1 year', module.datetime.timedelta(days=365)),
|
||||||
|
('2 years', module.datetime.timedelta(days=365 * 2)),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
def test_parse_frequency_parses_into_timedeltas(frequency, expected_result):
|
||||||
|
assert module.parse_frequency(frequency) == expected_result
|
||||||
|
|
||||||
assert checks == ('data', 'archives')
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'frequency', ('sometime', 'x days', '3 decades',),
|
||||||
|
)
|
||||||
|
def test_parse_frequency_raises_on_parse_error(frequency):
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
module.parse_frequency(frequency)
|
||||||
|
|
||||||
|
|
||||||
|
def test_filter_checks_on_frequency_without_config_uses_default_checks():
|
||||||
|
flexmock(module).should_receive('parse_frequency').and_return(
|
||||||
|
module.datetime.timedelta(weeks=4)
|
||||||
|
)
|
||||||
|
flexmock(module).should_receive('make_check_time_path')
|
||||||
|
flexmock(module).should_receive('read_check_time').and_return(None)
|
||||||
|
|
||||||
|
assert module.filter_checks_on_frequency(
|
||||||
|
location_config={},
|
||||||
|
consistency_config={},
|
||||||
|
borg_repository_id='repo',
|
||||||
|
checks=('repository', 'archives'),
|
||||||
|
force=False,
|
||||||
|
) == ('repository', 'archives')
|
||||||
|
|
||||||
|
|
||||||
|
def test_filter_checks_on_frequency_retains_unconfigured_check():
|
||||||
|
assert module.filter_checks_on_frequency(
|
||||||
|
location_config={},
|
||||||
|
consistency_config={},
|
||||||
|
borg_repository_id='repo',
|
||||||
|
checks=('data',),
|
||||||
|
force=False,
|
||||||
|
) == ('data',)
|
||||||
|
|
||||||
|
|
||||||
|
def test_filter_checks_on_frequency_retains_check_without_frequency():
|
||||||
|
flexmock(module).should_receive('parse_frequency').and_return(None)
|
||||||
|
|
||||||
|
assert module.filter_checks_on_frequency(
|
||||||
|
location_config={},
|
||||||
|
consistency_config={'checks': [{'name': 'archives'}]},
|
||||||
|
borg_repository_id='repo',
|
||||||
|
checks=('archives',),
|
||||||
|
force=False,
|
||||||
|
) == ('archives',)
|
||||||
|
|
||||||
|
|
||||||
|
def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency():
|
||||||
|
flexmock(module).should_receive('parse_frequency').and_return(
|
||||||
|
module.datetime.timedelta(hours=1)
|
||||||
|
)
|
||||||
|
flexmock(module).should_receive('make_check_time_path')
|
||||||
|
flexmock(module).should_receive('read_check_time').and_return(
|
||||||
|
module.datetime.datetime(year=module.datetime.MINYEAR, month=1, day=1)
|
||||||
|
)
|
||||||
|
|
||||||
|
assert module.filter_checks_on_frequency(
|
||||||
|
location_config={},
|
||||||
|
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
|
||||||
|
borg_repository_id='repo',
|
||||||
|
checks=('archives',),
|
||||||
|
force=False,
|
||||||
|
) == ('archives',)
|
||||||
|
|
||||||
|
|
||||||
|
def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file():
|
||||||
|
flexmock(module).should_receive('parse_frequency').and_return(
|
||||||
|
module.datetime.timedelta(hours=1)
|
||||||
|
)
|
||||||
|
flexmock(module).should_receive('make_check_time_path')
|
||||||
|
flexmock(module).should_receive('read_check_time').and_return(None)
|
||||||
|
|
||||||
|
assert module.filter_checks_on_frequency(
|
||||||
|
location_config={},
|
||||||
|
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
|
||||||
|
borg_repository_id='repo',
|
||||||
|
checks=('archives',),
|
||||||
|
force=False,
|
||||||
|
) == ('archives',)
|
||||||
|
|
||||||
|
|
||||||
|
def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
|
||||||
|
flexmock(module).should_receive('parse_frequency').and_return(
|
||||||
|
module.datetime.timedelta(hours=1)
|
||||||
|
)
|
||||||
|
flexmock(module).should_receive('make_check_time_path')
|
||||||
|
flexmock(module).should_receive('read_check_time').and_return(module.datetime.datetime.now())
|
||||||
|
|
||||||
|
assert (
|
||||||
|
module.filter_checks_on_frequency(
|
||||||
|
location_config={},
|
||||||
|
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
|
||||||
|
borg_repository_id='repo',
|
||||||
|
checks=('archives',),
|
||||||
|
force=False,
|
||||||
|
)
|
||||||
|
== ()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force():
|
||||||
|
assert module.filter_checks_on_frequency(
|
||||||
|
location_config={},
|
||||||
|
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
|
||||||
|
borg_repository_id='repo',
|
||||||
|
checks=('archives',),
|
||||||
|
force=True,
|
||||||
|
) == ('archives',)
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_repository_check_returns_flag():
|
def test_make_check_flags_with_repository_check_returns_flag():
|
||||||
flags = module._make_check_flags(('repository',))
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('repository',))
|
||||||
|
|
||||||
assert flags == ('--repository-only',)
|
assert flags == ('--repository-only',)
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_archives_check_returns_flag():
|
def test_make_check_flags_with_archives_check_returns_flag():
|
||||||
flags = module._make_check_flags(('archives',))
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('archives',))
|
||||||
|
|
||||||
assert flags == ('--archives-only',)
|
assert flags == ('--archives-only',)
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_data_check_returns_flag():
|
def test_make_check_flags_with_data_check_returns_flag_and_implies_archives():
|
||||||
flags = module._make_check_flags(('data',))
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
assert flags == ('--verify-data',)
|
flags = module.make_check_flags('1.2.3', ('data',))
|
||||||
|
|
||||||
|
assert flags == ('--archives-only', '--verify-data',)
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_extract_omits_extract_flag():
|
def test_make_check_flags_with_extract_omits_extract_flag():
|
||||||
flags = module._make_check_flags(('extract',))
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('extract',))
|
||||||
|
|
||||||
assert flags == ()
|
assert flags == ()
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_default_checks_and_default_prefix_returns_default_flags():
|
def test_make_check_flags_with_repository_and_data_checks_does_not_return_repository_only():
|
||||||
flags = module._make_check_flags(module.DEFAULT_CHECKS, prefix=module.DEFAULT_PREFIX)
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
assert flags == ('--prefix', module.DEFAULT_PREFIX)
|
flags = module.make_check_flags('1.2.3', ('repository', 'data',))
|
||||||
|
|
||||||
|
assert flags == ('--verify-data',)
|
||||||
|
|
||||||
|
|
||||||
|
def test_make_check_flags_with_default_checks_and_default_prefix_returns_default_flags():
|
||||||
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags(
|
||||||
|
'1.2.3', ('repository', 'archives'), prefix=module.DEFAULT_PREFIX
|
||||||
|
)
|
||||||
|
|
||||||
|
assert flags == ('--match-archives', f'sh:{module.DEFAULT_PREFIX}*')
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_all_checks_and_default_prefix_returns_default_flags():
|
def test_make_check_flags_with_all_checks_and_default_prefix_returns_default_flags():
|
||||||
flags = module._make_check_flags(
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
module.DEFAULT_CHECKS + ('extract',), prefix=module.DEFAULT_PREFIX
|
|
||||||
|
flags = module.make_check_flags(
|
||||||
|
'1.2.3', ('repository', 'archives', 'extract'), prefix=module.DEFAULT_PREFIX
|
||||||
)
|
)
|
||||||
|
|
||||||
assert flags == ('--prefix', module.DEFAULT_PREFIX)
|
assert flags == ('--match-archives', f'sh:{module.DEFAULT_PREFIX}*')
|
||||||
|
|
||||||
|
|
||||||
|
def test_make_check_flags_with_all_checks_and_default_prefix_without_borg_features_returns_glob_archives_flags():
|
||||||
|
flexmock(module.feature).should_receive('available').and_return(False)
|
||||||
|
|
||||||
|
flags = module.make_check_flags(
|
||||||
|
'1.2.3', ('repository', 'archives', 'extract'), prefix=module.DEFAULT_PREFIX
|
||||||
|
)
|
||||||
|
|
||||||
|
assert flags == ('--glob-archives', f'{module.DEFAULT_PREFIX}*')
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_archives_check_and_last_includes_last_flag():
|
def test_make_check_flags_with_archives_check_and_last_includes_last_flag():
|
||||||
flags = module._make_check_flags(('archives',), check_last=3)
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('archives',), check_last=3)
|
||||||
|
|
||||||
assert flags == ('--archives-only', '--last', '3')
|
assert flags == ('--archives-only', '--last', '3')
|
||||||
|
|
||||||
|
|
||||||
|
def test_make_check_flags_with_data_check_and_last_includes_last_flag():
|
||||||
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('data',), check_last=3)
|
||||||
|
|
||||||
|
assert flags == ('--archives-only', '--last', '3', '--verify-data')
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_repository_check_and_last_omits_last_flag():
|
def test_make_check_flags_with_repository_check_and_last_omits_last_flag():
|
||||||
flags = module._make_check_flags(('repository',), check_last=3)
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('repository',), check_last=3)
|
||||||
|
|
||||||
assert flags == ('--repository-only',)
|
assert flags == ('--repository-only',)
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_default_checks_and_last_includes_last_flag():
|
def test_make_check_flags_with_default_checks_and_last_includes_last_flag():
|
||||||
flags = module._make_check_flags(module.DEFAULT_CHECKS, check_last=3)
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('repository', 'archives'), check_last=3)
|
||||||
|
|
||||||
assert flags == ('--last', '3')
|
assert flags == ('--last', '3')
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_archives_check_and_prefix_includes_prefix_flag():
|
def test_make_check_flags_with_archives_check_and_prefix_includes_match_archives_flag():
|
||||||
flags = module._make_check_flags(('archives',), prefix='foo-')
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
assert flags == ('--archives-only', '--prefix', 'foo-')
|
flags = module.make_check_flags('1.2.3', ('archives',), prefix='foo-')
|
||||||
|
|
||||||
|
assert flags == ('--archives-only', '--match-archives', 'sh:foo-*')
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_archives_check_and_empty_prefix_omits_prefix_flag():
|
def test_make_check_flags_with_data_check_and_prefix_includes_match_archives_flag():
|
||||||
flags = module._make_check_flags(('archives',), prefix='')
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('data',), prefix='foo-')
|
||||||
|
|
||||||
|
assert flags == ('--archives-only', '--match-archives', 'sh:foo-*', '--verify-data')
|
||||||
|
|
||||||
|
|
||||||
|
def test_make_check_flags_with_archives_check_and_empty_prefix_omits_match_archives_flag():
|
||||||
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('archives',), prefix='')
|
||||||
|
|
||||||
assert flags == ('--archives-only',)
|
assert flags == ('--archives-only',)
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_archives_check_and_none_prefix_omits_prefix_flag():
|
def test_make_check_flags_with_archives_check_and_none_prefix_omits_match_archives_flag():
|
||||||
flags = module._make_check_flags(('archives',), prefix=None)
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('archives',), prefix=None)
|
||||||
|
|
||||||
assert flags == ('--archives-only',)
|
assert flags == ('--archives-only',)
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_repository_check_and_prefix_omits_prefix_flag():
|
def test_make_check_flags_with_repository_check_and_prefix_omits_match_archives_flag():
|
||||||
flags = module._make_check_flags(('repository',), prefix='foo-')
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
|
flags = module.make_check_flags('1.2.3', ('repository',), prefix='foo-')
|
||||||
|
|
||||||
assert flags == ('--repository-only',)
|
assert flags == ('--repository-only',)
|
||||||
|
|
||||||
|
|
||||||
def test_make_check_flags_with_default_checks_and_prefix_includes_prefix_flag():
|
def test_make_check_flags_with_default_checks_and_prefix_includes_match_archives_flag():
|
||||||
flags = module._make_check_flags(module.DEFAULT_CHECKS, prefix='foo-')
|
flexmock(module.feature).should_receive('available').and_return(True)
|
||||||
|
|
||||||
assert flags == ('--prefix', 'foo-')
|
flags = module.make_check_flags('1.2.3', ('repository', 'archives'), prefix='foo-')
|
||||||
|
|
||||||
|
assert flags == ('--match-archives', 'sh:foo-*')
|
||||||
|
|
||||||
|
|
||||||
|
def test_read_check_time_does_not_raise():
|
||||||
|
flexmock(module.os).should_receive('stat').and_return(flexmock(st_mtime=123))
|
||||||
|
|
||||||
|
assert module.read_check_time('/path')
|
||||||
|
|
||||||
|
|
||||||
|
def test_read_check_time_on_missing_file_does_not_raise():
|
||||||
|
flexmock(module.os).should_receive('stat').and_raise(FileNotFoundError)
|
||||||
|
|
||||||
|
assert module.read_check_time('/path') is None
|
||||||
|
|
||||||
|
|
||||||
def test_check_archives_with_progress_calls_borg_with_progress_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').and_return(())
    flexmock(module).should_receive('execute_command').never()
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    flexmock(module.environment).should_receive('make_environment')
    flexmock(module).should_receive('execute_command').with_args(
        ('borg', 'check', '--progress', 'repo'),
        output_file=module.DO_NOT_CAPTURE,
        extra_environment=None,
    ).once()
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
        progress=True,
    )
def test_check_archives_with_repair_calls_borg_with_repair_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').and_return(())
    flexmock(module).should_receive('execute_command').never()
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    flexmock(module.environment).should_receive('make_environment')
    flexmock(module).should_receive('execute_command').with_args(
        ('borg', 'check', '--repair', 'repo'),
        output_file=module.DO_NOT_CAPTURE,
        extra_environment=None,
    ).once()
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
        repair=True,
    )
|
@ -198,64 +421,153 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter():
|
def test_check_archives_calls_borg_with_parameters(checks):
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').with_args(
        '1.2.3', checks, check_last, module.DEFAULT_PREFIX
    ).and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(('borg', 'check', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )
def test_check_archives_with_json_error_raises():
    checks = ('archives',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"unexpected": {"id": "repo"}}'
    )

    with pytest.raises(ValueError):
        module.check_archives(
            repository='repo',
            location_config={},
            storage_config={},
            consistency_config=consistency_config,
            local_borg_version='1.2.3',
        )
def test_check_archives_with_missing_json_keys_raises():
    checks = ('archives',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON')

    with pytest.raises(ValueError):
        module.check_archives(
            repository='repo',
            location_config={},
            storage_config={},
            consistency_config=consistency_config,
            local_borg_version='1.2.3',
        )
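# Both failure tests assume the repository ID is extracted from borg rinfo's
# JSON output, with malformed output surfacing as ValueError. A sketch of that
# assumed validation:
import json


def sketch_repository_id(rinfo_output):
    '''Return the repository ID from rinfo JSON output, or raise ValueError.'''
    try:
        return json.loads(rinfo_output)['repository']['id']
    except (json.JSONDecodeError, KeyError) as error:
        raise ValueError(f'Cannot determine repository ID: {error}')


# Both '{invalid JSON' and '{"unexpected": {"id": "repo"}}' end up as ValueError here.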
def test_check_archives_with_extract_check_calls_extract_only():
    checks = ('extract',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').never()
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    flexmock(module.extract).should_receive('extract_last_archive_dry_run').once()
    flexmock(module).should_receive('write_check_time')
    insert_execute_command_never()

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )
def test_check_archives_with_log_info_calls_borg_with_info_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_logging_mock(logging.INFO)
    insert_execute_command_mock(('borg', 'check', '--info', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )
def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_logging_mock(logging.DEBUG)
    insert_execute_command_mock(('borg', 'check', '--debug', '--show-rc', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )
def test_check_archives_without_any_checks_bails():
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(())
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    insert_execute_command_never()

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )
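# "Bails" means the frequency filter may empty the whole check list before any
# borg command is built. A sketch of that gate, simplified to one shared
# frequency given as a timedelta (an assumption; the real filter presumably
# works per check):
import datetime


def sketch_filter_checks_on_frequency(checks, frequency, last_check_time):
    if last_check_time is None:
        return checks  # never checked before, so run everything
    if datetime.datetime.now() - last_check_time < frequency:
        return ()  # too soon; the caller bails without running borg
    return checks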
@@ -263,16 +575,25 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
    checks = ('repository',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').with_args(
        '1.2.3', checks, check_last, module.DEFAULT_PREFIX
    ).and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(('borg1', 'check', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
        local_path='borg1',
    )
@@ -281,16 +602,25 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters():
    checks = ('repository',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').with_args(
        '1.2.3', checks, check_last, module.DEFAULT_PREFIX
    ).and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(('borg', 'check', '--remote-path', 'borg1', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
        remote_path='borg1',
    )
@@ -299,14 +629,25 @@ def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
    checks = ('repository',)
    check_last = flexmock()
    consistency_config = {'check_last': check_last}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').with_args(
        '1.2.3', checks, check_last, module.DEFAULT_PREFIX
    ).and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(('borg', 'check', '--lock-wait', '5', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={'lock_wait': 5},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )
@@ -315,26 +656,46 @@ def test_check_archives_with_retention_prefix():
    check_last = flexmock()
    prefix = 'foo-'
    consistency_config = {'check_last': check_last, 'prefix': prefix}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').with_args(
        '1.2.3', checks, check_last, prefix
    ).and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(('borg', 'check', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )
def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(('borg', 'check', '--extra', '--options', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={'extra_borg_options': {'check': '--extra --options'}},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )
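# The expected command implies that extra_borg_options holds a plain string
# which is split on whitespace into separate arguments -- a sketch of that
# assumed handling:
def sketch_extra_options(storage_config, command_name):
    return tuple(storage_config.get('extra_borg_options', {}).get(command_name, '').split())


assert sketch_extra_options(
    {'extra_borg_options': {'check': '--extra --options'}}, 'check'
) == ('--extra', '--options')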
@@ -8,8 +8,12 @@ from ..test_verbosity import insert_logging_mock
def insert_execute_command_mock(compact_command, output_log_level):
    flexmock(module.environment).should_receive('make_environment')
    flexmock(module).should_receive('execute_command').with_args(
        compact_command,
        output_log_level=output_log_level,
        borg_local_path=compact_command[0],
        extra_environment=None,
    ).once()
@@ -17,94 +21,134 @@ COMPACT_COMMAND = ('borg', 'compact')
def test_compact_segments_calls_borg_with_parameters():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('repo',), logging.INFO)

    module.compact_segments(
        dry_run=False, repository='repo', storage_config={}, local_borg_version='1.2.3'
    )


def test_compact_segments_with_log_info_calls_borg_with_info_parameter():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--info', 'repo'), logging.INFO)
    insert_logging_mock(logging.INFO)

    module.compact_segments(
        repository='repo', storage_config={}, local_borg_version='1.2.3', dry_run=False
    )


def test_compact_segments_with_log_debug_calls_borg_with_debug_parameter():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--debug', '--show-rc', 'repo'), logging.INFO)
    insert_logging_mock(logging.DEBUG)

    module.compact_segments(
        repository='repo', storage_config={}, local_borg_version='1.2.3', dry_run=False
    )


def test_compact_segments_with_dry_run_skips_borg_call():
    flexmock(module).should_receive('execute_command').never()

    module.compact_segments(
        repository='repo', storage_config={}, local_borg_version='1.2.3', dry_run=True
    )


def test_compact_segments_with_local_path_calls_borg_via_local_path():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(('borg1',) + COMPACT_COMMAND[1:] + ('repo',), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
        storage_config={},
        local_borg_version='1.2.3',
        local_path='borg1',
    )


def test_compact_segments_with_remote_path_calls_borg_with_remote_path_parameters():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--remote-path', 'borg1', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
        storage_config={},
        local_borg_version='1.2.3',
        remote_path='borg1',
    )


def test_compact_segments_with_progress_calls_borg_with_progress_parameter():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--progress', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
        storage_config={},
        local_borg_version='1.2.3',
        progress=True,
    )


def test_compact_segments_with_cleanup_commits_calls_borg_with_cleanup_commits_parameter():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--cleanup-commits', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
        storage_config={},
        local_borg_version='1.2.3',
        cleanup_commits=True,
    )


def test_compact_segments_with_threshold_calls_borg_with_threshold_parameter():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--threshold', '20', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
        storage_config={},
        local_borg_version='1.2.3',
        threshold=20,
    )


def test_compact_segments_with_umask_calls_borg_with_umask_parameters():
    storage_config = {'umask': '077'}
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--umask', '077', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False, repository='repo', storage_config=storage_config, local_borg_version='1.2.3'
    )


def test_compact_segments_with_lock_wait_calls_borg_with_lock_wait_parameters():
    storage_config = {'lock_wait': 5}
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False, repository='repo', storage_config=storage_config, local_borg_version='1.2.3'
    )


def test_compact_segments_with_extra_borg_options_calls_borg_with_extra_options():
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--extra', '--options', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
        storage_config={'extra_borg_options': {'compact': '--extra --options'}},
        local_borg_version='1.2.3',
    )
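# Collectively these tests assume compact builds a command of the shape
# borg compact [verbosity flags] [--cleanup-commits] [--threshold N]
# [--remote-path PATH] [--umask MASK] [--lock-wait SECONDS] [--progress] REPO,
# and that execution is skipped entirely on a dry run. A sketch of that
# assembly (flag spellings are the ones asserted above):
def sketch_compact_command(repository, cleanup_commits=False, threshold=None, dry_run=False):
    if dry_run:
        return None  # nothing to execute; the tests expect no borg call at all
    command = ('borg', 'compact')
    if cleanup_commits:
        command += ('--cleanup-commits',)
    if threshold is not None:
        command += ('--threshold', str(threshold))
    return command + (repository,)


assert sketch_compact_command('repo', threshold=20) == (
    'borg', 'compact', '--threshold', '20', 'repo'
)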
File diff suppressed because it is too large
@@ -1,84 +1,34 @@
from borgmatic.borg import environment as module


def test_make_environment_with_passcommand_should_set_environment():
    environment = module.make_environment({'encryption_passcommand': 'command'})

    assert environment.get('BORG_PASSCOMMAND') == 'command'


def test_make_environment_with_passphrase_should_set_environment():
    environment = module.make_environment({'encryption_passphrase': 'pass'})

    assert environment.get('BORG_PASSPHRASE') == 'pass'


def test_make_environment_with_ssh_command_should_set_environment():
    environment = module.make_environment({'ssh_command': 'ssh -C'})

    assert environment.get('BORG_RSH') == 'ssh -C'


def test_make_environment_without_configuration_should_only_set_default_environment():
    environment = module.make_environment({})

    assert environment == {
        'BORG_RELOCATED_REPO_ACCESS_IS_OK': 'no',
        'BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK': 'no',
    }


def test_make_environment_with_relocated_repo_access_should_override_default():
    environment = module.make_environment({'relocated_repo_access_is_ok': True})

    assert environment.get('BORG_RELOCATED_REPO_ACCESS_IS_OK') == 'yes'
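# These tests fully pin down a small pure mapping from storage options to Borg
# environment variables. A sketch consistent with them (the variable names are
# the ones asserted; treating the option table as data is an assumption):
OPTION_TO_VARIABLE = {
    'encryption_passcommand': 'BORG_PASSCOMMAND',
    'encryption_passphrase': 'BORG_PASSPHRASE',
    'ssh_command': 'BORG_RSH',
}


def sketch_make_environment(storage_config):
    environment = {
        variable: storage_config[option]
        for option, variable in OPTION_TO_VARIABLE.items()
        if option in storage_config
    }
    # Defaults that make Borg refuse risky situations unless explicitly allowed.
    environment['BORG_RELOCATED_REPO_ACCESS_IS_OK'] = (
        'yes' if storage_config.get('relocated_repo_access_is_ok') else 'no'
    )
    environment['BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK'] = (
        'yes' if storage_config.get('unknown_unencrypted_repo_access_is_ok') else 'no'
    )
    return environment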
@@ -10,15 +10,22 @@ from ..test_verbosity import insert_logging_mock
def insert_execute_command_mock(
    command, output_log_level=logging.INFO, borg_local_path='borg', capture=True
):
    flexmock(module.environment).should_receive('make_environment')
    flexmock(module).should_receive('execute_command').with_args(
        command,
        output_file=None if capture else module.DO_NOT_CAPTURE,
        output_log_level=output_log_level,
        borg_local_path=borg_local_path,
        extra_environment=None,
    ).once()


def test_export_tar_archive_calls_borg_with_path_parameters():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', 'repo::archive', 'test.tar', 'path1', 'path2')
@@ -31,10 +38,16 @@ def test_export_tar_archive_calls_borg_with_path_parameters():
        paths=['path1', 'path2'],
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
    )


def test_export_tar_archive_calls_borg_with_local_path_parameters():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg1', 'export-tar', 'repo::archive', 'test.tar'), borg_local_path='borg1'
@@ -47,11 +60,17 @@ def test_export_tar_archive_calls_borg_with_local_path_parameters():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
        local_path='borg1',
    )


def test_export_tar_archive_calls_borg_with_remote_path_parameters():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--remote-path', 'borg1', 'repo::archive', 'test.tar')
@@ -64,11 +83,17 @@ def test_export_tar_archive_calls_borg_with_remote_path_parameters():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
        remote_path='borg1',
    )


def test_export_tar_archive_calls_borg_with_umask_parameters():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--umask', '0770', 'repo::archive', 'test.tar')
@@ -81,10 +106,16 @@ def test_export_tar_archive_calls_borg_with_umask_parameters():
        paths=None,
        destination_path='test.tar',
        storage_config={'umask': '0770'},
        local_borg_version='1.2.3',
    )


def test_export_tar_archive_calls_borg_with_lock_wait_parameters():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--lock-wait', '5', 'repo::archive', 'test.tar')
@@ -97,10 +128,16 @@ def test_export_tar_archive_calls_borg_with_lock_wait_parameters():
        paths=None,
        destination_path='test.tar',
        storage_config={'lock_wait': '5'},
        local_borg_version='1.2.3',
    )


def test_export_tar_archive_with_log_info_calls_borg_with_info_parameter():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(('borg', 'export-tar', '--info', 'repo::archive', 'test.tar'))
    insert_logging_mock(logging.INFO)
@@ -112,10 +149,16 @@ def test_export_tar_archive_with_log_info_calls_borg_with_info_parameter():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
    )


def test_export_tar_archive_with_log_debug_calls_borg_with_debug_parameters():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--debug', '--show-rc', 'repo::archive', 'test.tar')
@@ -129,10 +172,16 @@ def test_export_tar_archive_with_log_debug_calls_borg_with_debug_parameters():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
    )


def test_export_tar_archive_calls_borg_with_dry_run_parameter():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    flexmock(module).should_receive('execute_command').never()
@@ -143,10 +192,16 @@ def test_export_tar_archive_calls_borg_with_dry_run_parameter():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
    )


def test_export_tar_archive_calls_borg_with_tar_filter_parameters():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--tar-filter', 'bzip2', 'repo::archive', 'test.tar')
@@ -159,15 +214,21 @@ def test_export_tar_archive_calls_borg_with_tar_filter_parameters():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
        tar_filter='bzip2',
    )


def test_export_tar_archive_calls_borg_with_list_parameter():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--list', 'repo::archive', 'test.tar'),
        output_log_level=logging.ANSWER,
    )

    module.export_tar_archive(
@@ -177,11 +238,17 @@ def test_export_tar_archive_calls_borg_with_list_parameter():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
        list_files=True,
    )


def test_export_tar_archive_calls_borg_with_strip_components_parameter():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--strip-components', '5', 'repo::archive', 'test.tar')
@@ -194,11 +261,17 @@ def test_export_tar_archive_calls_borg_with_strip_components_parameter():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
        strip_components=5,
    )


def test_export_tar_archive_skips_abspath_for_remote_repository_parameter():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('server:repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').never()
    insert_execute_command_mock(('borg', 'export-tar', 'server:repo::archive', 'test.tar'))
@@ -209,10 +282,16 @@ def test_export_tar_archive_skips_abspath_for_remote_repository_parameter():
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
    )


def test_export_tar_archive_calls_borg_with_stdout_destination_path():
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(('borg', 'export-tar', 'repo::archive', '-'), capture=False)
@@ -223,4 +302,5 @@ def test_export_tar_archive_calls_borg_with_stdout_destination_path():
        paths=None,
        destination_path='-',
        storage_config={},
        local_borg_version='1.2.3',
    )
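# The stdout test above assumes that a destination path of '-' streams the
# tarball to standard output, so the output must not be captured -- a sketch of
# that one decision (DO_NOT_CAPTURE stands in for borgmatic's sentinel):
DO_NOT_CAPTURE = object()


def sketch_output_file(destination_path):
    return DO_NOT_CAPTURE if destination_path == '-' else None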
@@ -9,96 +9,116 @@ from ..test_verbosity import insert_logging_mock
||||||
|
|
||||||
|
|
||||||
def insert_execute_command_mock(command, working_directory=None):
|
def insert_execute_command_mock(command, working_directory=None):
|
||||||
|
flexmock(module.environment).should_receive('make_environment')
|
||||||
flexmock(module).should_receive('execute_command').with_args(
|
flexmock(module).should_receive('execute_command').with_args(
|
||||||
command, working_directory=working_directory
|
command, working_directory=working_directory, extra_environment=None,
|
||||||
).once()
|
).once()
|
||||||
|
|
||||||
|
|
||||||
def insert_execute_command_output_mock(command, result):
|
|
||||||
flexmock(module).should_receive('execute_command').with_args(
|
|
||||||
command, output_log_level=None, borg_local_path=command[0]
|
|
||||||
).and_return(result).once()
|
|
||||||
|
|
||||||
|
|
||||||
def test_extract_last_archive_dry_run_calls_borg_with_last_archive():
|
def test_extract_last_archive_dry_run_calls_borg_with_last_archive():
|
||||||
insert_execute_command_output_mock(
|
flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive')
|
||||||
('borg', 'list', '--short', 'repo'), result='archive1\narchive2\n'
|
insert_execute_command_mock(('borg', 'extract', '--dry-run', 'repo::archive'))
|
||||||
|
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
|
||||||
|
('repo::archive',)
|
||||||
)
|
)
|
||||||
insert_execute_command_mock(('borg', 'extract', '--dry-run', 'repo::archive2'))
|
|
||||||
flexmock(module.feature).should_receive('available').and_return(True)
|
|
||||||
|
|
||||||
module.extract_last_archive_dry_run(repository='repo', lock_wait=None)
|
module.extract_last_archive_dry_run(
|
||||||
|
storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_extract_last_archive_dry_run_without_any_archives_should_not_raise():
|
def test_extract_last_archive_dry_run_without_any_archives_should_not_raise():
|
||||||
insert_execute_command_output_mock(('borg', 'list', '--short', 'repo'), result='\n')
|
flexmock(module.rlist).should_receive('resolve_archive_name').and_raise(ValueError)
|
||||||
flexmock(module.feature).should_receive('available').and_return(True)
|
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(('repo',))
|
||||||
|
|
||||||
module.extract_last_archive_dry_run(repository='repo', lock_wait=None)
|
module.extract_last_archive_dry_run(
|
||||||
|
storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_extract_last_archive_dry_run_with_log_info_calls_borg_with_info_parameter():
|
def test_extract_last_archive_dry_run_with_log_info_calls_borg_with_info_parameter():
|
||||||
insert_execute_command_output_mock(
|
flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive')
|
||||||
('borg', 'list', '--short', '--info', 'repo'), result='archive1\narchive2\n'
|
insert_execute_command_mock(('borg', 'extract', '--dry-run', '--info', 'repo::archive'))
|
||||||
)
|
|
||||||
insert_execute_command_mock(('borg', 'extract', '--dry-run', '--info', 'repo::archive2'))
|
|
||||||
insert_logging_mock(logging.INFO)
|
insert_logging_mock(logging.INFO)
|
||||||
flexmock(module.feature).should_receive('available').and_return(True)
|
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
|
||||||
|
('repo::archive',)
|
||||||
|
)
|
||||||
|
|
||||||
module.extract_last_archive_dry_run(repository='repo', lock_wait=None)
|
module.extract_last_archive_dry_run(
|
||||||
|
storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_extract_last_archive_dry_run_with_log_debug_calls_borg_with_debug_parameter():
|
def test_extract_last_archive_dry_run_with_log_debug_calls_borg_with_debug_parameter():
|
||||||
insert_execute_command_output_mock(
|
flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive')
|
||||||
('borg', 'list', '--short', '--debug', '--show-rc', 'repo'), result='archive1\narchive2\n'
|
|
||||||
)
|
|
||||||
insert_execute_command_mock(
|
insert_execute_command_mock(
|
||||||
('borg', 'extract', '--dry-run', '--debug', '--show-rc', '--list', 'repo::archive2')
|
('borg', 'extract', '--dry-run', '--debug', '--show-rc', '--list', 'repo::archive')
|
||||||
)
|
)
|
||||||
insert_logging_mock(logging.DEBUG)
|
insert_logging_mock(logging.DEBUG)
|
||||||
flexmock(module.feature).should_receive('available').and_return(True)
|
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
|
||||||
|
('repo::archive',)
|
||||||
|
)
|
||||||
|
|
||||||
module.extract_last_archive_dry_run(repository='repo', lock_wait=None)
|
module.extract_last_archive_dry_run(
|
||||||
|
storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
 def test_extract_last_archive_dry_run_calls_borg_via_local_path():
-    insert_execute_command_output_mock(
-        ('borg1', 'list', '--short', 'repo'), result='archive1\narchive2\n'
-    )
-    insert_execute_command_mock(('borg1', 'extract', '--dry-run', 'repo::archive2'))
+    flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive')
+    insert_execute_command_mock(('borg1', 'extract', '--dry-run', 'repo::archive'))
+    flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

-    module.extract_last_archive_dry_run(repository='repo', lock_wait=None, local_path='borg1')
+    module.extract_last_archive_dry_run(
+        storage_config={},
+        local_borg_version='1.2.3',
+        repository='repo',
+        lock_wait=None,
+        local_path='borg1',
+    )

 def test_extract_last_archive_dry_run_calls_borg_with_remote_path_parameters():
-    insert_execute_command_output_mock(
-        ('borg', 'list', '--short', '--remote-path', 'borg1', 'repo'), result='archive1\narchive2\n'
-    )
+    flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive')
     insert_execute_command_mock(
-        ('borg', 'extract', '--dry-run', '--remote-path', 'borg1', 'repo::archive2')
+        ('borg', 'extract', '--dry-run', '--remote-path', 'borg1', 'repo::archive')
     )
+    flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

-    module.extract_last_archive_dry_run(repository='repo', lock_wait=None, remote_path='borg1')
+    module.extract_last_archive_dry_run(
+        storage_config={},
+        local_borg_version='1.2.3',
+        repository='repo',
+        lock_wait=None,
+        remote_path='borg1',
+    )

 def test_extract_last_archive_dry_run_calls_borg_with_lock_wait_parameters():
-    insert_execute_command_output_mock(
-        ('borg', 'list', '--short', '--lock-wait', '5', 'repo'), result='archive1\narchive2\n'
-    )
+    flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive')
     insert_execute_command_mock(
-        ('borg', 'extract', '--dry-run', '--lock-wait', '5', 'repo::archive2')
+        ('borg', 'extract', '--dry-run', '--lock-wait', '5', 'repo::archive')
     )
+    flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

-    module.extract_last_archive_dry_run(repository='repo', lock_wait=5)
+    module.extract_last_archive_dry_run(
+        storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=5
+    )

 def test_extract_archive_calls_borg_with_path_parameters():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', 'repo::archive', 'path1', 'path2'))
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

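The insert_execute_command_mock helper used throughout this file is defined at the top of test_extract.py, outside this diff. Reconstructed from how the tests call it, it looks roughly like this; the exact keyword arguments in the real helper may differ:

    from flexmock import flexmock

    from borgmatic.borg import extract as module


    def insert_execute_command_mock(command, working_directory=None):
        # Expect exactly one execute_command call with this exact command tuple.
        flexmock(module.environment).should_receive('make_environment')
        flexmock(module).should_receive('execute_command').with_args(
            command, working_directory=working_directory, extra_environment=None
        ).once()
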
@@ -115,6 +135,9 @@ def test_extract_archive_calls_borg_with_remote_path_parameters():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--remote-path', 'borg1', 'repo::archive'))
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

@@ -135,13 +158,16 @@ def test_extract_archive_calls_borg_with_numeric_ids_parameter(feature_available
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', option_flag, 'repo::archive'))
     flexmock(module.feature).should_receive('available').and_return(feature_available)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,
         repository='repo',
         archive='archive',
         paths=None,
-        location_config={'numeric_owner': True},
+        location_config={'numeric_ids': True},
         storage_config={},
         local_borg_version='1.2.3',
     )

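Beyond the mock boilerplate, this hunk carries a real rename: the location_config key changes from numeric_owner to numeric_ids, tracking Borg 2's rename of --numeric-owner to --numeric-ids. A sketch of the feature-gated flag selection the parametrized test implies; names here are illustrative, and borgmatic's real check goes through its feature module:

    def numeric_ids_flags(numeric_ids_requested, numeric_ids_feature_available):
        # Emit the new spelling on Borg versions that support it, the old one otherwise.
        if not numeric_ids_requested:
            return ()
        return ('--numeric-ids',) if numeric_ids_feature_available else ('--numeric-owner',)
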
@@ -151,6 +177,9 @@ def test_extract_archive_calls_borg_with_umask_parameters():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--umask', '0770', 'repo::archive'))
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

@@ -167,6 +196,9 @@ def test_extract_archive_calls_borg_with_lock_wait_parameters():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--lock-wait', '5', 'repo::archive'))
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

@@ -184,6 +216,9 @@ def test_extract_archive_with_log_info_calls_borg_with_info_parameter():
     insert_execute_command_mock(('borg', 'extract', '--info', 'repo::archive'))
     insert_logging_mock(logging.INFO)
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

@@ -203,6 +238,9 @@ def test_extract_archive_with_log_debug_calls_borg_with_debug_parameters():
     )
     insert_logging_mock(logging.DEBUG)
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

@@ -219,6 +257,9 @@ def test_extract_archive_calls_borg_with_dry_run_parameter():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--dry-run', 'repo::archive'))
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=True,

@@ -235,6 +276,9 @@ def test_extract_archive_calls_borg_with_destination_path():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', 'repo::archive'), working_directory='/dest')
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

@@ -252,6 +296,9 @@ def test_extract_archive_calls_borg_with_strip_components():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     insert_execute_command_mock(('borg', 'extract', '--strip-components', '5', 'repo::archive'))
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

@@ -267,12 +314,17 @@ def test_extract_archive_calls_borg_with_strip_components():

 def test_extract_archive_calls_borg_with_progress_parameter():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'extract', '--progress', 'repo::archive'),
         output_file=module.DO_NOT_CAPTURE,
         working_directory=None,
+        extra_environment=None,
     ).once()
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

@@ -306,13 +358,18 @@ def test_extract_archive_with_progress_and_extract_to_stdout_raises():

 def test_extract_archive_calls_borg_with_stdout_parameter_and_returns_process():
     flexmock(module.os.path).should_receive('abspath').and_return('repo')
     process = flexmock()
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'extract', '--stdout', 'repo::archive'),
         output_file=module.subprocess.PIPE,
         working_directory=None,
         run_to_completion=False,
+        extra_environment=None,
     ).and_return(process).once()
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('repo::archive',)
+    )

     assert (
         module.extract_archive(

@@ -331,10 +388,14 @@ def test_extract_archive_calls_borg_with_stdout_parameter_and_returns_process():

 def test_extract_archive_skips_abspath_for_remote_repository():
     flexmock(module.os.path).should_receive('abspath').never()
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'extract', 'server:repo::archive'), working_directory=None
+        ('borg', 'extract', 'server:repo::archive'), working_directory=None, extra_environment=None,
     ).once()
     flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        ('server:repo::archive',)
+    )

     module.extract_archive(
         dry_run=False,

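The make_environment mocks and the new extra_environment=None keyword running through the last few hunks reflect extract passing Borg's environment variables (passphrase settings and the like) explicitly to execute_command instead of relying on ambient process state. A rough stand-in for that calling convention, using only the standard library:

    import os
    import subprocess


    def run_with_extra_environment(command, extra_environment=None):
        # Merge the extra variables over the inherited environment; None means
        # "inherit unchanged", matching extra_environment=None in these tests.
        environment = {**os.environ, **extra_environment} if extra_environment else None
        subprocess.check_call(command, env=environment)
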
tests/unit/borg/test_flags.py
@@ -45,3 +45,34 @@ def test_make_flags_from_arguments_omits_excludes():
     arguments = flexmock(foo='bar', baz='quux')

     assert module.make_flags_from_arguments(arguments, excludes=('baz', 'other')) == ('foo', 'bar')
+
+
+def test_make_repository_flags_with_borg_features_includes_repo_flag():
+    flexmock(module.feature).should_receive('available').and_return(True)
+
+    assert module.make_repository_flags(repository='repo', local_borg_version='1.2.3') == (
+        '--repo',
+        'repo',
+    )
+
+
+def test_make_repository_flags_without_borg_features_omits_repo_flag():
+    flexmock(module.feature).should_receive('available').and_return(False)
+
+    assert module.make_repository_flags(repository='repo', local_borg_version='1.2.3') == ('repo',)
+
+
+def test_make_repository_archive_flags_with_borg_features_separates_repository_and_archive():
+    flexmock(module.feature).should_receive('available').and_return(True)
+
+    assert module.make_repository_archive_flags(
+        repository='repo', archive='archive', local_borg_version='1.2.3'
+    ) == ('--repo', 'repo', 'archive',)
+
+
+def test_make_repository_archive_flags_without_borg_features_joins_repository_and_archive():
+    flexmock(module.feature).should_receive('available').and_return(False)
+
+    assert module.make_repository_archive_flags(
+        repository='repo', archive='archive', local_borg_version='1.2.3'
+    ) == ('repo::archive',)

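These four new tests pin down the two flag helpers' behavior. A minimal sketch consistent with the assertions, with the feature check reduced to a boolean (the real helpers consult feature.available(...) against local_borg_version):

    def make_repository_flags(repository, borg_2_features_available):
        # Borg 2: repository becomes a --repo flag; Borg 1: a bare positional argument.
        if borg_2_features_available:
            return ('--repo', repository)
        return (repository,)


    def make_repository_archive_flags(repository, archive, borg_2_features_available):
        # Borg 2: separate --repo plus archive; Borg 1: the classic repo::archive syntax.
        if borg_2_features_available:
            return ('--repo', repository, archive)
        return (f'{repository}::{archive}',)


    assert make_repository_archive_flags('repo', 'archive', False) == ('repo::archive',)
    assert make_repository_archive_flags('repo', 'archive', True) == ('--repo', 'repo', 'archive')
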
tests/unit/borg/test_info.py
@@ -9,139 +9,283 @@ from ..test_verbosity import insert_logging_mock

 def test_display_archives_info_calls_borg_with_parameters():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', 'repo'), output_log_level=logging.WARNING, borg_local_path='borg'
+        ('borg', 'info', '--repo', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
     )

     module.display_archives_info(
-        repository='repo', storage_config={}, info_arguments=flexmock(archive=None, json=False)
+        repository='repo',
+        storage_config={},
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=False, prefix=None),
     )

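Two presentation changes recur in every rewritten info test: the expected output_log_level moves from logging.WARNING to a custom ANSWER level registered via add_custom_log_levels, and the tests patch module.logging accordingly. Registering a custom level with the standard logging module looks roughly like this; the numeric value below is an assumption for illustration, borgmatic defines its own:

    import logging

    ANSWER = logging.WARNING - 5  # assumed value, for illustration only


    def add_custom_log_levels():
        # Teach logging the new level name and give Logger a convenience method.
        logging.addLevelName(ANSWER, 'ANSWER')
        logging.Logger.answer = lambda self, msg, *args, **kwargs: self.log(
            ANSWER, msg, *args, **kwargs
        )


    add_custom_log_levels()
    logging.basicConfig(level=ANSWER)
    logging.getLogger(__name__).answer('borg info output lands at the ANSWER level')
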
 def test_display_archives_info_with_log_info_calls_borg_with_info_parameter():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', '--info', 'repo'), output_log_level=logging.WARNING, borg_local_path='borg'
+        ('borg', 'info', '--info', '--repo', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
     )
     insert_logging_mock(logging.INFO)
     module.display_archives_info(
-        repository='repo', storage_config={}, info_arguments=flexmock(archive=None, json=False)
+        repository='repo',
+        storage_config={},
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=False, prefix=None),
     )

 def test_display_archives_info_with_log_info_and_json_suppresses_most_borg_output():
-    flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', '--json', 'repo'), output_log_level=None, borg_local_path='borg'
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command_and_capture_output').with_args(
+        ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None,
     ).and_return('[]')

     insert_logging_mock(logging.INFO)
     json_output = module.display_archives_info(
-        repository='repo', storage_config={}, info_arguments=flexmock(archive=None, json=True)
+        repository='repo',
+        storage_config={},
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=True, prefix=None),
     )

     assert json_output == '[]'

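The JSON paths switch from execute_command(..., output_log_level=None) to a dedicated execute_command_and_capture_output, which makes the intent explicit: JSON has to come back as a string for the caller to return, not be routed to a logger. A minimal stand-in under that assumption:

    import os
    import subprocess


    def execute_command_and_capture_output_sketch(command, extra_environment=None):
        # Capture stdout as text rather than logging it; merge any extra
        # variables over the inherited environment.
        environment = {**os.environ, **extra_environment} if extra_environment else None
        return subprocess.run(
            command, env=environment, capture_output=True, text=True, check=True
        ).stdout
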
 def test_display_archives_info_with_log_debug_calls_borg_with_debug_parameter():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', '--debug', '--show-rc', 'repo'),
-        output_log_level=logging.WARNING,
+        ('borg', 'info', '--debug', '--show-rc', '--repo', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )
     insert_logging_mock(logging.DEBUG)

     module.display_archives_info(
-        repository='repo', storage_config={}, info_arguments=flexmock(archive=None, json=False)
+        repository='repo',
+        storage_config={},
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=False, prefix=None),
     )

 def test_display_archives_info_with_log_debug_and_json_suppresses_most_borg_output():
-    flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', '--json', 'repo'), output_log_level=None, borg_local_path='borg'
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command_and_capture_output').with_args(
+        ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None,
     ).and_return('[]')

     insert_logging_mock(logging.DEBUG)
     json_output = module.display_archives_info(
-        repository='repo', storage_config={}, info_arguments=flexmock(archive=None, json=True)
+        repository='repo',
+        storage_config={},
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=True, prefix=None),
     )

     assert json_output == '[]'

 def test_display_archives_info_with_json_calls_borg_with_json_parameter():
-    flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', '--json', 'repo'), output_log_level=None, borg_local_path='borg'
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command_and_capture_output').with_args(
+        ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None,
     ).and_return('[]')

     json_output = module.display_archives_info(
-        repository='repo', storage_config={}, info_arguments=flexmock(archive=None, json=True)
+        repository='repo',
+        storage_config={},
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=True, prefix=None),
     )

     assert json_output == '[]'

-def test_display_archives_info_with_archive_calls_borg_with_archive_parameter():
+def test_display_archives_info_with_archive_calls_borg_with_match_archives_parameter():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags').with_args(
+        'match-archives', 'archive'
+    ).and_return(('--match-archives', 'archive'))
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', 'repo::archive'), output_log_level=logging.WARNING, borg_local_path='borg'
+        ('borg', 'info', '--repo', 'repo', '--match-archives', 'archive'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
     )

     module.display_archives_info(
-        repository='repo', storage_config={}, info_arguments=flexmock(archive='archive', json=False)
+        repository='repo',
+        storage_config={},
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive='archive', json=False, prefix=None),
     )


 def test_display_archives_info_with_local_path_calls_borg_via_local_path():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg1', 'info', 'repo'), output_log_level=logging.WARNING, borg_local_path='borg1'
+        ('borg1', 'info', '--repo', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg1',
+        extra_environment=None,
     )

     module.display_archives_info(
         repository='repo',
         storage_config={},
-        info_arguments=flexmock(archive=None, json=False),
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=False, prefix=None),
         local_path='borg1',
     )

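A flexmock detail worth noting in the tests above: a catch-all should_receive('make_flags') returning () is stacked with a second, argument-specific expectation, and flexmock dispatches each call to the matching expectation, checking the more recently defined, more specific one first. A self-contained illustration of that layering:

    from flexmock import flexmock


    class Flags:
        def make_flags(self, name, value):
            raise NotImplementedError


    flags = Flags()
    flexmock(flags).should_receive('make_flags').and_return(())
    flexmock(flags).should_receive('make_flags').with_args(
        'match-archives', 'archive'
    ).and_return(('--match-archives', 'archive'))

    assert flags.make_flags('lock-wait', None) == ()
    assert flags.make_flags('match-archives', 'archive') == ('--match-archives', 'archive')
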
 def test_display_archives_info_with_remote_path_calls_borg_with_remote_path_parameters():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags').with_args(
+        'remote-path', 'borg1'
+    ).and_return(('--remote-path', 'borg1'))
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', '--remote-path', 'borg1', 'repo'),
-        output_log_level=logging.WARNING,
+        ('borg', 'info', '--remote-path', 'borg1', '--repo', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )

     module.display_archives_info(
         repository='repo',
         storage_config={},
-        info_arguments=flexmock(archive=None, json=False),
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=False, prefix=None),
         remote_path='borg1',
     )

 def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_parameters():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags').with_args('lock-wait', 5).and_return(
+        ('--lock-wait', '5')
+    )
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
     storage_config = {'lock_wait': 5}
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', '--lock-wait', '5', 'repo'),
-        output_log_level=logging.WARNING,
+        ('borg', 'info', '--lock-wait', '5', '--repo', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )

     module.display_archives_info(
         repository='repo',
         storage_config=storage_config,
-        info_arguments=flexmock(archive=None, json=False),
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=False, prefix=None),
     )

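The mocked make_flags('lock-wait', 5) -> ('--lock-wait', '5') behavior suggests the helper's contract: dashify the name, stringify the value, and collapse to nothing for falsy values. A sketch consistent with how every test in this diff mocks it; the real helper may handle more cases, such as boolean flags:

    def make_flags(name, value):
        # ('lock-wait', 5) -> ('--lock-wait', '5'); falsy values produce no flags.
        if not value:
            return ()
        flag = f"--{name}"
        if value is True:
            return (flag,)
        return (flag, str(value))


    assert make_flags('lock-wait', 5) == ('--lock-wait', '5')
    assert make_flags('match-archives', None) == ()
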
-@pytest.mark.parametrize('argument_name', ('prefix', 'glob_archives', 'sort_by', 'first', 'last'))
-def test_display_archives_info_passes_through_arguments_to_borg(argument_name):
+def test_display_archives_info_with_prefix_calls_borg_with_match_archives_parameters():
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags').with_args(
+        'match-archives', 'sh:foo*'
+    ).and_return(('--match-archives', 'sh:foo*'))
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        ('borg', 'info', '--' + argument_name.replace('_', '-'), 'value', 'repo'),
-        output_log_level=logging.WARNING,
+        ('borg', 'info', '--match-archives', 'sh:foo*', '--repo', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
         borg_local_path='borg',
+        extra_environment=None,
     )

     module.display_archives_info(
         repository='repo',
         storage_config={},
-        info_arguments=flexmock(archive=None, json=False, **{argument_name: 'value'}),
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=False, prefix='foo'),
     )
+
+
+@pytest.mark.parametrize('argument_name', ('match_archives', 'sort_by', 'first', 'last'))
+def test_display_archives_info_passes_through_arguments_to_borg(argument_name):
+    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
+    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
+    flag_name = f"--{argument_name.replace('_', '-')}"
+    flexmock(module.flags).should_receive('make_flags').and_return(())
+    flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(
+        (flag_name, 'value')
+    )
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('execute_command').with_args(
+        ('borg', 'info', flag_name, 'value', '--repo', 'repo'),
+        output_log_level=module.borgmatic.logger.ANSWER,
+        borg_local_path='borg',
+        extra_environment=None,
+    )
+
+    module.display_archives_info(
+        repository='repo',
+        storage_config={},
+        local_borg_version='2.3.4',
+        info_arguments=flexmock(archive=None, json=False, prefix=None, **{argument_name: 'value'}),
+    )

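The new prefix test encodes the compatibility shim: a legacy prefix value like 'foo' is rewritten into Borg 2's --match-archives shell pattern 'sh:foo*'. Sketched under that assumption:

    def prefix_to_match_archives_flags(prefix):
        # 'foo' -> ('--match-archives', 'sh:foo*'); no prefix, no flags.
        if not prefix:
            return ()
        return ('--match-archives', f'sh:{prefix}*')


    assert prefix_to_match_archives_flags('foo') == ('--match-archives', 'sh:foo*')
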
Some files were not shown because too many files have changed in this diff.