Compare commits

...

158 Commits
master...dev

Author SHA1 Message Date
Paul Hoffmann 62d7835018 Add ntfy warning 2023-03-31 11:47:54 +02:00
Dan Helfman caba5f8291 Update schema comment for check_repositories to mention labels (#635). 2023-03-31 11:46:38 +02:00
Dan Helfman fcdd839278 Add missing Docker Compose depends. 2023-03-31 11:46:38 +02:00
Dan Helfman daa0daab1b Remove unnecessary cd in dev documentation. 2023-03-31 11:46:38 +02:00
Dan Helfman 458b868371 Fix Drone indentation. 2023-03-31 11:46:38 +02:00
Dan Helfman e105799296 Error out if run-full-tests is run outside a test container. 2023-03-31 11:46:38 +02:00
Dan Helfman be1e483e66 Add check_repositories regression fix to NEWS (#662). 2023-03-31 11:46:38 +02:00
Divyansh Singh ce040a1a2b add and update tests 2023-03-31 11:46:38 +02:00
Divyansh Singh fb92e1aec4 reformat 2023-03-31 11:46:38 +02:00
Divyansh Singh 6de48fa5d8 fix: make check repositories work with dict and str repositories 2023-03-31 11:46:38 +02:00
Dan Helfman 85c62625c7 Update script comment. 2023-03-31 11:46:38 +02:00
Dan Helfman 174ebcc99c Update OpenBSD borgmatic link. 2023-03-31 11:46:38 +02:00
Dan Helfman 3553cb0500 I had one job... (#461). 2023-03-31 11:46:38 +02:00
Dan Helfman f6266160f9 Document that most command-line flags are not config-file-able (#461). 2023-03-31 11:46:38 +02:00
Dan Helfman eeb3cafe71 Fix multiple repositories example. 2023-03-31 11:46:38 +02:00
Dan Helfman 11798edc15 Mention prior versions of borgmatic in repositories schema. 2023-03-31 11:46:38 +02:00
Dan Helfman cbc6957a11 Update docs with a few more "path:" repositories references (#635). 2023-03-31 11:46:38 +02:00
Dan Helfman f8a81419c4 Fix repository schema description. 2023-03-31 11:46:38 +02:00
Dan Helfman 7adddb5a12 Bump version for release. 2023-03-31 11:46:38 +02:00
Dan Helfman 84a0ab3d18 Add optional repository labels so you can select a repository via "--repository yourlabel" at the command-line (#635). 2023-03-31 11:46:38 +02:00
Divyansh Singh 5711151ad7 add feature to docs 2023-03-31 11:46:38 +02:00
Divyansh Singh c53455c49d add tests for repo labels 2023-03-31 11:46:38 +02:00
Divyansh Singh 698468e322 rename repository arg to repository_path in all borg actions 2023-03-31 11:46:38 +02:00
Divyansh Singh 71296cfa0d check all tests 2023-03-31 11:46:38 +02:00
Divyansh Singh 6be334d388 remove optional label for repos from tests 2023-03-31 11:46:38 +02:00
Divyansh Singh 0aa554cfa5 reformat 2023-03-31 11:46:36 +02:00
Divyansh Singh 74abc19df6 pass all tests 2023-03-31 11:45:34 +02:00
Divyansh Singh 4c8600aa5d use repository["path"] instead of repository 2023-03-31 11:45:02 +02:00
Divyansh Singh 31b552df68 reformat base 2023-03-31 11:37:04 +02:00
Divyansh Singh f2b2b3c984 feat: tag repos 2023-03-31 11:37:02 +02:00
Dan Helfman 1d79fa3910 When a database command errors, display and log the error message instead of swallowing it (#396). 2023-03-31 11:36:15 +02:00
Dan Helfman c0ab5ae422 Removing debugging command output. 2023-03-31 11:36:15 +02:00
Dan Helfman b1c6217047 Add troubleshooting documentation on PostgreSQL/MySQL authentication errors. 2023-03-31 11:36:15 +02:00
Dan Helfman bc662720df Add additional documentation link to environment variable feature. Rename constants section. 2023-03-31 11:36:15 +02:00
Dan Helfman 1f7f31098d Add documentation and NEWS for custom constants feature (#612). 2023-03-31 11:36:15 +02:00
Divyansh Singh df85d3e8d1 fix: replace primitive values in config without quotes 2023-03-31 11:36:15 +02:00
Divyansh Singh b48710b544 reformat 2023-03-31 11:36:15 +02:00
Divyansh Singh d5ad78d9af add test for complex constant 2023-03-31 11:36:15 +02:00
Divyansh Singh 0ae811c25d feat: constants support 2023-03-31 11:36:15 +02:00
Dan Helfman 4582cf5244 Hide obnoxious ruamel.yaml warnings during test runs. 2023-03-31 11:36:15 +02:00
Dan Helfman 8edd5db1cb Rename scripts/run-full-dev-tests to scripts/run-end-to-end-dev-tests and make it run end-to-end tests only. 2023-03-31 11:36:15 +02:00
Divyansh Singh 9e037af926 fix: docs cli reference create spelling 2023-03-31 11:36:15 +02:00
Dan Helfman 646370e675 Add documentation on backing up a database running in a container (#649). 2023-03-31 11:36:15 +02:00
Dan Helfman 7d1b8ed393 Add code style plugins to enforce use of Python f-strings and prevent single-letter variables. 2023-03-31 11:36:15 +02:00
Divyansh Singh 87bdc68fcd end with newline 2023-03-31 11:36:15 +02:00
Divyansh Singh 0940e9511d fix: remove extra links from docs css 2023-03-31 11:36:15 +02:00
Divyansh Singh 80091230d6 docs: copy to clipboard support 2023-03-31 11:36:15 +02:00
Dan Helfman 81c81792f7 Add missing source directory error fix to NEWS (#655). 2023-03-31 11:36:15 +02:00
Divyansh Singh a0810671bd review 2023-03-31 11:36:15 +02:00
Divyansh Singh c1a15d4cb4 remove extra parameter from function call 2023-03-31 11:36:15 +02:00
Divyansh Singh 9bff81ed1f use exit_code_indicates_error and modify it to accept a command 2023-03-31 11:36:13 +02:00
Divyansh Singh 419678e0a3 fix: no error on database backups without source dirs 2023-03-31 11:35:34 +02:00
Dan Helfman 9af87e7035 Add confusing error message fix to NEWS (#623). 2023-03-31 11:35:34 +02:00
Divyansh Singh baec8acbf8 fix: rephrase error when running from config
to avoid confusion, as the user might think the problem is with their config file
2023-03-31 11:35:34 +02:00
Dan Helfman a241b8a38b Remove unused module and outdated test expectations (#576). 2023-03-31 11:35:34 +02:00
Dan Helfman 72effb99b1 Add "file://" paths to NEWS (#576). 2023-03-31 11:35:34 +02:00
Divyansh Singh f4b3681013 add tests and remove magic number 2023-03-31 11:35:34 +02:00
Dan Helfman 300d9b9dc4 Add NixOS package link to installation docs. 2023-03-31 11:35:34 +02:00
Dan Helfman 52a64f6a2c Add "source_directories_must_exist" option to NEWS (#501). 2023-03-31 11:35:34 +02:00
Divyansh Singh 91a916967c reformat 2023-03-31 11:35:34 +02:00
Divyansh Singh 58798e0592 add tests 2023-03-31 11:35:34 +02:00
Divyansh Singh d0dce1c362 feat: add optional check for existence of source directories 2023-03-31 11:35:34 +02:00
Dan Helfman ed15ea932b Add "borg_files_cache_ttl" option to NEWS. 2023-03-31 11:35:34 +02:00
Soumik Dutta e1a182418a add test to ensure integers are converted to string
before setting them up to be environment variable values

Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:35:34 +02:00
Soumik Dutta 26c83d1f62 add option to set borg_files_cache_ttl in config
Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:35:34 +02:00
Dan Helfman 0782ca6aef Clarify check frequency default behavior (#653). 2023-03-31 11:35:34 +02:00
Dan Helfman f0e92591cc Bump version for release. 2023-03-31 11:35:34 +02:00
Dan Helfman 0ada19b444 Add support for Python 3.11. 2023-03-31 11:35:34 +02:00
Dan Helfman 7129e20141 Add "--repository" flag to all actions where it makes sense (#564). 2023-03-31 11:35:34 +02:00
Nain 44f9e2d723 Update tests
Make them more explicit. Also formatting.
2023-03-31 11:35:34 +02:00
Nain 5c89ad6eda Add "--repository" flag to the "create" action 2023-03-31 11:35:34 +02:00
Nain a0dbe93554 Add "--repository" flag to the "compact" action 2023-03-31 11:35:34 +02:00
Nain 5af6d784bd Add "--repository" flag to the "check" action 2023-03-31 11:35:34 +02:00
Nain 9ded51b78a Merge mostly repetitive tests 2023-03-31 11:35:34 +02:00
Nain 667a40cb0b Remove test now that --repository isn't expected to error
As discussed #652#issuecomment-5579
2023-03-31 11:35:34 +02:00
Nain c804793453 Add "--repository" flag to the "prune" action
part of ticket #564
2023-03-31 11:35:34 +02:00
Nain ef7b4c33ce Better indicate position of additional docs on page
On wide screens, the position of the documentation (how-to and reference guide)
is at same level as #it's-your-data.-keep-it-that-way.

So the jump due to anchor link makes it seem like we're taken to top aka
main content. Indicate that links are to the left so reader doesn't recurse.
2023-03-31 11:35:34 +02:00
Nain dc2b1ff8d2 Fix --editable (mode) option given --user as arg
--user option should be before, or after `--editable .` not in between.
Before seems better.
2023-03-31 11:35:34 +02:00
Dan Helfman 9f1d183712 Add "--strip-components all" on the "extract" action to remove leading path components (#647). 2023-03-31 11:35:34 +02:00
Dan Helfman 6bf858aae2 Document how to list database dumps in an archive. 2023-03-31 11:35:33 +02:00
Jelle @ Samson-IT 4386e66f65 reworded and added 'all' caveat 2023-03-31 11:35:33 +02:00
Jelle @ Samson-IT 1cf4a91b0a Added some info about fetching mysql database size 2023-03-31 11:35:33 +02:00
Dan Helfman 9fa57b3190 Change the default action order to: "create", "prune", "compact", "check" (#304). 2023-03-31 11:35:33 +02:00
Dan Helfman 15ca234349 Run any command-line actions in the order specified instead of using a fixed ordering (#304). 2023-03-31 11:35:33 +02:00
Dan Helfman fc9bf03409 Add Healthchecks "log" state feature to NEWS. 2023-03-31 11:35:33 +02:00
Soumik Dutta e66afc8d8a fix tests
Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:35:33 +02:00
Soumik Dutta 8cac207fc6 added tests to make sure unsupported log states are detected
Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:35:33 +02:00
Soumik Dutta e4fe56f526 return early if unsupported state is passed
Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:35:33 +02:00
Soumik Dutta e3c46d0084 fix logs_monitor_start_error()
Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:35:33 +02:00
Soumik Dutta 4ef65a5411 update test_borgmatic.py
Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:35:32 +02:00
Soumik Dutta 850021b749 add test for healthchecks
Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:35:29 +02:00
Soumik Dutta 49564585dd update schema.yaml description
also add monitor.State.LOG to cronitor.

Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:34:42 +02:00
Soumik Dutta 01b4d7c27e Add support for healthchecks "log" feature #628
Signed-off-by: Soumik Dutta <shalearkane@gmail.com>
2023-03-31 11:34:39 +02:00
Dan Helfman dbb39d0b10 Add flake8-quotes to complain about incorrect quoting so I don't have to! 2023-03-31 11:33:18 +02:00
Dan Helfman cd003f11dd Mention searching for files in the extract a backup guide. 2023-03-31 11:33:18 +02:00
Dan Helfman ba71509bba Update dates in documentation examples. 2023-03-31 11:33:18 +02:00
Divyansh Singh b0a1ba867c send repo directly to extract and export_tar 2023-03-31 11:33:18 +02:00
Divyansh Singh 13c16dd8f7 feat: file:// URLs support 2023-03-31 11:33:18 +02:00
Divyansh Singh fa99471e3e fix: remove extra dark mode styles 2023-03-31 11:33:18 +02:00
Dan Helfman 1e8148575c In the documentation, mention what version of borgmatic introduced SQLite support. 2023-03-31 11:33:18 +02:00
Dan Helfman 6948aa957f Add SQLite feature to NEWS and also integrations. 2023-03-31 11:33:18 +02:00
Divyansh Singh 61f1c9603c remove test path 2023-03-31 11:33:18 +02:00
Divyansh Singh d854d40472 mock os.remove instead of actually removing a file 2023-03-31 11:33:18 +02:00
Divyansh Singh 1804967bdc use os.remove and improve tests 2023-03-31 11:33:18 +02:00
Divyansh Singh 338a4cea29 add sqlite for e2e tests 2023-03-31 11:33:18 +02:00
Divyansh Singh 41b3975198 e2e tests schema update 2023-03-31 11:33:18 +02:00
Divyansh Singh f8f9debee8 code review 2023-03-31 11:33:18 +02:00
Divyansh Singh c80f3c8265 formatting fix 2023-03-31 11:33:18 +02:00
Divyansh Singh 4231405c80 feat: add dump-restore support for sqlite databases 2023-03-31 11:33:17 +02:00
Dan Helfman 15d408d7cf Bump version for release. 2023-03-31 11:33:17 +02:00
Dan Helfman 228f55a1de With the "create" action and the "--list" ("--files") flag, only show excluded files at verbosity 2 (#620). 2023-03-31 11:33:17 +02:00
Dan Helfman 7beb935122 Add "checkpoint_volume" configuration option to create checkpoints every specified number of bytes. 2023-03-31 11:33:17 +02:00
Dan Helfman 37a32b0943 Fix tests. 2023-03-31 11:33:17 +02:00
Dan Helfman 24331d917c Add "--repository" flag to the "rcreate" action. Add "--progress" flag to the "transfer" action. 2023-03-31 11:33:17 +02:00
Dan Helfman 52889c646f Support status character changes in Borg 2.0.0b5 when filtering out special files that cause Borg to hang. 2023-03-31 11:33:17 +02:00
Dan Helfman 49950c2c77 Fix the "create" action with the "--dry-run" flag querying for databases when a PostgreSQL/MySQL "all" database is configured. 2023-03-31 11:33:17 +02:00
Dan Helfman a66542cce1 Internally support new Borg 2.0.0b5 "--filter" status characters / item flags for the "create" action. 2023-03-31 11:33:17 +02:00
Dan Helfman ee8535f577 With the "create" action, only one of "--list" ("--files") and "--progress" flags can be used. 2023-03-31 11:33:17 +02:00
Dan Helfman 9071b14dcf Update Borg 2.0 documentation links. 2023-03-31 11:33:17 +02:00
Dan Helfman 713ed66337 Add ntfy authentication to NEWS. 2023-03-31 11:33:17 +02:00
Tom Hubrecht 608dbcedc7 Add auth test for the ntfy hook 2023-03-31 11:33:17 +02:00
Tom Hubrecht 67ed746024 Make the auth logic more explicit and warnings if necessary 2023-03-31 11:33:17 +02:00
Tom Hubrecht 3e8236c26e Add authentication to the ntfy hook 2023-03-31 11:33:17 +02:00
Dan Helfman 6bdb1974cf Add MySQL database hook "add_drop_database" configuration option to control whether dumped MySQL databases get dropped right before restore (#642). 2023-03-31 11:33:17 +02:00
Dan Helfman 339ffc9c22 Fix for potential data loss (data not getting backed up) when dumping large "directory" format PostgreSQL/MongoDB databases (#643). 2023-03-31 11:33:17 +02:00
Clemens Lang 16da97d75d setup: Add link to MacPorts package 2023-03-31 11:33:17 +02:00
Dan Helfman db13217361 Clarify "checks" configuration documentation for older versions of borgmatic (#639). 2023-03-31 11:33:17 +02:00
Dan Helfman f152e33f35 Remove related documentation links. 2023-03-31 11:33:17 +02:00
Dan Helfman 7ae1eac0df Upgrade documentation image dependencies. 2023-03-31 11:33:17 +02:00
Dan Helfman 0241d56131 Clarify NEWS on database "all" dump feature applying to MySQL as well. 2023-03-31 11:33:17 +02:00
Dan Helfman 1c2ef7b24d Bump version for release. 2023-03-31 11:33:17 +02:00
Dan Helfman 1f12bf6461 Fix code style flake issue. 2023-03-31 11:33:17 +02:00
Dan Helfman 2cf1ed57c0 Add configuration options for database command customization (#630). 2023-03-31 11:33:17 +02:00
Dan Helfman deb46b0a70 Update documentation about changes to "all" database restores (#438, #560). 2023-03-31 11:33:17 +02:00
Dan Helfman 25df602a47 Optionally dump "all" PostgreSQL databases to separate files instead of one combined dump file (#438, #560). 2023-03-31 11:33:17 +02:00
Dan Helfman 7564a27dfc Mention "before_actions" command hook in soft failure documentation (#631). 2023-03-31 11:33:17 +02:00
Dan Helfman 62109adfb0 Add NEWS entry for #629. 2023-03-31 11:33:17 +02:00
palto42 abcd017e3e conditional warning for excluding special files 2023-03-31 11:33:17 +02:00
Dan Helfman bf3d364ac4 Lowercase borgmatic in documentation. 2023-03-31 11:33:17 +02:00
Macguire Rintoul c2622a4199 add borgmatic minimum version to compact docs 2023-03-31 11:33:17 +02:00
Dan Helfman 1e3fd59ce6 Fix traceback when include merging on ARM64 (#622). 2023-03-31 11:33:17 +02:00
Dan Helfman 8f4c8200a6 Update borgmatic social links. 2023-03-31 11:33:17 +02:00
Dan Helfman a97dc48fee Optionally dump "all" PostgreSQL databases to separate files instead of one combined dump file (#438, #560). 2023-03-31 11:33:17 +02:00
Dan Helfman c54e6b319f Update developer contributing instructions as well. 2023-03-31 11:33:17 +02:00
Dan Helfman e96ad5b854 Update developer instructions. 2023-03-31 11:33:17 +02:00
Dan Helfman a4111cc417 Clarify data/archives check implicit enabling. 2023-03-31 11:33:17 +02:00
Dan Helfman 90fb6f5b9e Clarify documentation about transferring archives between related repositories. 2023-03-31 11:33:17 +02:00
Dan Helfman 7e49b63a26 Fix logs that interfere with JSON output by making warnings go to stderr instead of stdout (#602). 2023-03-31 11:33:17 +02:00
Dan Helfman df2c34c260 Bump version for release. 2023-03-31 11:33:17 +02:00
Dan Helfman 3ee7d502d3 Clarify documentation about multiple repositories and separate configuration files (#613). 2023-03-31 11:33:17 +02:00
Dan Helfman e252f44696 Upgrade pytest test dependency (security). 2023-03-31 11:33:17 +02:00
Dan Helfman 58e3869dbd Code formatting. 2023-03-31 11:33:17 +02:00
Javier Paniagua 781e479966 specify pg dump/restore commands (#311) 2023-03-31 11:33:17 +02:00
Dan Helfman 3113aaba27 Fix "data" consistency check to support "check_last" and consistency "prefix" options (#611). 2023-03-31 11:33:17 +02:00
Dan Helfman e5c762419c More consistency checks documentation edits. 2023-03-31 11:33:17 +02:00
Dan Helfman 85a60ffa15 Clarify consistency check configuration. 2023-03-31 11:33:17 +02:00
Dan Helfman 1c00954969 Clarify examples in include merging and deep merging documentation (#607). 2023-03-31 11:33:17 +02:00
Paul Hoffmann 89cc4e824a Add ntfy warning 2023-03-30 16:02:19 +02:00
159 changed files with 6847 additions and 2393 deletions


@@ -24,6 +24,8 @@ clone:
steps:
- name: build
image: alpine:3.13
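# Set alongside the run-full-tests change above, so the script can error out
# when invoked outside a test container.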
environment:
TEST_CONTAINER: true
pull: always
commands:
- scripts/run-full-tests


@@ -1,4 +1,5 @@
const pluginSyntaxHighlight = require("@11ty/eleventy-plugin-syntaxhighlight");
const codeClipboard = require("eleventy-plugin-code-clipboard");
const inclusiveLangPlugin = require("@11ty/eleventy-plugin-inclusive-language");
const navigationPlugin = require("@11ty/eleventy-navigation");
@@ -6,6 +7,7 @@ module.exports = function(eleventyConfig) {
eleventyConfig.addPlugin(pluginSyntaxHighlight);
eleventyConfig.addPlugin(inclusiveLangPlugin);
eleventyConfig.addPlugin(navigationPlugin);
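// Added with the copy-to-clipboard feature: registers the plugin here and the
// markdown-it hook below to render copy buttons on code blocks.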
eleventyConfig.addPlugin(codeClipboard);
let markdownIt = require("markdown-it");
let markdownItAnchor = require("markdown-it-anchor");
@@ -31,6 +33,7 @@ module.exports = function(eleventyConfig) {
markdownIt(markdownItOptions)
.use(markdownItAnchor, markdownItAnchorOptions)
.use(markdownItReplaceLink)
.use(codeClipboard.markdownItCopyButton)
);
eleventyConfig.addPassthroughCopy({"docs/static": "static"});

.flake8 (new file, 1 line)

@@ -0,0 +1 @@
select = Q0

NEWS (83 lines changed)

@@ -1,5 +1,86 @@
1.7.5.dev0
1.7.11.dev0
* #662: Fix regression in which "check_repositories" option failed to match repositories.
1.7.10
* #396: When a database command errors, display and log the error message instead of swallowing it.
* #501: Optionally error if a source directory does not exist via "source_directories_must_exist"
option in borgmatic's location configuration.
* #576: Add support for "file://" paths within "repositories" option.
* #612: Define and use custom constants in borgmatic configuration files. See the documentation for
more information:
https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#constant-interpolation
* #618: Add support for BORG_FILES_CACHE_TTL environment variable via "borg_files_cache_ttl" option
in borgmatic's storage configuration.
* #623: Fix confusing message when an error occurs running actions for a configuration file.
* #635: Add optional repository labels so you can select a repository via "--repository yourlabel"
at the command-line. See the configuration reference for more information:
https://torsion.org/borgmatic/docs/reference/configuration/
* #649: Add documentation on backing up a database running in a container:
https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#containers
* #655: Fix error when databases are configured and a source directory doesn't exist.
* Add code style plugins to enforce use of Python f-strings and prevent single-letter variables.
To join in the pedantry, refresh your test environment with "tox --recreate".
* Rename scripts/run-full-dev-tests to scripts/run-end-to-end-dev-tests and make it run end-to-end
tests only. Continue using tox to run unit and integration tests.
1.7.9
* #295: Add a SQLite database dump/restore hook.
* #304: Change the default action order when no actions are specified on the command-line to:
"create", "prune", "compact", "check". If you'd like to retain the old ordering ("prune" and
"compact" first), then specify actions explicitly on the command-line.
* #304: Run any command-line actions in the order specified instead of using a fixed ordering.
* #564: Add "--repository" flag to all actions where it makes sense, so you can run borgmatic on
a single configured repository instead of all of them.
* #628: Add a Healthchecks "log" state to send borgmatic logs to Healthchecks without signalling
success or failure.
* #647: Add "--strip-components all" feature on the "extract" action to remove leading path
components of files you extract. Must be used with the "--path" flag.
* Add support for Python 3.11.
1.7.8
* #620: With the "create" action and the "--list" ("--files") flag, only show excluded files at
verbosity 2.
* #621: Add optional authentication to the ntfy monitoring hook.
* With the "create" action, only one of "--list" ("--files") and "--progress" flags can be used.
This lines up with the new behavior in Borg 2.0.0b5.
* Internally support new Borg 2.0.0b5 "--filter" status characters / item flags for the "create"
action.
* Fix the "create" action with the "--dry-run" flag querying for databases when a PostgreSQL/MySQL
"all" database is configured. Now, these queries are skipped due to the dry run.
* Add "--repository" flag to the "rcreate" action to optionally select one configured repository to
create.
* Add "--progress" flag to the "transfer" action, new in Borg 2.0.0b5.
* Add "checkpoint_volume" configuration option to creates checkpoints every specified number of
bytes during a long-running backup, new in Borg 2.0.0b5.
1.7.7
* #642: Add MySQL database hook "add_drop_database" configuration option to control whether dumped
MySQL databases get dropped right before restore.
* #643: Fix for potential data loss (data not getting backed up) when dumping large "directory"
format PostgreSQL/MongoDB databases. Prior to the fix, these dumps would not finish writing to
disk before Borg consumed them. Now, the dumping process completes before Borg starts. This only
applies to "directory" format databases; other formats still stream to Borg without using
temporary disk space.
* Fix MongoDB "directory" format to work with mongodump/mongorestore without error. Prior to this
fix, only the "archive" format worked.
1.7.6
* #393, #438, #560: Optionally dump "all" PostgreSQL/MySQL databases to separate files instead of
one combined dump file, allowing more convenient restores of individual databases. You can enable
this by specifying the database dump "format" option when the database is named "all".
* #602: Fix logs that interfere with JSON output by making warnings go to stderr instead of stdout.
* #622: Fix traceback when include merging configuration files on ARM64.
* #629: Skip warning about excluded special files when no special files have been excluded.
* #630: Add configuration options for database command customization: "list_options",
"restore_options", and "analyze_options" for PostgreSQL, "restore_options" for MySQL, and
"restore_options" for MongoDB.
1.7.5
* #311: Override PostgreSQL dump/restore commands via configuration options.
* #604: Fix traceback when a configuration section is present but lacking any options.
* #607: Clarify documentation examples for include merging and deep merging.
* #611: Fix "data" consistency check to support "check_last" and consistency "prefix" options.
* #613: Clarify documentation about multiple repositories and separate configuration files.
1.7.4
* #596: Fix special file detection erroring when broken symlinks are encountered.
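To make the #612 constants entry above concrete, here is a minimal configuration sketch of constant interpolation, following the linked documentation; the constant names and paths are illustrative, not from this changeset:

    constants:
        app: myapp
        user: ambrose

    location:
        source_directories:
            - /home/{user}/{app}
        repositories:
            - /var/lib/backups/{app}.borg

Each "{name}" placeholder is replaced with its value from the "constants" section when the configuration file loads.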


@@ -24,9 +24,10 @@ location:
# Paths of local or remote repositories to backup to.
repositories:
- ssh://1234@usw-s001.rsync.net/./backups.borg
- ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
- /var/lib/backups/local.borg
- path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
label: borgbase
- path: /var/lib/backups/local.borg
label: local
retention:
# Retention policy for how many backups to keep.
@@ -67,6 +68,7 @@ borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).
<a href="https://www.mysql.com/"><img src="docs/static/mysql.png" alt="MySQL" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://mariadb.com/"><img src="docs/static/mariadb.png" alt="MariaDB" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://www.mongodb.com/"><img src="docs/static/mongodb.png" alt="MongoDB" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://sqlite.org/"><img src="docs/static/sqlite.png" alt="SQLite" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://healthchecks.io/"><img src="docs/static/healthchecks.png" alt="Healthchecks" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://cronitor.io/"><img src="docs/static/cronitor.png" alt="Cronitor" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://cronhub.io/"><img src="docs/static/cronhub.png" alt="Cronhub" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
@@ -80,8 +82,8 @@ borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).
Your first step is to [install and configure
borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/).
For additional documentation, check out the links above for <a
href="https://torsion.org/borgmatic/#documentation">borgmatic how-to and
For additional documentation, check out the links above (left panel on wide screens)
for <a href="https://torsion.org/borgmatic/#documentation">borgmatic how-to and
reference guides</a>.
@@ -104,23 +106,38 @@ offerings, but do not currently fund borgmatic development or hosting.
### Issues
You've got issues? Or an idea for a feature enhancement? We've got an [issue
tracker](https://projects.torsion.org/borgmatic-collective/borgmatic/issues). In order to
create a new issue or comment on an issue, you'll need to [login
first](https://projects.torsion.org/user/login). Note that you can login with
an existing GitHub account if you prefer.
If you'd like to chat with borgmatic developers or users, head on over to the
`#borgmatic` IRC channel on Libera Chat, either via <a
href="https://web.libera.chat/#borgmatic">web chat</a> or a
native <a href="ircs://irc.libera.chat:6697">IRC client</a>. If you
don't get a response right away, please hang around a while—or file a ticket
instead.
Are you experiencing an issue with borgmatic? Or do you have an idea for a
feature enhancement? Head on over to our [issue
tracker](https://projects.torsion.org/borgmatic-collective/borgmatic/issues).
In order to create a new issue or add a comment, you'll need to
[register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic)
first. If you prefer to use an existing GitHub account, you can skip account
creation and [login directly](https://projects.torsion.org/user/login).
Also see the [security
policy](https://torsion.org/borgmatic/docs/security-policy/) for any security
issues.
### Social
Check out the [Borg subreddit](https://www.reddit.com/r/BorgBackup/) for
general Borg and borgmatic discussion and support.
Also follow [borgmatic on Mastodon](https://fosstodon.org/@borgmatic).
### Chat
To chat with borgmatic developers or users, check out the `#borgmatic`
IRC channel on Libera Chat, either via <a
href="https://web.libera.chat/#borgmatic">web chat</a> or a native <a
href="ircs://irc.libera.chat:6697">IRC client</a>. If you don't get a response
right away, please hang around a while—or file a ticket instead.
### Other
Other questions or comments? Contact
[witten@torsion.org](mailto:witten@torsion.org).
@@ -135,10 +152,14 @@ borgmatic is licensed under the GNU General Public License version 3 or any
later version.
If you'd like to contribute to borgmatic development, please feel free to
submit a [Pull Request](https://projects.torsion.org/borgmatic-collective/borgmatic/pulls)
or open an [issue](https://projects.torsion.org/borgmatic-collective/borgmatic/issues) first
to discuss your idea. We also accept Pull Requests on GitHub, if that's more
your thing. In general, contributions are very welcome. We don't bite!
submit a [Pull
Request](https://projects.torsion.org/borgmatic-collective/borgmatic/pulls) or
open an
[issue](https://projects.torsion.org/borgmatic-collective/borgmatic/issues) to
discuss your idea. Note that you'll need to
[register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic)
first. We also accept Pull Requests on GitHub, if that's more your thing. In
general, contributions are very welcome. We don't bite!
Also, please check out the [borgmatic development
how-to](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/) for
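Connecting the labeled repositories shown above to the #635 NEWS entry: once a label is configured, the "--repository" flag accepts it in place of a path. A sketch reusing the "local" label from the README example; the command line is illustrative:

    location:
        repositories:
            - path: /var/lib/backups/local.borg
              label: local

    # borgmatic create --repository local

Path arguments continue to work; the label is simply an additional way for "--repository" to match a configured repository.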


borgmatic/actions/borg.py (new file, 36 lines)

@@ -0,0 +1,36 @@
import logging
import borgmatic.borg.borg
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_borg(
repository, storage, local_borg_version, borg_arguments, local_path, remote_path,
):
'''
Run the "borg" action for the given repository.
'''
if borg_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, borg_arguments.repository
):
logger.info('{}: Running arbitrary Borg command'.format(repository['path']))
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
borg_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
borgmatic.borg.borg.run_arbitrary_borg(
repository['path'],
storage,
local_borg_version,
options=borg_arguments.options,
archive=archive_name,
local_path=local_path,
remote_path=remote_path,
)


@@ -0,0 +1,25 @@
import logging
import borgmatic.borg.break_lock
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_break_lock(
repository, storage, local_borg_version, break_lock_arguments, local_path, remote_path,
):
'''
Run the "break-lock" action for the given repository.
'''
if break_lock_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, break_lock_arguments.repository
):
logger.info(f'{repository["path"]}: Breaking repository and cache locks')
borgmatic.borg.break_lock.break_lock(
repository['path'],
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
)


@@ -0,0 +1,61 @@
import logging
import borgmatic.borg.check
import borgmatic.config.validate
import borgmatic.hooks.command
logger = logging.getLogger(__name__)
def run_check(
config_filename,
repository,
location,
storage,
consistency,
hooks,
hook_context,
local_borg_version,
check_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "check" action for the given repository.
'''
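    # Skip this repository if it doesn't match the "--repository" the user asked
    # for; per #635, matching works on the configured path or its optional label.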
if check_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, check_arguments.repository
):
return
borgmatic.hooks.command.execute_hook(
hooks.get('before_check'),
hooks.get('umask'),
config_filename,
'pre-check',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Running consistency checks'.format(repository['path']))
borgmatic.borg.check.check_archives(
repository['path'],
location,
storage,
consistency,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=check_arguments.progress,
repair=check_arguments.repair,
only_checks=check_arguments.only,
force=check_arguments.force,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_check'),
hooks.get('umask'),
config_filename,
'post-check',
global_arguments.dry_run,
**hook_context,
)
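The "consistency" value consumed above comes from configuration along these lines — a sketch modeled on the DEFAULT_CHECKS structure visible in the checks module at the end of this diff; the frequencies are illustrative:

    consistency:
        checks:
            - name: repository
              frequency: 2 weeks
            - name: archives
              frequency: 1 month

A check whose configured frequency hasn't yet elapsed since its last run is skipped, as filter_checks_on_frequency() logs below.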


@@ -0,0 +1,65 @@
import logging
import borgmatic.borg.compact
import borgmatic.borg.feature
import borgmatic.config.validate
import borgmatic.hooks.command
logger = logging.getLogger(__name__)
def run_compact(
config_filename,
repository,
storage,
retention,
hooks,
hook_context,
local_borg_version,
compact_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
):
'''
Run the "compact" action for the given repository.
'''
if compact_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, compact_arguments.repository
):
return
borgmatic.hooks.command.execute_hook(
hooks.get('before_compact'),
hooks.get('umask'),
config_filename,
'pre-compact',
global_arguments.dry_run,
**hook_context,
)
if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version):
logger.info('{}: Compacting segments{}'.format(repository['path'], dry_run_label))
borgmatic.borg.compact.compact_segments(
global_arguments.dry_run,
repository['path'],
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=compact_arguments.progress,
cleanup_commits=compact_arguments.cleanup_commits,
threshold=compact_arguments.threshold,
)
else: # pragma: nocover
logger.info(
'{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository['path'])
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_compact'),
hooks.get('umask'),
config_filename,
'post-compact',
global_arguments.dry_run,
**hook_context,
)


@@ -0,0 +1,96 @@
import json
import logging
import borgmatic.borg.create
import borgmatic.config.validate
import borgmatic.hooks.command
import borgmatic.hooks.dispatch
import borgmatic.hooks.dump
logger = logging.getLogger(__name__)
def run_create(
config_filename,
repository,
location,
storage,
hooks,
hook_context,
local_borg_version,
create_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
):
'''
Run the "create" action for the given repository.
If create_arguments.json is True, yield the JSON output from creating the archive.
'''
if create_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, create_arguments.repository
):
return
borgmatic.hooks.command.execute_hook(
hooks.get('before_backup'),
hooks.get('umask'),
config_filename,
'pre-backup',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Creating archive{}'.format(repository['path'], dry_run_label))
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
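    # Start any configured database dumps. Streaming-format dumps return
    # subprocesses whose output Borg consumes while creating the archive;
    # "directory" format dumps finish writing to disk first (#643).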
active_dumps = borgmatic.hooks.dispatch.call_hooks(
'dump_databases',
hooks,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
stream_processes = [process for processes in active_dumps.values() for process in processes]
json_output = borgmatic.borg.create.create_archive(
global_arguments.dry_run,
repository['path'],
location,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=create_arguments.progress,
stats=create_arguments.stats,
json=create_arguments.json,
list_files=create_arguments.list_files,
stream_processes=stream_processes,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
config_filename,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_backup'),
hooks.get('umask'),
config_filename,
'post-backup',
global_arguments.dry_run,
**hook_context,
)


@@ -0,0 +1,48 @@
import logging
import borgmatic.borg.export_tar
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_export_tar(
repository,
storage,
local_borg_version,
export_tar_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "export-tar" action for the given repository.
'''
if export_tar_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, export_tar_arguments.repository
):
logger.info(
f'{repository["path"]}: Exporting archive {export_tar_arguments.archive} as tar file'
)
borgmatic.borg.export_tar.export_tar_archive(
global_arguments.dry_run,
repository['path'],
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
export_tar_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
),
export_tar_arguments.paths,
export_tar_arguments.destination,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
tar_filter=export_tar_arguments.tar_filter,
list_files=export_tar_arguments.list_files,
strip_components=export_tar_arguments.strip_components,
)


@@ -0,0 +1,69 @@
import logging
import borgmatic.borg.extract
import borgmatic.borg.rlist
import borgmatic.config.validate
import borgmatic.hooks.command
logger = logging.getLogger(__name__)
def run_extract(
config_filename,
repository,
location,
storage,
hooks,
hook_context,
local_borg_version,
extract_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "extract" action for the given repository.
'''
borgmatic.hooks.command.execute_hook(
hooks.get('before_extract'),
hooks.get('umask'),
config_filename,
'pre-extract',
global_arguments.dry_run,
**hook_context,
)
if extract_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, extract_arguments.repository
):
logger.info(
'{}: Extracting archive {}'.format(repository['path'], extract_arguments.archive)
)
borgmatic.borg.extract.extract_archive(
global_arguments.dry_run,
repository['path'],
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
extract_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
),
extract_arguments.paths,
location,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
destination_path=extract_arguments.destination,
strip_components=extract_arguments.strip_components,
progress=extract_arguments.progress,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_extract'),
hooks.get('umask'),
config_filename,
'post-extract',
global_arguments.dry_run,
**hook_context,
)

borgmatic/actions/info.py (new file, 41 lines)

@@ -0,0 +1,41 @@
import json
import logging
import borgmatic.borg.info
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_info(
repository, storage, local_borg_version, info_arguments, local_path, remote_path,
):
'''
Run the "info" action for the given repository and archive.
If info_arguments.json is True, yield the JSON output from the info for the archive.
'''
if info_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, info_arguments.repository
):
if not info_arguments.json: # pragma: nocover
logger.answer(f'{repository["path"]}: Displaying archive summary information')
info_arguments.archive = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
info_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
json_output = borgmatic.borg.info.display_archives_info(
repository['path'],
storage,
local_borg_version,
info_arguments=info_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)

borgmatic/actions/list.py (new file, 43 lines)

@@ -0,0 +1,43 @@
import json
import logging
import borgmatic.borg.list
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_list(
repository, storage, local_borg_version, list_arguments, local_path, remote_path,
):
'''
Run the "list" action for the given repository and archive.
If list_arguments.json is True, yield the JSON output from listing the archive.
'''
if list_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, list_arguments.repository
):
if not list_arguments.json: # pragma: nocover
if list_arguments.find_paths:
logger.answer(f'{repository["path"]}: Searching archives')
elif not list_arguments.archive:
logger.answer(f'{repository["path"]}: Listing archives')
list_arguments.archive = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
list_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
json_output = borgmatic.borg.list.list_archive(
repository['path'],
storage,
local_borg_version,
list_arguments=list_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)


@@ -0,0 +1,44 @@
import logging
import borgmatic.borg.mount
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_mount(
repository, storage, local_borg_version, mount_arguments, local_path, remote_path,
):
'''
Run the "mount" action for the given repository.
'''
if mount_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, mount_arguments.repository
):
if mount_arguments.archive:
logger.info(
'{}: Mounting archive {}'.format(repository['path'], mount_arguments.archive)
)
else: # pragma: nocover
logger.info('{}: Mounting repository'.format(repository['path']))
borgmatic.borg.mount.mount_archive(
repository['path'],
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
mount_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
),
mount_arguments.mount_point,
mount_arguments.paths,
mount_arguments.foreground,
mount_arguments.options,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
)


@@ -0,0 +1,59 @@
import logging
import borgmatic.borg.prune
import borgmatic.config.validate
import borgmatic.hooks.command
logger = logging.getLogger(__name__)
def run_prune(
config_filename,
repository,
storage,
retention,
hooks,
hook_context,
local_borg_version,
prune_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
):
'''
Run the "prune" action for the given repository.
'''
if prune_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, prune_arguments.repository
):
return
borgmatic.hooks.command.execute_hook(
hooks.get('before_prune'),
hooks.get('umask'),
config_filename,
'pre-prune',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Pruning archives{}'.format(repository['path'], dry_run_label))
borgmatic.borg.prune.prune_archives(
global_arguments.dry_run,
repository['path'],
storage,
retention,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
stats=prune_arguments.stats,
list_archives=prune_arguments.list_archives,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_prune'),
hooks.get('umask'),
config_filename,
'post-prune',
global_arguments.dry_run,
**hook_context,
)


@@ -0,0 +1,40 @@
import logging
import borgmatic.borg.rcreate
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_rcreate(
repository,
storage,
local_borg_version,
rcreate_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "rcreate" action for the given repository.
'''
if rcreate_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, rcreate_arguments.repository
):
return
logger.info('{}: Creating repository'.format(repository['path']))
borgmatic.borg.rcreate.create_repository(
global_arguments.dry_run,
repository['path'],
storage,
local_borg_version,
rcreate_arguments.encryption_mode,
rcreate_arguments.source_repository,
rcreate_arguments.copy_crypt_key,
rcreate_arguments.append_only,
rcreate_arguments.storage_quota,
rcreate_arguments.make_parent_dirs,
local_path=local_path,
remote_path=remote_path,
)


@@ -0,0 +1,357 @@
import copy
import logging
import os
import borgmatic.borg.extract
import borgmatic.borg.list
import borgmatic.borg.mount
import borgmatic.borg.rlist
import borgmatic.borg.state
import borgmatic.config.validate
import borgmatic.hooks.dispatch
import borgmatic.hooks.dump
logger = logging.getLogger(__name__)
UNSPECIFIED_HOOK = object()
def get_configured_database(
hooks, archive_database_names, hook_name, database_name, configuration_database_name=None
):
'''
Find the first database with the given hook name and database name in the configured hooks
dict and the given archive database names dict (from hook name to database names contained in
a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
hooks for the named database. If a configuration database name is given, use that instead of the
database name to lookup the database in the given hooks configuration.
Return the found database as a tuple of (found hook name, database configuration dict).
'''
if not configuration_database_name:
configuration_database_name = database_name
if hook_name == UNSPECIFIED_HOOK:
hooks_to_search = hooks
else:
hooks_to_search = {hook_name: hooks[hook_name]}
return next(
(
(name, hook_database)
for (name, hook) in hooks_to_search.items()
for hook_database in hook
if hook_database['name'] == configuration_database_name
and database_name in archive_database_names.get(name, [])
),
(None, None),
)
def get_configured_hook_name_and_database(hooks, database_name):
'''
Find the hook name and first database dict with the given database name in the configured hooks
dict. This searches across all database hooks.
'''
def restore_single_database(
repository,
location,
storage,
hooks,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
hook_name,
database,
): # pragma: no cover
'''
Given (among other things) an archive name, a database hook name, and a configured database
configuration dict, restore that database from the archive.
'''
logger.info(f'{repository}: Restoring database {database["name"]}')
dump_pattern = borgmatic.hooks.dispatch.call_hooks(
'make_database_dump_pattern',
hooks,
repository,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
database['name'],
)[hook_name]
# Kick off a single database extract to stdout.
extract_process = borgmatic.borg.extract.extract_archive(
dry_run=global_arguments.dry_run,
repository=repository,
archive=archive_name,
paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
location_config=location,
storage_config=storage,
local_borg_version=local_borg_version,
local_path=local_path,
remote_path=remote_path,
destination_path='/',
# A directory format dump isn't a single file, and therefore can't extract
# to stdout. In this case, the extract_process return value is None.
extract_to_stdout=bool(database.get('format') != 'directory'),
)
# Run a single database restore, consuming the extract stdout (if any).
borgmatic.hooks.dispatch.call_hooks(
'restore_database_dump',
{hook_name: [database]},
repository,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
extract_process,
)
def collect_archive_database_names(
repository, archive, location, storage, local_borg_version, local_path, remote_path,
):
'''
Given a local or remote repository path, a resolved archive name, a location configuration dict,
a storage configuration dict, the local Borg version, and local and remote Borg paths, query the
archive for the names of databases it contains and return them as a dict from hook name to a
sequence of database names.
'''
borgmatic_source_directory = os.path.expanduser(
location.get(
'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
)
).lstrip('/')
parent_dump_path = os.path.expanduser(
borgmatic.hooks.dump.make_database_dump_path(borgmatic_source_directory, '*_databases/*/*')
)
dump_paths = borgmatic.borg.list.capture_archive_listing(
repository,
archive,
storage,
local_borg_version,
list_path=parent_dump_path,
local_path=local_path,
remote_path=remote_path,
)
    # Determine the database names corresponding to the dumps found in the archive and
    # add them to archive_database_names.
archive_database_names = {}
for dump_path in dump_paths:
try:
(hook_name, _, database_name) = dump_path.split(
borgmatic_source_directory + os.path.sep, 1
)[1].split(os.path.sep)[0:3]
except (ValueError, IndexError):
logger.warning(
f'{repository}: Ignoring invalid database dump path "{dump_path}" in archive {archive}'
)
else:
if database_name not in archive_database_names.get(hook_name, []):
archive_database_names.setdefault(hook_name, []).extend([database_name])
return archive_database_names
def find_databases_to_restore(requested_database_names, archive_database_names):
'''
Given a sequence of requested database names to restore and a dict of hook name to the names of
databases found in an archive, return an expanded sequence of database names to restore,
replacing "all" with actual database names as appropriate.
Raise ValueError if any of the requested database names cannot be found in the archive.
'''
# A map from database hook name to the database names to restore for that hook.
restore_names = (
{UNSPECIFIED_HOOK: requested_database_names}
if requested_database_names
else {UNSPECIFIED_HOOK: ['all']}
)
# If "all" is in restore_names, then replace it with the names of dumps found within the
# archive.
if 'all' in restore_names[UNSPECIFIED_HOOK]:
restore_names[UNSPECIFIED_HOOK].remove('all')
for (hook_name, database_names) in archive_database_names.items():
restore_names.setdefault(hook_name, []).extend(database_names)
# If a database is to be restored as part of "all", then remove it from restore names so
# it doesn't get restored twice.
for database_name in database_names:
if database_name in restore_names[UNSPECIFIED_HOOK]:
restore_names[UNSPECIFIED_HOOK].remove(database_name)
if not restore_names[UNSPECIFIED_HOOK]:
restore_names.pop(UNSPECIFIED_HOOK)
combined_restore_names = set(
name for database_names in restore_names.values() for name in database_names
)
combined_archive_database_names = set(
name for database_names in archive_database_names.values() for name in database_names
)
missing_names = sorted(set(combined_restore_names) - combined_archive_database_names)
if missing_names:
joined_names = ', '.join(f'"{name}"' for name in missing_names)
raise ValueError(
f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from archive"
)
return restore_names
def ensure_databases_found(restore_names, remaining_restore_names, found_names):
'''
Given a dict from hook name to database names to restore, a dict from hook name to remaining
database names to restore, and a sequence of found (actually restored) database names, raise
ValueError if requested databases to restore were missing from the archive and/or configuration.
'''
combined_restore_names = set(
name
for database_names in tuple(restore_names.values())
+ tuple(remaining_restore_names.values())
for name in database_names
)
if not combined_restore_names and not found_names:
raise ValueError('No databases were found to restore')
missing_names = sorted(set(combined_restore_names) - set(found_names))
if missing_names:
joined_names = ', '.join(f'"{name}"' for name in missing_names)
raise ValueError(
f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from borgmatic's configuration"
)
def run_restore(
repository,
location,
storage,
hooks,
local_borg_version,
restore_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "restore" action for the given repository, but only if the repository matches the
requested repository in restore arguments.
Raise ValueError if a configured database could not be found to restore.
'''
if restore_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, restore_arguments.repository
):
return
logger.info(
f'{repository["path"]}: Restoring databases from archive {restore_arguments.archive}'
)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
restore_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
archive_database_names = collect_archive_database_names(
repository['path'],
archive_name,
location,
storage,
local_borg_version,
local_path,
remote_path,
)
restore_names = find_databases_to_restore(restore_arguments.databases, archive_database_names)
found_names = set()
remaining_restore_names = {}
for hook_name, database_names in restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
hooks, archive_database_names, hook_name, database_name
)
if not found_database:
remaining_restore_names.setdefault(found_hook_name or hook_name, []).append(
database_name
)
continue
found_names.add(database_name)
restore_single_database(
repository['path'],
location,
storage,
hooks,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
found_hook_name or hook_name,
found_database,
)
    # For any databases that weren't found via exact matches in the hooks configuration, try to
    # fall back to "all" entries.
for hook_name, database_names in remaining_restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
hooks, archive_database_names, hook_name, database_name, 'all'
)
if not found_database:
continue
found_names.add(database_name)
database = copy.copy(found_database)
database['name'] = database_name
restore_single_database(
repository['path'],
location,
storage,
hooks,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
found_hook_name or hook_name,
database,
)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
ensure_databases_found(restore_names, remaining_restore_names, found_names)
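For context on the "all" fallback above, here is a minimal database hook sketch; the hook name follows borgmatic's documented PostgreSQL hook, and the "all" entry matches every database found in the archive:

    hooks:
        postgresql_databases:
            - name: all

Running "borgmatic restore --archive latest" without "--database" then maps each dump found in the archive onto this "all" entry, which is what the second loop above implements.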


@@ -0,0 +1,34 @@
import json
import logging
import borgmatic.borg.rinfo
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_rinfo(
repository, storage, local_borg_version, rinfo_arguments, local_path, remote_path,
):
'''
Run the "rinfo" action for the given repository.
If rinfo_arguments.json is True, yield the JSON output from the info for the repository.
'''
if rinfo_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, rinfo_arguments.repository
):
if not rinfo_arguments.json: # pragma: nocover
logger.answer(
'{}: Displaying repository summary information'.format(repository['path'])
)
json_output = borgmatic.borg.rinfo.display_repository_info(
repository['path'],
storage,
local_borg_version,
rinfo_arguments=rinfo_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)


@@ -0,0 +1,32 @@
import json
import logging
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_rlist(
repository, storage, local_borg_version, rlist_arguments, local_path, remote_path,
):
'''
Run the "rlist" action for the given repository.
If rlist_arguments.json is True, yield the JSON output from listing the repository.
'''
if rlist_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, rlist_arguments.repository
):
if not rlist_arguments.json: # pragma: nocover
logger.answer('{}: Listing repository'.format(repository['path']))
json_output = borgmatic.borg.rlist.list_repository(
repository['path'],
storage,
local_borg_version,
rlist_arguments=rlist_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)


@@ -0,0 +1,29 @@
import logging
import borgmatic.borg.transfer
logger = logging.getLogger(__name__)
def run_transfer(
repository,
storage,
local_borg_version,
transfer_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "transfer" action for the given repository.
'''
logger.info(f'{repository}: Transferring archives to repository')
borgmatic.borg.transfer.transfer_archives(
global_arguments.dry_run,
repository,
storage,
local_borg_version,
transfer_arguments,
local_path=local_path,
remote_path=remote_path,
)


@@ -1,5 +1,6 @@
import logging
import borgmatic.logger
from borgmatic.borg import environment, flags
from borgmatic.execute import execute_command
@@ -12,7 +13,7 @@ BORG_SUBCOMMANDS_WITHOUT_REPOSITORY = (('debug', 'info'), ('debug', 'convert-pro
def run_arbitrary_borg(
repository,
repository_path,
storage_config,
local_borg_version,
options,
@@ -25,6 +26,7 @@ def run_arbitrary_borg(
sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary
Borg command on the given repository/archive.
'''
borgmatic.logger.add_custom_log_levels()
lock_wait = storage_config.get('lock_wait', None)
try:
@ -42,10 +44,10 @@ def run_arbitrary_borg(
repository_archive_flags = ()
elif archive:
repository_archive_flags = flags.make_repository_archive_flags(
repository, archive, local_borg_version
repository_path, archive, local_borg_version
)
else:
repository_archive_flags = flags.make_repository_flags(repository, local_borg_version)
repository_archive_flags = flags.make_repository_flags(repository_path, local_borg_version)
full_command = (
(local_path,)
@ -60,7 +62,7 @@ def run_arbitrary_borg(
return execute_command(
full_command,
output_log_level=logging.WARNING,
output_log_level=logging.ANSWER,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
)

View File

@ -7,7 +7,7 @@ logger = logging.getLogger(__name__)
def break_lock(
repository, storage_config, local_borg_version, local_path='borg', remote_path=None,
repository_path, storage_config, local_borg_version, local_path='borg', remote_path=None,
):
'''
Given a local or remote repository path, a storage configuration dict, the local Borg version,
@ -24,7 +24,7 @@ def break_lock(
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
+ (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
)
borg_environment = environment.make_environment(storage_config)

View File

@ -12,7 +12,7 @@ DEFAULT_CHECKS = (
{'name': 'repository', 'frequency': '1 month'},
{'name': 'archives', 'frequency': '1 month'},
)
DEFAULT_PREFIX = '{hostname}-'
DEFAULT_PREFIX = '{hostname}-' # noqa: FS003
logger = logging.getLogger(__name__)
@ -139,7 +139,7 @@ def filter_checks_on_frequency(
if datetime.datetime.now() < check_time + frequency_delta:
remaining = check_time + frequency_delta - datetime.datetime.now()
logger.info(
f"Skipping {check} check due to configured frequency; {remaining} until next check"
f'Skipping {check} check due to configured frequency; {remaining} until next check'
)
filtered_checks.remove(check)
@ -166,6 +166,12 @@ def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
"--last" flag. And if a prefix value is given and "archives" is in checks, then include a
"--match-archives" flag.
'''
if 'data' in checks:
data_flags = ('--verify-data',)
checks += ('archives',)
else:
data_flags = ()
if 'archives' in checks:
last_flags = ('--last', str(check_last)) if check_last else ()
if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
@ -176,17 +182,13 @@ def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
last_flags = ()
match_archives_flags = ()
if check_last:
logger.info('Ignoring check_last option, as "archives" is not in consistency checks')
if prefix:
logger.info(
'Ignoring consistency prefix option, as "archives" is not in consistency checks'
logger.warning(
'Ignoring check_last option, as "archives" or "data" are not in consistency checks'
)
if prefix:
logger.warning(
'Ignoring consistency prefix option, as "archives" or "data" are not in consistency checks'
)
if 'data' in checks:
data_flags = ('--verify-data',)
checks += ('archives',)
else:
data_flags = ()
common_flags = last_flags + match_archives_flags + data_flags
@ -194,7 +196,7 @@ def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
return common_flags
return (
tuple('--{}-only'.format(check) for check in checks if check in ('repository', 'archives'))
tuple(f'--{check}-only' for check in checks if check in ('repository', 'archives'))
+ common_flags
)
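The relocation of the "data" handling above the "archives" branch is the substantive fix in this hunk: a "data" check appends "archives" to the checks tuple, so doing that first lets the check_last and prefix logic see the implied archives check. A minimal sketch of the expected behavior, using the version strings borgmatic passes for local_borg_version; the exact tuple is an illustrative expectation, not diff output.

make_check_flags('1.2.3', ('data',), check_last=3)
# -> ('--archives-only', '--last', '3', '--verify-data'), with no
#    "Ignoring check_last option" warning, since "archives" is now implied.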
@ -241,7 +243,7 @@ def read_check_time(path):
def check_archives(
repository,
repository_path,
location_config,
storage_config,
consistency_config,
@ -266,7 +268,7 @@ def check_archives(
try:
borg_repository_id = json.loads(
rinfo.display_repository_info(
repository,
repository_path,
storage_config,
local_borg_version,
argparse.Namespace(json=True),
@ -275,7 +277,7 @@ def check_archives(
)
)['repository']['id']
except (json.JSONDecodeError, KeyError):
raise ValueError(f'Cannot determine Borg repository ID for {repository}')
raise ValueError(f'Cannot determine Borg repository ID for {repository_path}')
checks = filter_checks_on_frequency(
location_config,
@ -308,7 +310,7 @@ def check_archives(
+ verbosity_flags
+ (('--progress',) if progress else ())
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
)
borg_environment = environment.make_environment(storage_config)
@ -327,6 +329,6 @@ def check_archives(
if 'extract' in checks:
extract.extract_last_archive_dry_run(
storage_config, local_borg_version, repository, lock_wait, local_path, remote_path
storage_config, local_borg_version, repository_path, lock_wait, local_path, remote_path
)
write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract'))

View File

@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
def compact_segments(
dry_run,
repository,
repository_path,
storage_config,
local_borg_version,
local_path='borg',
@ -36,11 +36,11 @@ def compact_segments(
+ (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
)
if dry_run:
logging.info(f'{repository}: Skipping compact (dry run)')
logger.info(f'{repository_path}: Skipping compact (dry run)')
return
execute_command(

View File

@ -6,6 +6,7 @@ import pathlib
import stat
import tempfile
import borgmatic.logger
from borgmatic.borg import environment, feature, flags, state
from borgmatic.execute import (
DO_NOT_CAPTURE,
@ -195,7 +196,28 @@ def make_exclude_flags(location_config, exclude_filename=None):
)
DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
def make_list_filter_flags(local_borg_version, dry_run):
'''
Given the local Borg version and whether this is a dry run, return the corresponding flags for
passing to "--list --filter". The general idea is that excludes are shown for a dry run or when
the verbosity is debug.
'''
base_flags = 'AME'
show_excludes = logger.isEnabledFor(logging.DEBUG)
if feature.available(feature.Feature.EXCLUDED_FILES_MINUS, local_borg_version):
if show_excludes or dry_run:
return f'{base_flags}+-'
else:
return base_flags
if show_excludes:
return f'{base_flags}x-'
else:
return f'{base_flags}-'
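For quick reference, the branches above resolve to these filter strings (version strings as borgmatic passes them; debug logging off unless noted):

make_list_filter_flags('2.0.0b5', dry_run=True)   # -> 'AME+-'
make_list_filter_flags('2.0.0b5', dry_run=False)  # -> 'AME'
make_list_filter_flags('1.2.3', dry_run=True)     # -> 'AME-' ('AMEx-' with debug logging)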
DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' # noqa: FS003
def collect_borgmatic_source_directories(borgmatic_source_directory):
@ -274,7 +296,7 @@ def collect_special_file_paths(
paths = tuple(
path_line.split(' ', 1)[1]
for path_line in paths_output.split('\n')
if path_line and path_line.startswith('- ')
if path_line and (path_line.startswith('- ') or path_line.startswith('+ '))
)
return tuple(
@ -284,9 +306,23 @@ def collect_special_file_paths(
)
def check_all_source_directories_exist(source_directories):
'''
Given a sequence of source directories, check that they all exist. If any do not, raise an
exception.
'''
missing_directories = [
source_directory
for source_directory in source_directories
if not os.path.exists(source_directory)
]
if missing_directories:
raise ValueError(f"Source directories do not exist: {', '.join(missing_directories)}")
def create_archive(
dry_run,
repository,
repository_path,
location_config,
storage_config,
local_borg_version,
@ -305,9 +341,12 @@ def create_archive(
If a sequence of stream processes is given (instances of subprocess.Popen), then execute the
create command while also triggering the given processes to produce output.
'''
borgmatic.logger.add_custom_log_levels()
borgmatic_source_directories = expand_directories(
collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
)
if location_config.get('source_directories_must_exist', False):
check_all_source_directories_exist(location_config.get('source_directories'))
sources = deduplicate_directories(
map_directories_to_devices(
expand_directories(
@ -335,11 +374,13 @@ def create_archive(
expand_home_directories(location_config.get('exclude_patterns'))
)
checkpoint_interval = storage_config.get('checkpoint_interval', None)
checkpoint_volume = storage_config.get('checkpoint_volume', None)
chunker_params = storage_config.get('chunker_params', None)
compression = storage_config.get('compression', None)
upload_rate_limit = storage_config.get('upload_rate_limit', None)
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
list_filter_flags = make_list_filter_flags(local_borg_version, dry_run)
files_cache = location_config.get('files_cache')
archive_name_format = storage_config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT)
extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '')
@ -370,7 +411,7 @@ def create_archive(
if stream_processes and location_config.get('read_special') is False:
logger.warning(
f'{repository}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
)
create_command = (
@ -379,6 +420,7 @@ def create_archive(
+ make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
+ make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
+ (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
+ (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ())
+ (('--chunker-params', chunker_params) if chunker_params else ())
+ (('--compression', compression) if compression else ())
+ upload_ratelimit_flags
@ -397,17 +439,23 @@ def create_archive(
+ (('--remote-path', remote_path) if remote_path else ())
+ (('--umask', str(umask)) if umask else ())
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
+ (('--list', '--filter', 'AMEx-') if list_files and not json and not progress else ())
+ (
('--list', '--filter', list_filter_flags)
if list_files and not json and not progress
else ()
)
+ (('--dry-run',) if dry_run else ())
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ flags.make_repository_archive_flags(repository, archive_name_format, local_borg_version)
+ flags.make_repository_archive_flags(
repository_path, archive_name_format, local_borg_version
)
+ (sources if not pattern_file else ())
)
if json:
output_log_level = None
elif (stats or list_files) and logger.getEffectiveLevel() == logging.WARNING:
output_log_level = logging.WARNING
elif list_files or (stats and not dry_run):
output_log_level = logging.ANSWER
else:
output_log_level = logging.INFO
@ -420,7 +468,7 @@ def create_archive(
# If database hooks are enabled (as indicated by streaming processes), exclude files that might
# cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True.
if stream_processes and not location_config.get('read_special'):
logger.debug(f'{repository}: Collecting special file paths')
logger.debug(f'{repository_path}: Collecting special file paths')
special_file_paths = collect_special_file_paths(
create_command,
local_path,
@ -428,18 +476,17 @@ def create_archive(
borg_environment,
skip_directories=borgmatic_source_directories,
)
logger.warning(
f'{repository}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}'
)
exclude_file = write_pattern_file(
expand_home_directories(
tuple(location_config.get('exclude_patterns') or ()) + special_file_paths
),
pattern_file=exclude_file,
)
if exclude_file:
if special_file_paths:
logger.warning(
f'{repository_path}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}'
)
exclude_file = write_pattern_file(
expand_home_directories(
tuple(location_config.get('exclude_patterns') or ()) + special_file_paths
),
pattern_file=exclude_file,
)
create_command += make_exclude_flags(location_config, exclude_file.name)
create_command += (

View File

@ -2,6 +2,7 @@ OPTION_TO_ENVIRONMENT_VARIABLE = {
'borg_base_directory': 'BORG_BASE_DIR',
'borg_config_directory': 'BORG_CONFIG_DIR',
'borg_cache_directory': 'BORG_CACHE_DIR',
'borg_files_cache_ttl': 'BORG_FILES_CACHE_TTL',
'borg_security_directory': 'BORG_SECURITY_DIR',
'borg_keys_directory': 'BORG_KEYS_DIR',
'encryption_passcommand': 'BORG_PASSCOMMAND',
@ -27,7 +28,7 @@ def make_environment(storage_config):
value = storage_config.get(option_name)
if value:
environment[environment_variable_name] = value
environment[environment_variable_name] = str(value)
for (
option_name,

View File

@ -1,6 +1,6 @@
import logging
import os
import borgmatic.logger
from borgmatic.borg import environment, flags
from borgmatic.execute import DO_NOT_CAPTURE, execute_command
@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def export_tar_archive(
dry_run,
repository,
repository_path,
archive,
paths,
destination_path,
@ -30,6 +30,7 @@ def export_tar_archive(
If the destination path is "-", then stream the output to stdout instead of to a file.
'''
borgmatic.logger.add_custom_log_levels()
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
@ -44,22 +45,18 @@ def export_tar_archive(
+ (('--dry-run',) if dry_run else ())
+ (('--tar-filter', tar_filter) if tar_filter else ())
+ (('--strip-components', str(strip_components)) if strip_components else ())
+ flags.make_repository_archive_flags(
repository if ':' in repository else os.path.abspath(repository),
archive,
local_borg_version,
)
+ flags.make_repository_archive_flags(repository_path, archive, local_borg_version,)
+ (destination_path,)
+ (tuple(paths) if paths else ())
)
if list_files and logger.getEffectiveLevel() == logging.WARNING:
output_log_level = logging.WARNING
if list_files:
output_log_level = logging.ANSWER
else:
output_log_level = logging.INFO
if dry_run:
logging.info('{}: Skipping export to tar file (dry run)'.format(repository))
logger.info(f'{repository_path}: Skipping export to tar file (dry run)')
return
execute_command(

View File

@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
def extract_last_archive_dry_run(
storage_config,
local_borg_version,
repository,
repository_path,
lock_wait=None,
local_path='borg',
remote_path=None,
@ -30,7 +30,7 @@ def extract_last_archive_dry_run(
try:
last_archive_name = rlist.resolve_archive_name(
repository, 'latest', storage_config, local_borg_version, local_path, remote_path
repository_path, 'latest', storage_config, local_borg_version, local_path, remote_path
)
except ValueError:
logger.warning('No archives found. Skipping extract consistency check.')
@ -44,7 +44,9 @@ def extract_last_archive_dry_run(
+ lock_wait_flags
+ verbosity_flags
+ list_flag
+ flags.make_repository_archive_flags(repository, last_archive_name, local_borg_version)
+ flags.make_repository_archive_flags(
repository_path, last_archive_name, local_borg_version
)
)
execute_command(
@ -87,6 +89,13 @@ def extract_archive(
else:
numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()
if strip_components == 'all':
if not paths:
raise ValueError('The --strip-components flag with "all" requires at least one --path')
# Calculate the maximum number of leading path components of the given paths.
strip_components = max(0, *(len(path.split(os.path.sep)) - 1 for path in paths))
full_command = (
(local_path, 'extract')
+ (('--remote-path', remote_path) if remote_path else ())
@ -99,11 +108,7 @@ def extract_archive(
+ (('--strip-components', str(strip_components)) if strip_components else ())
+ (('--progress',) if progress else ())
+ (('--stdout',) if extract_to_stdout else ())
+ flags.make_repository_archive_flags(
repository if ':' in repository else os.path.abspath(repository),
archive,
local_borg_version,
)
+ flags.make_repository_archive_flags(repository, archive, local_borg_version,)
+ (tuple(paths) if paths else ())
)
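A worked example of the strip_components == 'all' computation above, assuming the POSIX path separator:

paths = ('etc/ssh/sshd_config', 'etc/passwd')
max(0, *(len(path.split('/')) - 1 for path in paths))  # -> max(0, 2, 1) == 2
# So --strip-components 2 is passed, stripping both leading directories from
# every extracted path.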

View File

@ -14,6 +14,7 @@ class Feature(Enum):
RLIST = 8
RINFO = 9
MATCH_ARCHIVES = 10
EXCLUDED_FILES_MINUS = 11
FEATURE_TO_MINIMUM_BORG_VERSION = {
@ -27,6 +28,7 @@ FEATURE_TO_MINIMUM_BORG_VERSION = {
Feature.RLIST: parse_version('2.0.0a2'), # borg rlist
Feature.RINFO: parse_version('2.0.0a2'), # borg rinfo
Feature.MATCH_ARCHIVES: parse_version('2.0.0b3'), # borg --match-archives
Feature.EXCLUDED_FILES_MINUS: parse_version('2.0.0b5'), # --list --filter uses "-" for excludes
}

View File

@ -10,7 +10,7 @@ def make_flags(name, value):
if not value:
return ()
flag = '--{}'.format(name.replace('_', '-'))
flag = f"--{name.replace('_', '-')}"
if value is True:
return (flag,)
@ -33,7 +33,7 @@ def make_flags_from_arguments(arguments, excludes=()):
)
def make_repository_flags(repository, local_borg_version):
def make_repository_flags(repository_path, local_borg_version):
'''
Given the path of a Borg repository and the local Borg version, return Borg-version-appropriate
command-line flags (as a tuple) for selecting that repository.
@ -42,17 +42,17 @@ def make_repository_flags(repository, local_borg_version):
('--repo',)
if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
else ()
) + (repository,)
) + (repository_path,)
def make_repository_archive_flags(repository, archive, local_borg_version):
def make_repository_archive_flags(repository_path, archive, local_borg_version):
'''
Given the path of a Borg repository, an archive name or pattern, and the local Borg version,
return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository
and archive.
'''
return (
('--repo', repository, archive)
('--repo', repository_path, archive)
if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
else (f'{repository}::{archive}',)
else (f'{repository_path}::{archive}',)
)
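For reference, the renamed helpers resolve as follows by Borg version (repository path and archive name are placeholders):

make_repository_flags('/backups/repo.borg', '1.2.3')
# -> ('/backups/repo.borg',)
make_repository_flags('/backups/repo.borg', '2.0.0b5')
# -> ('--repo', '/backups/repo.borg')
make_repository_archive_flags('/backups/repo.borg', 'myhost-2023-03-31', '1.2.3')
# -> ('/backups/repo.borg::myhost-2023-03-31',)
make_repository_archive_flags('/backups/repo.borg', 'myhost-2023-03-31', '2.0.0b5')
# -> ('--repo', '/backups/repo.borg', 'myhost-2023-03-31')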

View File

@ -1,5 +1,6 @@
import logging
import borgmatic.logger
from borgmatic.borg import environment, feature, flags
from borgmatic.execute import execute_command, execute_command_and_capture_output
@ -7,7 +8,7 @@ logger = logging.getLogger(__name__)
def display_archives_info(
repository,
repository_path,
storage_config,
local_borg_version,
info_arguments,
@ -19,6 +20,7 @@ def display_archives_info(
arguments to the info action, display summary information for Borg archives in the repository or
return JSON summary information.
'''
borgmatic.logger.add_custom_log_levels()
lock_wait = storage_config.get('lock_wait', None)
full_command = (
@ -47,7 +49,7 @@ def display_archives_info(
+ flags.make_flags_from_arguments(
info_arguments, excludes=('repository', 'archive', 'prefix')
)
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
+ (
flags.make_flags('match-archives', info_arguments.archive)
if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
@ -62,7 +64,7 @@ def display_archives_info(
else:
execute_command(
full_command,
output_log_level=logging.WARNING,
output_log_level=logging.ANSWER,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
)

View File

@ -3,6 +3,7 @@ import copy
import logging
import re
import borgmatic.logger
from borgmatic.borg import environment, feature, flags, rlist
from borgmatic.execute import execute_command, execute_command_and_capture_output
@ -20,7 +21,7 @@ MAKE_FLAGS_EXCLUDES = (
def make_list_command(
repository,
repository_path,
storage_config,
local_borg_version,
list_arguments,
@ -51,10 +52,10 @@ def make_list_command(
+ flags.make_flags_from_arguments(list_arguments, excludes=MAKE_FLAGS_EXCLUDES)
+ (
flags.make_repository_archive_flags(
repository, list_arguments.archive, local_borg_version
repository_path, list_arguments.archive, local_borg_version
)
if list_arguments.archive
else flags.make_repository_flags(repository, local_borg_version)
else flags.make_repository_flags(repository_path, local_borg_version)
)
+ (tuple(list_arguments.paths) if list_arguments.paths else ())
)
@ -84,8 +85,48 @@ def make_find_paths(find_paths):
)
def capture_archive_listing(
repository_path,
archive,
storage_config,
local_borg_version,
list_path=None,
local_path='borg',
remote_path=None,
):
'''
Given a local or remote repository path, an archive name, a storage config dict, the local Borg
version, the archive path in which to list files, and local and remote Borg paths, capture the
output of listing that archive and return it as a list of file paths.
'''
borg_environment = environment.make_environment(storage_config)
return tuple(
execute_command_and_capture_output(
make_list_command(
repository_path,
storage_config,
local_borg_version,
argparse.Namespace(
repository=repository_path,
archive=archive,
paths=[f'sh:{list_path}'],
find_paths=None,
json=None,
format='{path}{NL}', # noqa: FS003
),
local_path,
remote_path,
),
extra_environment=borg_environment,
)
.strip('\n')
.split('\n')
)
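A hypothetical call to the new capture_archive_listing helper; the repository path, archive name, Borg version, and glob are placeholders, and the result shown is only an example of the returned shape.

dump_paths = capture_archive_listing(
    '/backups/repo.borg',
    'myhost-2023-03-31T12:00:00',
    storage_config={},
    local_borg_version='2.0.0b5',
    list_path='.borgmatic/*_databases/*/*',
)
# -> e.g. ('.borgmatic/postgresql_databases/localhost/mydb',)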
def list_archive(
repository,
repository_path,
storage_config,
local_borg_version,
list_arguments,
@ -99,6 +140,8 @@ def list_archive(
list the files by searching across multiple archives. If neither find_paths nor archive name
are given, instead list the archives in the given repository.
'''
borgmatic.logger.add_custom_log_levels()
if not list_arguments.archive and not list_arguments.find_paths:
if feature.available(feature.Feature.RLIST, local_borg_version):
logger.warning(
@ -106,7 +149,7 @@ def list_archive(
)
rlist_arguments = argparse.Namespace(
repository=repository,
repository=repository_path,
short=list_arguments.short,
format=list_arguments.format,
json=list_arguments.json,
@ -117,7 +160,12 @@ def list_archive(
last=list_arguments.last,
)
return rlist.list_repository(
repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path
repository_path,
storage_config,
local_borg_version,
rlist_arguments,
local_path,
remote_path,
)
if list_arguments.archive:
@ -138,7 +186,7 @@ def list_archive(
# getting a list of archives to search.
if list_arguments.find_paths and not list_arguments.archive:
rlist_arguments = argparse.Namespace(
repository=repository,
repository=repository_path,
short=True,
format=None,
json=None,
@ -153,7 +201,7 @@ def list_archive(
archive_lines = tuple(
execute_command_and_capture_output(
rlist.make_rlist_command(
repository,
repository_path,
storage_config,
local_borg_version,
rlist_arguments,
@ -170,7 +218,7 @@ def list_archive(
# For each archive listed by Borg, run list on the contents of that archive.
for archive in archive_lines:
logger.warning(f'{repository}: Listing archive {archive}')
logger.answer(f'{repository_path}: Listing archive {archive}')
archive_arguments = copy.copy(list_arguments)
archive_arguments.archive = archive
@ -181,7 +229,7 @@ def list_archive(
setattr(archive_arguments, name, None)
main_command = make_list_command(
repository,
repository_path,
storage_config,
local_borg_version,
archive_arguments,
@ -191,7 +239,7 @@ def list_archive(
execute_command(
main_command,
output_log_level=logging.WARNING,
output_log_level=logging.ANSWER,
borg_local_path=local_path,
extra_environment=borg_environment,
)

View File

@ -7,7 +7,7 @@ logger = logging.getLogger(__name__)
def mount_archive(
repository,
repository_path,
archive,
mount_point,
paths,
@ -38,7 +38,7 @@ def mount_archive(
+ (('-o', options) if options else ())
+ (
(
flags.make_repository_flags(repository, local_borg_version)
flags.make_repository_flags(repository_path, local_borg_version)
+ (
('--match-archives', archive)
if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
@ -47,9 +47,9 @@ def mount_archive(
)
if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
else (
flags.make_repository_archive_flags(repository, archive, local_borg_version)
flags.make_repository_archive_flags(repository_path, archive, local_borg_version)
if archive
else flags.make_repository_flags(repository, local_borg_version)
else flags.make_repository_flags(repository_path, local_borg_version)
)
)
+ (mount_point,)

View File

@ -1,5 +1,6 @@
import logging
import borgmatic.logger
from borgmatic.borg import environment, feature, flags
from borgmatic.execute import execute_command
@ -23,7 +24,7 @@ def make_prune_flags(retention_config, local_borg_version):
)
'''
config = retention_config.copy()
prefix = config.pop('prefix', '{hostname}-')
prefix = config.pop('prefix', '{hostname}-') # noqa: FS003
if prefix:
if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
@ -38,7 +39,7 @@ def make_prune_flags(retention_config, local_borg_version):
def prune_archives(
dry_run,
repository,
repository_path,
storage_config,
retention_config,
local_borg_version,
@ -52,6 +53,7 @@ def prune_archives(
retention config dict, prune Borg archives according to the retention policy specified in that
configuration.
'''
borgmatic.logger.add_custom_log_levels()
umask = storage_config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None)
extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '')
@ -72,11 +74,11 @@ def prune_archives(
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ (('--dry-run',) if dry_run else ())
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
)
if (stats or list_archives) and logger.getEffectiveLevel() == logging.WARNING:
output_log_level = logging.WARNING
if stats or list_archives:
output_log_level = logging.ANSWER
else:
output_log_level = logging.INFO

View File

@ -13,7 +13,7 @@ RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2
def create_repository(
dry_run,
repository,
repository_path,
storage_config,
local_borg_version,
encryption_mode,
@ -33,14 +33,14 @@ def create_repository(
'''
try:
rinfo.display_repository_info(
repository,
repository_path,
storage_config,
local_borg_version,
argparse.Namespace(json=True),
local_path,
remote_path,
)
logger.info(f'{repository}: Repository already exists. Skipping creation.')
logger.info(f'{repository_path}: Repository already exists. Skipping creation.')
return
except subprocess.CalledProcessError as error:
if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
@ -65,11 +65,11 @@ def create_repository(
+ (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
+ (('--remote-path', remote_path) if remote_path else ())
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
)
if dry_run:
logging.info(f'{repository}: Skipping repository creation (dry run)')
logger.info(f'{repository_path}: Skipping repository creation (dry run)')
return
# Do not capture output here, so as to support interactive prompts.

View File

@ -1,5 +1,6 @@
import logging
import borgmatic.logger
from borgmatic.borg import environment, feature, flags
from borgmatic.execute import execute_command, execute_command_and_capture_output
@ -7,7 +8,7 @@ logger = logging.getLogger(__name__)
def display_repository_info(
repository,
repository_path,
storage_config,
local_borg_version,
rinfo_arguments,
@ -19,6 +20,7 @@ def display_repository_info(
arguments to the rinfo action, display summary information for the Borg repository or return
JSON summary information.
'''
borgmatic.logger.add_custom_log_levels()
lock_wait = storage_config.get('lock_wait', None)
full_command = (
@ -41,7 +43,7 @@ def display_repository_info(
+ flags.make_flags('remote-path', remote_path)
+ flags.make_flags('lock-wait', lock_wait)
+ (('--json',) if rinfo_arguments.json else ())
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
)
extra_environment = environment.make_environment(storage_config)
@ -53,7 +55,7 @@ def display_repository_info(
else:
execute_command(
full_command,
output_log_level=logging.WARNING,
output_log_level=logging.ANSWER,
borg_local_path=local_path,
extra_environment=extra_environment,
)

View File

@ -1,5 +1,6 @@
import logging
import borgmatic.logger
from borgmatic.borg import environment, feature, flags
from borgmatic.execute import execute_command, execute_command_and_capture_output
@ -7,7 +8,12 @@ logger = logging.getLogger(__name__)
def resolve_archive_name(
repository, archive, storage_config, local_borg_version, local_path='borg', remote_path=None
repository_path,
archive,
storage_config,
local_borg_version,
local_path='borg',
remote_path=None,
):
'''
Given a local or remote repository path, an archive name, a storage config dict, a local Borg
@ -16,7 +22,7 @@ def resolve_archive_name(
Raise ValueError if "latest" is given but there are no archives in the repository.
'''
if archive != "latest":
if archive != 'latest':
return archive
lock_wait = storage_config.get('lock_wait', None)
@ -30,7 +36,7 @@ def resolve_archive_name(
+ flags.make_flags('lock-wait', lock_wait)
+ flags.make_flags('last', 1)
+ ('--short',)
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
)
output = execute_command_and_capture_output(
@ -41,7 +47,7 @@ def resolve_archive_name(
except IndexError:
raise ValueError('No archives found in the repository')
logger.debug('{}: Latest archive is {}'.format(repository, latest_archive))
logger.debug(f'{repository_path}: Latest archive is {latest_archive}')
return latest_archive
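In brief (illustrative): any archive name other than 'latest' passes through unchanged with no Borg invocation, while 'latest' shells out to find the newest archive.

resolve_archive_name('/backups/repo.borg', 'myhost-2023-03-31', {}, '2.0.0b5')
# -> 'myhost-2023-03-31' (returned as-is)
resolve_archive_name('/backups/repo.borg', 'latest', {}, '2.0.0b5')
# Runs roughly "borg rlist --last 1 --short --repo /backups/repo.borg" and
# returns the newest archive name, raising ValueError if there are none.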
@ -50,7 +56,7 @@ MAKE_FLAGS_EXCLUDES = ('repository', 'prefix')
def make_rlist_command(
repository,
repository_path,
storage_config,
local_borg_version,
rlist_arguments,
@ -91,12 +97,12 @@ def make_rlist_command(
else ()
)
+ flags.make_flags_from_arguments(rlist_arguments, excludes=MAKE_FLAGS_EXCLUDES)
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
)
def list_repository(
repository,
repository_path,
storage_config,
local_borg_version,
rlist_arguments,
@ -108,18 +114,24 @@ def list_repository(
arguments to the list action, and local and remote Borg paths, display the output of listing
Borg archives in the given repository (or return JSON output).
'''
borgmatic.logger.add_custom_log_levels()
borg_environment = environment.make_environment(storage_config)
main_command = make_rlist_command(
repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path
repository_path,
storage_config,
local_borg_version,
rlist_arguments,
local_path,
remote_path,
)
if rlist_arguments.json:
return execute_command_and_capture_output(main_command, extra_environment=borg_environment,)
return execute_command_and_capture_output(main_command, extra_environment=borg_environment)
else:
execute_command(
main_command,
output_log_level=logging.WARNING,
output_log_level=logging.ANSWER,
borg_local_path=local_path,
extra_environment=borg_environment,
)

View File

@ -1,14 +1,15 @@
import logging
import borgmatic.logger
from borgmatic.borg import environment, flags
from borgmatic.execute import execute_command
from borgmatic.execute import DO_NOT_CAPTURE, execute_command
logger = logging.getLogger(__name__)
def transfer_archives(
dry_run,
repository,
repository_path,
storage_config,
local_borg_version,
transfer_arguments,
@ -19,12 +20,15 @@ def transfer_archives(
Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg
version, and the arguments to the transfer action, transfer archives to the given repository.
'''
borgmatic.logger.add_custom_log_levels()
full_command = (
(local_path, 'transfer')
+ (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ flags.make_flags('remote-path', remote_path)
+ flags.make_flags('lock-wait', storage_config.get('lock_wait', None))
+ (('--progress',) if transfer_arguments.progress else ())
+ (
flags.make_flags(
'match-archives', transfer_arguments.match_archives or transfer_arguments.archive
@ -34,14 +38,15 @@ def transfer_archives(
transfer_arguments,
excludes=('repository', 'source_repository', 'archive', 'match_archives'),
)
+ flags.make_repository_flags(repository, local_borg_version)
+ flags.make_repository_flags(repository_path, local_borg_version)
+ flags.make_flags('other-repo', transfer_arguments.source_repository)
+ flags.make_flags('dry-run', dry_run)
)
return execute_command(
full_command,
output_log_level=logging.WARNING,
output_log_level=logging.ANSWER,
output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
)

View File

@ -46,11 +46,12 @@ def parse_subparser_arguments(unparsed_arguments, subparsers):
if 'borg' in unparsed_arguments:
subparsers = {'borg': subparsers['borg']}
for subparser_name, subparser in subparsers.items():
if subparser_name not in remaining_arguments:
continue
for argument in remaining_arguments:
canonical_name = alias_to_subparser_name.get(argument, argument)
subparser = subparsers.get(canonical_name)
canonical_name = alias_to_subparser_name.get(subparser_name, subparser_name)
if not subparser:
continue
# If a parsed value happens to be the same as the name of a subparser, remove it from the
# remaining arguments. This prevents, for instance, "check --only extract" from triggering
@ -67,9 +68,9 @@ def parse_subparser_arguments(unparsed_arguments, subparsers):
arguments[canonical_name] = parsed
# If no actions are explicitly requested, assume defaults: prune, compact, create, and check.
# If no actions are explicitly requested, assume defaults.
if not arguments and '--help' not in unparsed_arguments and '-h' not in unparsed_arguments:
for subparser_name in ('prune', 'compact', 'create', 'check'):
for subparser_name in ('create', 'prune', 'compact', 'check'):
subparser = subparsers[subparser_name]
parsed, unused_remaining = subparser.parse_known_args(unparsed_arguments)
arguments[subparser_name] = parsed
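With the reordered defaults, a bare borgmatic invocation now parses into actions in create-first order. A sketch, assuming the function's existing return of parsed arguments plus remaining arguments:

arguments, remaining_arguments = parse_subparser_arguments((), subparsers)
list(arguments)  # -> ['create', 'prune', 'compact', 'check']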
@ -130,9 +131,7 @@ def make_parsers():
nargs='*',
dest='config_paths',
default=config_paths,
help='Configuration filenames or directories, defaults to: {}'.format(
' '.join(unexpanded_config_paths)
),
help=f"Configuration filenames or directories, defaults to: {' '.join(unexpanded_config_paths)}",
)
global_group.add_argument(
'--excludes',
@ -215,7 +214,7 @@ def make_parsers():
top_level_parser = ArgumentParser(
description='''
Simple, configuration-driven backup software for servers and workstations. If none of
the action options are given, then borgmatic defaults to: prune, compact, create, and
the action options are given, then borgmatic defaults to: create, prune, compact, and
check.
''',
parents=[global_parser],
@ -224,7 +223,7 @@ def make_parsers():
subparsers = top_level_parser.add_subparsers(
title='actions',
metavar='',
help='Specify zero or more actions. Defaults to prune, compact, create, and check. Use --help with action for details:',
help='Specify zero or more actions. Defaults to create, prune, compact, and check. Use --help with action for details:',
)
rcreate_parser = subparsers.add_parser(
'rcreate',
@ -247,6 +246,10 @@ def make_parsers():
metavar='KEY_REPOSITORY',
help='Path to an existing Borg repository whose key material should be reused (Borg 2.x+ only)',
)
rcreate_group.add_argument(
'--repository',
help='Path of the new repository to create (must be already specified in a borgmatic configuration file), defaults to the configured repository if there is only one',
)
rcreate_group.add_argument(
'--copy-crypt-key',
action='store_true',
@ -292,6 +295,12 @@ def make_parsers():
'--upgrader',
help='Upgrader type used to convert the transferred data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion',
)
transfer_group.add_argument(
'--progress',
default=False,
action='store_true',
help='Display progress as each archive is transferred',
)
transfer_group.add_argument(
'-a',
'--match-archives',
@ -322,6 +331,10 @@ def make_parsers():
add_help=False,
)
prune_group = prune_parser.add_argument_group('prune arguments')
prune_group.add_argument(
'--repository',
help='Path of specific existing repository to prune (must be already specified in a borgmatic configuration file)',
)
prune_group.add_argument(
'--stats',
dest='stats',
@ -337,11 +350,15 @@ def make_parsers():
compact_parser = subparsers.add_parser(
'compact',
aliases=SUBPARSER_ALIASES['compact'],
help='Compact segments to free space (Borg 1.2+ only)',
description='Compact segments to free space (Borg 1.2+ only)',
help='Compact segments to free space (Borg 1.2+, borgmatic 1.5.23+ only)',
description='Compact segments to free space (Borg 1.2+, borgmatic 1.5.23+ only)',
add_help=False,
)
compact_group = compact_parser.add_argument_group('compact arguments')
compact_group.add_argument(
'--repository',
help='Path of specific existing repository to compact (must be already specified in a borgmatic configuration file)',
)
compact_group.add_argument(
'--progress',
dest='progress',
@ -374,6 +391,10 @@ def make_parsers():
add_help=False,
)
create_group = create_parser.add_argument_group('create arguments')
create_group.add_argument(
'--repository',
help='Path of specific existing repository to back up to (must be already specified in a borgmatic configuration file)',
)
create_group.add_argument(
'--progress',
dest='progress',
@ -404,6 +425,10 @@ def make_parsers():
add_help=False,
)
check_group = check_parser.add_argument_group('check arguments')
check_group.add_argument(
'--repository',
help='Path of specific existing repository to check (must be already specified in a borgmatic configuration file)',
)
check_group.add_argument(
'--progress',
dest='progress',
@ -465,10 +490,9 @@ def make_parsers():
)
extract_group.add_argument(
'--strip-components',
type=int,
type=lambda number: number if number == 'all' else int(number),
metavar='NUMBER',
dest='strip_components',
help='Number of leading path components to remove from each extracted path. Skip paths with fewer elements',
help='Number of leading path components to remove from each extracted path or "all" to strip all leading path components. Skip paths with fewer elements',
)
extract_group.add_argument(
'--progress',
@ -601,7 +625,7 @@ def make_parsers():
metavar='NAME',
nargs='+',
dest='databases',
help='Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic\'s configuration',
help="Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic's configuration",
)
restore_group.add_argument(
'-h', '--help', action='help', help='Show this help message and exit'
@ -795,7 +819,7 @@ def make_parsers():
'borg',
aliases=SUBPARSER_ALIASES['borg'],
help='Run an arbitrary Borg command',
description='Run an arbitrary Borg command based on borgmatic\'s configuration',
description="Run an arbitrary Borg command based on borgmatic's configuration",
add_help=False,
)
borg_group = borg_parser.add_argument_group('borg arguments')
@ -833,6 +857,11 @@ def parse_arguments(*unparsed_arguments):
'The --excludes flag has been replaced with exclude_patterns in configuration.'
)
if 'create' in arguments and arguments['create'].list_files and arguments['create'].progress:
raise ValueError(
'With the create action, only one of --list (--files) and --progress flags can be used.'
)
if (
('list' in arguments and 'rinfo' in arguments and arguments['list'].json)
or ('list' in arguments and 'info' in arguments and arguments['list'].json)

View File

@ -1,5 +1,4 @@
import collections
import copy
import json
import logging
import os
@ -11,29 +10,30 @@ from subprocess import CalledProcessError
import colorama
import pkg_resources
import borgmatic.actions.borg
import borgmatic.actions.break_lock
import borgmatic.actions.check
import borgmatic.actions.compact
import borgmatic.actions.create
import borgmatic.actions.export_tar
import borgmatic.actions.extract
import borgmatic.actions.info
import borgmatic.actions.list
import borgmatic.actions.mount
import borgmatic.actions.prune
import borgmatic.actions.rcreate
import borgmatic.actions.restore
import borgmatic.actions.rinfo
import borgmatic.actions.rlist
import borgmatic.actions.transfer
import borgmatic.commands.completion
from borgmatic.borg import borg as borg_borg
from borgmatic.borg import break_lock as borg_break_lock
from borgmatic.borg import check as borg_check
from borgmatic.borg import compact as borg_compact
from borgmatic.borg import create as borg_create
from borgmatic.borg import export_tar as borg_export_tar
from borgmatic.borg import extract as borg_extract
from borgmatic.borg import feature as borg_feature
from borgmatic.borg import info as borg_info
from borgmatic.borg import list as borg_list
from borgmatic.borg import mount as borg_mount
from borgmatic.borg import prune as borg_prune
from borgmatic.borg import rcreate as borg_rcreate
from borgmatic.borg import rinfo as borg_rinfo
from borgmatic.borg import rlist as borg_rlist
from borgmatic.borg import transfer as borg_transfer
from borgmatic.borg import umount as borg_umount
from borgmatic.borg import version as borg_version
from borgmatic.commands import warning
from borgmatic.commands.arguments import parse_arguments
from borgmatic.config import checks, collect, convert, validate
from borgmatic.hooks import command, dispatch, dump, monitor
from borgmatic.logger import configure_logging, should_do_markup
from borgmatic.hooks import command, dispatch, monitor
from borgmatic.logger import add_custom_log_levels, configure_logging, should_do_markup
from borgmatic.signals import configure_signals
from borgmatic.verbosity import verbosity_to_log_level
@ -45,8 +45,8 @@ LEGACY_CONFIG_PATH = '/etc/borgmatic/config'
def run_configuration(config_filename, config, arguments):
'''
Given a config filename, the corresponding parsed config dict, and command-line arguments as a
dict from subparser name to a namespace of parsed arguments, execute the defined prune, compact,
create, check, and/or other actions.
dict from subparser name to a namespace of parsed arguments, execute the defined create, prune,
compact, check, and/or other actions.
Yield a combination of:
@ -65,15 +65,13 @@ def run_configuration(config_filename, config, arguments):
retry_wait = storage.get('retry_wait', 0)
encountered_error = None
error_repository = ''
using_primary_action = {'prune', 'compact', 'create', 'check'}.intersection(arguments)
using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments)
monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)
try:
local_borg_version = borg_version.local_borg_version(storage, local_path)
except (OSError, CalledProcessError, ValueError) as error:
yield from log_error_records(
'{}: Error getting local Borg version'.format(config_filename), error
)
yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
return
try:
@ -101,7 +99,7 @@ def run_configuration(config_filename, config, arguments):
return
encountered_error = error
yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
yield from log_error_records(f'{config_filename}: Error pinging monitor', error)
if not encountered_error:
repo_queue = Queue()
@ -109,7 +107,8 @@ def run_configuration(config_filename, config, arguments):
repo_queue.put((repo, 0),)
while not repo_queue.empty():
repository_path, retry_num = repo_queue.get()
repository, retry_num = repo_queue.get()
logger.debug(f'{repository["path"]}: Running actions for repository')
timeout = retry_num * retry_wait
if timeout:
logger.warning(f'{config_filename}: Sleeping {timeout}s before next retry')
@ -126,14 +125,14 @@ def run_configuration(config_filename, config, arguments):
local_path=local_path,
remote_path=remote_path,
local_borg_version=local_borg_version,
repository_path=repository_path,
repository=repository,
)
except (OSError, CalledProcessError, ValueError) as error:
if retry_num < retries:
repo_queue.put((repository_path, retry_num + 1),)
repo_queue.put((repository, retry_num + 1),)
tuple( # Consume the generator so as to trigger logging.
log_error_records(
'{}: Error running actions for repository'.format(repository_path),
'{}: Error running actions for repository'.format(repository['path']),
error,
levelno=logging.WARNING,
log_command_error_output=True,
@ -148,10 +147,39 @@ def run_configuration(config_filename, config, arguments):
return
yield from log_error_records(
'{}: Error running actions for repository'.format(repository_path), error
'{}: Error running actions for repository'.format(repository['path']), error
)
encountered_error = error
error_repository = repository_path
error_repository = repository['path']
except warning.BorgmaticWarning:
dispatch.call_hooks(
'ping_monitor',
hooks,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitor.State.WARN,
monitoring_log_level,
global_arguments.dry_run,
)
try:
if using_primary_action:
# Send logs irrespective of error.
dispatch.call_hooks(
'ping_monitor',
hooks,
config_filename,
monitor.MONITOR_HOOK_NAMES,
monitor.State.LOG,
monitoring_log_level,
global_arguments.dry_run,
)
except (OSError, CalledProcessError) as error:
if command.considered_soft_failure(config_filename, error):
return
encountered_error = error
yield from log_error_records(f'{repository["path"]}: Error pinging monitor', error)
if not encountered_error:
try:
@ -178,7 +206,7 @@ def run_configuration(config_filename, config, arguments):
return
encountered_error = error
yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
yield from log_error_records(f'{config_filename}: Error pinging monitor', error)
if encountered_error and using_primary_action:
try:
@ -213,9 +241,7 @@ def run_configuration(config_filename, config, arguments):
if command.considered_soft_failure(config_filename, error):
return
yield from log_error_records(
'{}: Error running on-error hook'.format(config_filename), error
)
yield from log_error_records(f'{config_filename}: Error running on-error hook', error)
def run_actions(
@ -230,7 +256,7 @@ def run_actions(
local_path,
remote_path,
local_borg_version,
repository_path,
repository,
):
'''
Given parsed command-line arguments as an argparse.ArgumentParser instance, the configuration
@ -244,13 +270,14 @@ def run_actions(
action or a hook. Raise ValueError if the arguments or configuration passed to action are
invalid.
'''
repository = os.path.expanduser(repository_path)
add_custom_log_levels()
repository_path = os.path.expanduser(repository['path'])
global_arguments = arguments['global']
dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
hook_context = {
'repository': repository_path,
# Deprecated: For backwards compatibility with borgmatic < 1.6.0.
'repositories': ','.join(location['repositories']),
'repositories': ','.join([repo['path'] for repo in location['repositories']]),
}
command.execute_hook(
@ -262,509 +289,161 @@ def run_actions(
**hook_context,
)
if 'rcreate' in arguments:
logger.info('{}: Creating repository'.format(repository))
borg_rcreate.create_repository(
global_arguments.dry_run,
repository,
storage,
local_borg_version,
arguments['rcreate'].encryption_mode,
arguments['rcreate'].source_repository,
arguments['rcreate'].copy_crypt_key,
arguments['rcreate'].append_only,
arguments['rcreate'].storage_quota,
arguments['rcreate'].make_parent_dirs,
local_path=local_path,
remote_path=remote_path,
)
if 'transfer' in arguments:
logger.info(f'{repository}: Transferring archives to repository')
borg_transfer.transfer_archives(
global_arguments.dry_run,
repository,
storage,
local_borg_version,
transfer_arguments=arguments['transfer'],
local_path=local_path,
remote_path=remote_path,
)
if 'prune' in arguments:
command.execute_hook(
hooks.get('before_prune'),
hooks.get('umask'),
config_filename,
'pre-prune',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
borg_prune.prune_archives(
global_arguments.dry_run,
repository,
storage,
retention,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
stats=arguments['prune'].stats,
list_archives=arguments['prune'].list_archives,
)
command.execute_hook(
hooks.get('after_prune'),
hooks.get('umask'),
config_filename,
'post-prune',
global_arguments.dry_run,
**hook_context,
)
if 'compact' in arguments:
command.execute_hook(
hooks.get('before_compact'),
hooks.get('umask'),
config_filename,
'pre-compact',
global_arguments.dry_run,
)
if borg_feature.available(borg_feature.Feature.COMPACT, local_borg_version):
logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
borg_compact.compact_segments(
global_arguments.dry_run,
for (action_name, action_arguments) in arguments.items():
if action_name == 'rcreate':
borgmatic.actions.rcreate.run_rcreate(
repository,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=arguments['compact'].progress,
cleanup_commits=arguments['compact'].cleanup_commits,
threshold=arguments['compact'].threshold,
action_arguments,
global_arguments,
local_path,
remote_path,
)
else: # pragma: nocover
logger.info(
'{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository)
)
command.execute_hook(
hooks.get('after_compact'),
hooks.get('umask'),
config_filename,
'post-compact',
global_arguments.dry_run,
)
if 'create' in arguments:
command.execute_hook(
hooks.get('before_backup'),
hooks.get('umask'),
config_filename,
'pre-backup',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
active_dumps = dispatch.call_hooks(
'dump_databases',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
stream_processes = [process for processes in active_dumps.values() for process in processes]
json_output = borg_create.create_archive(
global_arguments.dry_run,
repository,
location,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=arguments['create'].progress,
stats=arguments['create'].stats,
json=arguments['create'].json,
list_files=arguments['create'].list_files,
stream_processes=stream_processes,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
config_filename,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
command.execute_hook(
hooks.get('after_backup'),
hooks.get('umask'),
config_filename,
'post-backup',
global_arguments.dry_run,
**hook_context,
)
if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
command.execute_hook(
hooks.get('before_check'),
hooks.get('umask'),
config_filename,
'pre-check',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Running consistency checks'.format(repository))
borg_check.check_archives(
repository,
location,
storage,
consistency,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=arguments['check'].progress,
repair=arguments['check'].repair,
only_checks=arguments['check'].only,
force=arguments['check'].force,
)
command.execute_hook(
hooks.get('after_check'),
hooks.get('umask'),
config_filename,
'post-check',
global_arguments.dry_run,
**hook_context,
)
if 'extract' in arguments:
command.execute_hook(
hooks.get('before_extract'),
hooks.get('umask'),
config_filename,
'pre-extract',
global_arguments.dry_run,
**hook_context,
)
if arguments['extract'].repository is None or validate.repositories_match(
repository, arguments['extract'].repository
):
logger.info(
'{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
)
borg_extract.extract_archive(
global_arguments.dry_run,
elif action_name == 'transfer':
borgmatic.actions.transfer.run_transfer(
repository,
storage,
local_borg_version,
action_arguments,
global_arguments,
local_path,
remote_path,
)
elif action_name == 'create':
yield from borgmatic.actions.create.run_create(
config_filename,
repository,
borg_rlist.resolve_archive_name(
repository,
arguments['extract'].archive,
storage,
local_borg_version,
local_path,
remote_path,
),
arguments['extract'].paths,
location,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
destination_path=arguments['extract'].destination,
strip_components=arguments['extract'].strip_components,
progress=arguments['extract'].progress,
)
command.execute_hook(
hooks.get('after_extract'),
hooks.get('umask'),
config_filename,
'post-extract',
global_arguments.dry_run,
**hook_context,
)
if 'export-tar' in arguments:
if arguments['export-tar'].repository is None or validate.repositories_match(
repository, arguments['export-tar'].repository
):
logger.info(
'{}: Exporting archive {} as tar file'.format(
repository, arguments['export-tar'].archive
)
)
borg_export_tar.export_tar_archive(
global_arguments.dry_run,
repository,
borg_rlist.resolve_archive_name(
repository,
arguments['export-tar'].archive,
storage,
local_borg_version,
local_path,
remote_path,
),
arguments['export-tar'].paths,
arguments['export-tar'].destination,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
tar_filter=arguments['export-tar'].tar_filter,
list_files=arguments['export-tar'].list_files,
strip_components=arguments['export-tar'].strip_components,
)
if 'mount' in arguments:
if arguments['mount'].repository is None or validate.repositories_match(
repository, arguments['mount'].repository
):
if arguments['mount'].archive:
logger.info(
'{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
)
else: # pragma: nocover
logger.info('{}: Mounting repository'.format(repository))
borg_mount.mount_archive(
repository,
borg_rlist.resolve_archive_name(
repository,
arguments['mount'].archive,
storage,
local_borg_version,
local_path,
remote_path,
),
arguments['mount'].mount_point,
arguments['mount'].paths,
arguments['mount'].foreground,
arguments['mount'].options,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
)
if 'restore' in arguments: # pragma: nocover
if arguments['restore'].repository is None or validate.repositories_match(
repository, arguments['restore'].repository
):
logger.info(
'{}: Restoring databases from archive {}'.format(
repository, arguments['restore'].archive
)
)
dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
restore_names = arguments['restore'].databases or []
if 'all' in restore_names:
restore_names = []
archive_name = borg_rlist.resolve_archive_name(
repository,
arguments['restore'].archive,
storage,
hook_context,
local_borg_version,
action_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
)
found_names = set()
for hook_name, per_hook_restore_databases in hooks.items():
if hook_name not in dump.DATABASE_HOOK_NAMES:
continue
for restore_database in per_hook_restore_databases:
database_name = restore_database['name']
if restore_names and database_name not in restore_names:
continue
found_names.add(database_name)
dump_pattern = dispatch.call_hooks(
'make_database_dump_pattern',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
database_name,
)[hook_name]
# Kick off a single database extract to stdout.
extract_process = borg_extract.extract_archive(
dry_run=global_arguments.dry_run,
repository=repository,
archive=archive_name,
paths=dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
location_config=location,
storage_config=storage,
local_borg_version=local_borg_version,
local_path=local_path,
remote_path=remote_path,
destination_path='/',
# A directory format dump isn't a single file, and therefore can't extract
# to stdout. In this case, the extract_process return value is None.
extract_to_stdout=bool(restore_database.get('format') != 'directory'),
)
# Run a single database restore, consuming the extract stdout (if any).
dispatch.call_hooks(
'restore_database_dump',
{hook_name: [restore_database]},
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
extract_process,
)
dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
if not restore_names and not found_names:
raise ValueError('No databases were found to restore')
missing_names = sorted(set(restore_names) - found_names)
if missing_names:
raise ValueError(
'Cannot restore database(s) {} missing from borgmatic\'s configuration'.format(
', '.join(missing_names)
)
)
elif action_name == 'prune':
borgmatic.actions.prune.run_prune(
config_filename,
repository,
storage,
retention,
hooks,
hook_context,
local_borg_version,
action_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
)
elif action_name == 'compact':
borgmatic.actions.compact.run_compact(
config_filename,
repository,
storage,
retention,
hooks,
hook_context,
local_borg_version,
action_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
)
elif action_name == 'check':
if checks.repository_enabled_for_checks(repository, consistency):
borgmatic.actions.check.run_check(
config_filename,
repository,
location,
storage,
consistency,
hooks,
hook_context,
local_borg_version,
action_arguments,
global_arguments,
local_path,
remote_path,
)
if 'rlist' in arguments:
if arguments['rlist'].repository is None or validate.repositories_match(
repository, arguments['rlist'].repository
):
rlist_arguments = copy.copy(arguments['rlist'])
if not rlist_arguments.json: # pragma: nocover
logger.warning('{}: Listing repository'.format(repository))
json_output = borg_rlist.list_repository(
repository,
storage,
local_borg_version,
rlist_arguments=rlist_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
elif action_name == 'extract':
borgmatic.actions.extract.run_extract(
config_filename,
repository,
location,
storage,
hooks,
hook_context,
local_borg_version,
action_arguments,
global_arguments,
local_path,
remote_path,
)
if 'list' in arguments:
if arguments['list'].repository is None or validate.repositories_match(
repository, arguments['list'].repository
):
list_arguments = copy.copy(arguments['list'])
if not list_arguments.json: # pragma: nocover
if list_arguments.find_paths:
logger.warning('{}: Searching archives'.format(repository))
elif not list_arguments.archive:
logger.warning('{}: Listing archives'.format(repository))
list_arguments.archive = borg_rlist.resolve_archive_name(
repository,
list_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
json_output = borg_list.list_archive(
repository,
storage,
local_borg_version,
list_arguments=list_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
elif action_name == 'export-tar':
borgmatic.actions.export_tar.run_export_tar(
repository,
storage,
local_borg_version,
action_arguments,
global_arguments,
local_path,
remote_path,
)
if 'rinfo' in arguments:
if arguments['rinfo'].repository is None or validate.repositories_match(
repository, arguments['rinfo'].repository
):
rinfo_arguments = copy.copy(arguments['rinfo'])
if not rinfo_arguments.json: # pragma: nocover
logger.warning('{}: Displaying repository summary information'.format(repository))
json_output = borg_rinfo.display_repository_info(
repository,
storage,
local_borg_version,
rinfo_arguments=rinfo_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
if 'info' in arguments:
if arguments['info'].repository is None or validate.repositories_match(
repository, arguments['info'].repository
):
info_arguments = copy.copy(arguments['info'])
if not info_arguments.json: # pragma: nocover
logger.warning('{}: Displaying archive summary information'.format(repository))
info_arguments.archive = borg_rlist.resolve_archive_name(
repository,
info_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
json_output = borg_info.display_archives_info(
repository,
storage,
local_borg_version,
info_arguments=info_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
elif action_name == 'mount':
borgmatic.actions.mount.run_mount(
repository,
storage,
local_borg_version,
action_arguments,
local_path,
remote_path,
)
if 'break-lock' in arguments:
if arguments['break-lock'].repository is None or validate.repositories_match(
repository, arguments['break-lock'].repository
):
logger.warning(f'{repository}: Breaking repository and cache locks')
borg_break_lock.break_lock(
repository,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
)
if 'borg' in arguments:
if arguments['borg'].repository is None or validate.repositories_match(
repository, arguments['borg'].repository
):
logger.warning('{}: Running arbitrary Borg command'.format(repository))
archive_name = borg_rlist.resolve_archive_name(
repository,
arguments['borg'].archive,
storage,
local_borg_version,
local_path,
remote_path,
)
borg_borg.run_arbitrary_borg(
repository,
storage,
local_borg_version,
options=arguments['borg'].options,
archive=archive_name,
local_path=local_path,
remote_path=remote_path,
)
elif action_name == 'restore':
borgmatic.actions.restore.run_restore(
repository,
location,
storage,
hooks,
local_borg_version,
action_arguments,
global_arguments,
local_path,
remote_path,
)
elif action_name == 'rlist':
yield from borgmatic.actions.rlist.run_rlist(
repository, storage, local_borg_version, action_arguments, local_path, remote_path,
)
elif action_name == 'list':
yield from borgmatic.actions.list.run_list(
repository, storage, local_borg_version, action_arguments, local_path, remote_path,
)
elif action_name == 'rinfo':
yield from borgmatic.actions.rinfo.run_rinfo(
repository, storage, local_borg_version, action_arguments, local_path, remote_path,
)
elif action_name == 'info':
yield from borgmatic.actions.info.run_info(
repository, storage, local_borg_version, action_arguments, local_path, remote_path,
)
elif action_name == 'break-lock':
borgmatic.actions.break_lock.run_break_lock(
repository,
storage,
local_borg_version,
arguments['break-lock'],
local_path,
remote_path,
)
elif action_name == 'borg':
borgmatic.actions.borg.run_borg(
repository, storage, local_borg_version, action_arguments, local_path, remote_path,
)
command.execute_hook(
@ -801,9 +480,7 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True):
dict(
levelno=logging.WARNING,
levelname='WARNING',
msg='{}: Insufficient permissions to read configuration file'.format(
config_filename
),
msg=f'{config_filename}: Insufficient permissions to read configuration file',
)
),
]
@ -815,7 +492,7 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True):
dict(
levelno=logging.CRITICAL,
levelname='CRITICAL',
msg='{}: Error parsing configuration file'.format(config_filename),
msg=f'{config_filename}: Error parsing configuration file',
)
),
logging.makeLogRecord(
@ -916,9 +593,7 @@ def collect_configuration_run_summary_logs(configs, arguments):
if not configs:
yield from log_error_records(
'{}: No valid configuration files found'.format(
' '.join(arguments['global'].config_paths)
)
f"{' '.join(arguments['global'].config_paths)}: No valid configuration files found",
)
return
@ -944,23 +619,21 @@ def collect_configuration_run_summary_logs(configs, arguments):
error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))
if error_logs:
yield from log_error_records(
'{}: Error running configuration file'.format(config_filename)
)
yield from log_error_records(f'{config_filename}: An error occurred')
yield from error_logs
else:
yield logging.makeLogRecord(
dict(
levelno=logging.INFO,
levelname='INFO',
msg='{}: Successfully ran configuration file'.format(config_filename),
msg=f'{config_filename}: Successfully ran configuration file',
)
)
if results:
json_results.extend(results)
if 'umount' in arguments:
logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point))
logger.info(f"Unmounting mount point {arguments['umount'].mount_point}")
try:
borg_umount.unmount_archive(
mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs),
@ -1008,7 +681,7 @@ def main(): # pragma: no cover
if error.code == 0:
raise error
configure_logging(logging.CRITICAL)
logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv)))
logger.critical(f"Error parsing arguments: {' '.join(sys.argv)}")
exit_with_help_link()
global_arguments = arguments['global']
@ -1041,7 +714,7 @@ def main(): # pragma: no cover
)
except (FileNotFoundError, PermissionError) as error:
configure_logging(logging.CRITICAL)
logger.critical('Error configuring logging: {}'.format(error))
logger.critical(f'Error configuring logging: {error}')
exit_with_help_link()
logger.debug('Ensuring legacy configuration is upgraded')

View File

@ -34,7 +34,7 @@ def bash_completion():
' local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"',
' local installed_script="$(borgmatic --bash-completion 2> /dev/null)"',
' if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];'
' then cat << EOF\n%s\nEOF' % UPGRADE_MESSAGE,
f' then cat << EOF\n{UPGRADE_MESSAGE}\nEOF',
' fi',
'}',
'complete_borgmatic() {',
@ -48,7 +48,7 @@ def bash_completion():
for action, subparser in subparsers.choices.items()
)
+ (
' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))'
' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))' # noqa: FS003
% (actions, global_flags),
' (check_version &)',
'}',

View File

@ -28,9 +28,7 @@ def parse_arguments(*arguments):
'--source-config',
dest='source_config_filename',
default=DEFAULT_SOURCE_CONFIG_FILENAME,
help='Source INI-style configuration filename. Default: {}'.format(
DEFAULT_SOURCE_CONFIG_FILENAME
),
help=f'Source INI-style configuration filename. Default: {DEFAULT_SOURCE_CONFIG_FILENAME}',
)
parser.add_argument(
'-e',
@ -46,9 +44,7 @@ def parse_arguments(*arguments):
'--destination-config',
dest='destination_config_filename',
default=DEFAULT_DESTINATION_CONFIG_FILENAME,
help='Destination YAML configuration filename. Default: {}'.format(
DEFAULT_DESTINATION_CONFIG_FILENAME
),
help=f'Destination YAML configuration filename. Default: {DEFAULT_DESTINATION_CONFIG_FILENAME}',
)
return parser.parse_args(arguments)
@ -59,19 +55,15 @@ TEXT_WRAP_CHARACTERS = 80
def display_result(args): # pragma: no cover
result_lines = textwrap.wrap(
'Your borgmatic configuration has been upgraded. Please review the result in {}.'.format(
args.destination_config_filename
),
f'Your borgmatic configuration has been upgraded. Please review the result in {args.destination_config_filename}.',
TEXT_WRAP_CHARACTERS,
)
excludes_phrase = (
f' and {args.source_excludes_filename}' if args.source_excludes_filename else ''
)
delete_lines = textwrap.wrap(
'Once you are satisfied, you can safely delete {}{}.'.format(
args.source_config_filename,
' and {}'.format(args.source_excludes_filename)
if args.source_excludes_filename
else '',
),
f'Once you are satisfied, you can safely delete {args.source_config_filename}{excludes_phrase}.',
TEXT_WRAP_CHARACTERS,
)

View File

@ -23,9 +23,7 @@ def parse_arguments(*arguments):
'--destination',
dest='destination_filename',
default=DEFAULT_DESTINATION_CONFIG_FILENAME,
help='Destination YAML configuration file, default: {}'.format(
DEFAULT_DESTINATION_CONFIG_FILENAME
),
help=f'Destination YAML configuration file, default: {DEFAULT_DESTINATION_CONFIG_FILENAME}',
)
parser.add_argument(
'--overwrite',
@ -48,17 +46,13 @@ def main(): # pragma: no cover
overwrite=args.overwrite,
)
print('Generated a sample configuration file at {}.'.format(args.destination_filename))
print(f'Generated a sample configuration file at {args.destination_filename}.')
print()
if args.source_filename:
print(
'Merged in the contents of configuration file at {}.'.format(args.source_filename)
)
print(f'Merged in the contents of configuration file at {args.source_filename}.')
print('To review the changes made, run:')
print()
print(
' diff --unified {} {}'.format(args.source_filename, args.destination_filename)
)
print(f' diff --unified {args.source_filename} {args.destination_filename}')
print()
print('This includes all available configuration options with example values. The few')
print('required options are indicated. Please edit the file to suit your needs.')

View File

@ -21,9 +21,7 @@ def parse_arguments(*arguments):
nargs='+',
dest='config_paths',
default=config_paths,
help='Configuration filenames or directories, defaults to: {}'.format(
' '.join(config_paths)
),
help=f'Configuration filenames or directories, defaults to: {config_paths}',
)
return parser.parse_args(arguments)
@ -44,13 +42,11 @@ def main(): # pragma: no cover
try:
validate.parse_configuration(config_filename, validate.schema_filename())
except (ValueError, OSError, validate.Validation_error) as error:
logging.critical('{}: Error parsing configuration file'.format(config_filename))
logging.critical(f'{config_filename}: Error parsing configuration file')
logging.critical(error)
found_issues = True
if found_issues:
sys.exit(1)
else:
logger.info(
'All given configuration files are valid: {}'.format(', '.join(config_filenames))
)
logger.info(f"All given configuration files are valid: {', '.join(config_filenames)}")

View File

@ -0,0 +1,3 @@
class BorgmaticWarning(Warning):
pass
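# Raised by borgmatic's process execution code (see execute.py below) when a Borg
# process exits with Borg's warning exit code 1, so callers can surface warnings
# without treating them as hard errors.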

View File

@ -16,8 +16,8 @@ def get_default_config_paths(expand_home=True):
return [
'/etc/borgmatic/config.yaml',
'/etc/borgmatic.d',
'%s/borgmatic/config.yaml' % user_config_directory,
'%s/borgmatic.d' % user_config_directory,
os.path.join(user_config_directory, 'borgmatic/config.yaml'),
os.path.join(user_config_directory, 'borgmatic.d'),
]

View File

@ -14,11 +14,14 @@ def _resolve_string(matcher):
if matcher.group('escape') is not None:
# in case of escaped envvar, unescape it
return matcher.group('variable')
# resolve the env var
name, default = matcher.group('name'), matcher.group('default')
out = os.getenv(name, default=default)
if out is None:
raise ValueError('Cannot find variable ${name} in environment'.format(name=name))
raise ValueError(f'Cannot find variable {name} in environment')
return out
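For example (option values invented), a configuration value can defer to the environment, with an optional fallback:
storage:
    # Resolved from the environment when the configuration is loaded:
    encryption_passphrase: ${BORG_PASSPHRASE}
    # Falls back to "22" when SSH_PORT is unset:
    ssh_command: ssh -p ${SSH_PORT:-22}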

View File

@ -48,7 +48,7 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
config, schema, indent=indent, skip_first=parent_is_sequence
)
else:
raise ValueError('Schema at level {} is unsupported: {}'.format(level, schema))
raise ValueError(f'Schema at level {level} is unsupported: {schema}')
return config
@ -84,7 +84,7 @@ def _comment_out_optional_configuration(rendered_config):
for line in rendered_config.split('\n'):
# Upon encountering an optional configuration option, comment out lines until the next blank
# line.
if line.strip().startswith('# {}'.format(COMMENTED_OUT_SENTINEL)):
if line.strip().startswith(f'# {COMMENTED_OUT_SENTINEL}'):
optional = True
continue
@ -117,9 +117,7 @@ def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=
'''
if not overwrite and os.path.exists(config_filename):
raise FileExistsError(
'{} already exists. Aborting. Use --overwrite to replace the file.'.format(
config_filename
)
f'{config_filename} already exists. Aborting. Use --overwrite to replace the file.'
)
try:
@ -218,7 +216,7 @@ def remove_commented_out_sentinel(config, field_name):
except KeyError:
return
if last_comment_value == '# {}\n'.format(COMMENTED_OUT_SENTINEL):
if last_comment_value == f'# {COMMENTED_OUT_SENTINEL}\n':
config.ca.items[field_name][RUAMEL_YAML_COMMENTS_INDEX].pop()

View File

@ -70,13 +70,11 @@ def validate_configuration_format(parser, config_format):
section_format.name for section_format in config_format
)
if unknown_section_names:
raise ValueError(
'Unknown config sections found: {}'.format(', '.join(unknown_section_names))
)
raise ValueError(f"Unknown config sections found: {', '.join(unknown_section_names)}")
missing_section_names = set(required_section_names) - section_names
if missing_section_names:
raise ValueError('Missing config sections: {}'.format(', '.join(missing_section_names)))
raise ValueError(f"Missing config sections: {', '.join(missing_section_names)}")
for section_format in config_format:
if section_format.name not in section_names:
@ -91,9 +89,7 @@ def validate_configuration_format(parser, config_format):
if unexpected_option_names:
raise ValueError(
'Unexpected options found in config section {}: {}'.format(
section_format.name, ', '.join(sorted(unexpected_option_names))
)
f"Unexpected options found in config section {section_format.name}: {', '.join(sorted(unexpected_option_names))}",
)
missing_option_names = tuple(
@ -105,9 +101,7 @@ def validate_configuration_format(parser, config_format):
if missing_option_names:
raise ValueError(
'Required options missing from config section {}: {}'.format(
section_format.name, ', '.join(missing_option_names)
)
f"Required options missing from config section {section_format.name}: {', '.join(missing_option_names)}",
)
@ -137,7 +131,7 @@ def parse_configuration(config_filename, config_format):
'''
parser = RawConfigParser()
if not parser.read(config_filename):
raise ValueError('Configuration file cannot be opened: {}'.format(config_filename))
raise ValueError(f'Configuration file cannot be opened: {config_filename}')
validate_configuration_format(parser, config_format)

View File

@ -1,3 +1,5 @@
import functools
import json
import logging
import os
@ -6,43 +8,17 @@ import ruamel.yaml
logger = logging.getLogger(__name__)
class Yaml_with_loader_stream(ruamel.yaml.YAML):
'''
A derived class of ruamel.yaml.YAML that simply tacks the loaded stream (file object) onto the
loader class so that it's available anywhere that's passed a loader (in this case,
include_configuration() below).
'''
def get_constructor_parser(self, stream):
constructor, parser = super(Yaml_with_loader_stream, self).get_constructor_parser(stream)
constructor.loader.stream = stream
return constructor, parser
def load_configuration(filename):
'''
Load the given configuration file and return its contents as a data structure of nested dicts
and lists.
Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
if there are too many recursive includes.
'''
yaml = Yaml_with_loader_stream(typ='safe')
yaml.Constructor = Include_constructor
return yaml.load(open(filename))
def include_configuration(loader, filename_node):
'''
Load the given YAML filename (ignoring the given loader so we can use our own) and return its
contents as a data structure of nested dicts and lists. If the filename is relative, probe for
it within 1. the current working directory and 2. the directory containing the YAML file doing
the including.
Raise FileNotFoundError if an included file was not found.
'''
include_directories = [os.getcwd(), os.path.abspath(os.path.dirname(loader.stream.name))]
def include_configuration(loader, filename_node, include_directory):
'''
Given a ruamel.yaml.loader.Loader, a ruamel.yaml.serializer.ScalarNode containing the included
filename, and an include directory path to search for matching files, load the given YAML
filename (ignoring the given loader so we can use our own) and return its contents as a data
structure of nested dicts and lists. If the filename is relative, probe for it within 1. the
current working directory and 2. the given include directory.
Raise FileNotFoundError if an included file was not found.
'''
include_directories = [os.getcwd(), os.path.abspath(include_directory)]
include_filename = os.path.expanduser(filename_node.value)
if not os.path.isabs(include_filename):
@ -62,6 +38,83 @@ def include_configuration(loader, filename_node):
return load_configuration(include_filename)
class Include_constructor(ruamel.yaml.SafeConstructor):
'''
A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including
separate YAML configuration files. Example syntax: `retention: !include common.yaml`
'''
def __init__(self, preserve_quotes=None, loader=None, include_directory=None):
super(Include_constructor, self).__init__(preserve_quotes, loader)
self.add_constructor(
'!include',
functools.partial(include_configuration, include_directory=include_directory),
)
def flatten_mapping(self, node):
'''
Support the special case of deep merging included configuration into an existing mapping
using the YAML '<<' merge key. Example syntax:
```
retention:
keep_daily: 1
<<: !include common.yaml
```
These includes are deep merged into the current configuration file. For instance, in this
example, any "retention" options in common.yaml will get merged into the "retention" section
in the example configuration file.
'''
representer = ruamel.yaml.representer.SafeRepresenter()
for index, (key_node, value_node) in enumerate(node.value):
if key_node.tag == u'tag:yaml.org,2002:merge' and value_node.tag == '!include':
included_value = representer.represent_data(self.construct_object(value_node))
node.value[index] = (key_node, included_value)
super(Include_constructor, self).flatten_mapping(node)
node.value = deep_merge_nodes(node.value)
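To make the merge concrete, a small sketch (file contents invented; options in the including file take precedence over included ones):
# common.yaml
retention:
    keep_hourly: 24
    keep_daily: 7
# config.yaml
retention:
    keep_daily: 1
    <<: !include common.yaml
# Deep-merged result: keep_daily remains 1 (from config.yaml), and keep_hourly: 24
# is merged in from common.yaml.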
def load_configuration(filename):
'''
Load the given configuration file and return its contents as a data structure of nested dicts
and lists. Also, replace any "{constant}" strings with the value of the "constant" key in the
"constants" section of the configuration file.
Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
if there are too many recursive includes.
'''
# Use an embedded derived class for the include constructor so as to capture the filename
# value. (functools.partial doesn't work for this use case because yaml.Constructor has to be
# an actual class.)
class Include_constructor_with_include_directory(Include_constructor):
def __init__(self, preserve_quotes=None, loader=None):
super(Include_constructor_with_include_directory, self).__init__(
preserve_quotes, loader, include_directory=os.path.dirname(filename)
)
yaml = ruamel.yaml.YAML(typ='safe')
yaml.Constructor = Include_constructor_with_include_directory
with open(filename) as file:
file_contents = file.read()
config = yaml.load(file_contents)
if config and 'constants' in config:
for key, value in config['constants'].items():
value = json.dumps(value)
file_contents = file_contents.replace(f'{{{key}}}', value.strip('"'))
config = yaml.load(file_contents)
del config['constants']
return config
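For instance, a minimal sketch of the replacement this performs (constant names invented): each "{name}" occurrence is textually substituted before the second YAML parse, and the constants section is then removed:
constants:
    hostname: myhostname
    prefix: myprefix
location:
    source_directories:
        - /home
    repositories:
        - path: /var/local/backups/{hostname}.borg    # loads as /var/local/backups/myhostname.borg
storage:
    archive_name_format: '{prefix}{now}'    # {prefix} is replaced; {now} is not a constant, so Borg still sees it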
DELETED_NODE = object()
@ -175,41 +228,3 @@ def deep_merge_nodes(nodes):
return [
replaced_nodes.get(node, node) for node in nodes if replaced_nodes.get(node) != DELETED_NODE
]
class Include_constructor(ruamel.yaml.SafeConstructor):
'''
A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including
separate YAML configuration files. Example syntax: `retention: !include common.yaml`
'''
def __init__(self, preserve_quotes=None, loader=None):
super(Include_constructor, self).__init__(preserve_quotes, loader)
self.add_constructor('!include', include_configuration)
def flatten_mapping(self, node):
'''
Support the special case of deep merging included configuration into an existing mapping
using the YAML '<<' merge key. Example syntax:
```
retention:
keep_daily: 1
<<: !include common.yaml
```
These includes are deep merged into the current configuration file. For instance, in this
example, any "retention" options in common.yaml will get merged into the "retention" section
in the example configuration file.
'''
representer = ruamel.yaml.representer.SafeRepresenter()
for index, (key_node, value_node) in enumerate(node.value):
if key_node.tag == u'tag:yaml.org,2002:merge' and value_node.tag == '!include':
included_value = representer.represent_data(self.construct_object(value_node))
node.value[index] = (key_node, included_value)
super(Include_constructor, self).flatten_mapping(node)
node.value = deep_merge_nodes(node.value)

View File

@ -1,4 +1,5 @@
import logging
import os
def normalize(config_filename, config):
@ -56,9 +57,15 @@ def normalize(config_filename, config):
# Upgrade remote repositories to ssh:// syntax, required in Borg 2.
repositories = location.get('repositories')
if repositories:
if isinstance(repositories[0], str):
config['location']['repositories'] = [
{'path': repository} for repository in repositories
]
repositories = config['location']['repositories']
config['location']['repositories'] = []
for repository in repositories:
if '~' in repository:
for repository_dict in repositories:
repository_path = repository_dict['path']
if '~' in repository_path:
logs.append(
logging.makeLogRecord(
dict(
@ -68,21 +75,31 @@ def normalize(config_filename, config):
)
)
)
if ':' in repository and not repository.startswith('ssh://'):
rewritten_repository = (
f"ssh://{repository.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
)
logs.append(
logging.makeLogRecord(
dict(
levelno=logging.WARNING,
levelname='WARNING',
msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated. Interpreting "{repository}" as "{rewritten_repository}"',
)
)
)
config['location']['repositories'].append(rewritten_repository)
else:
config['location']['repositories'].append(repository)
if ':' in repository_path:
if repository_path.startswith('file://'):
updated_repository_path = os.path.abspath(
repository_path.partition('file://')[-1]
)
config['location']['repositories'].append(
dict(repository_dict, path=updated_repository_path,)
)
elif repository_path.startswith('ssh://'):
config['location']['repositories'].append(repository_dict)
else:
rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
logs.append(
logging.makeLogRecord(
dict(
levelno=logging.WARNING,
levelname='WARNING',
msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated. Interpreting "{repository_path}" as "{rewritten_repository_path}"',
)
)
)
config['location']['repositories'].append(
dict(repository_dict, path=rewritten_repository_path,)
)
else:
config['location']['repositories'].append(repository_dict)
return logs
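The net effect of this normalization on a legacy configuration, as a before/after sketch (paths invented):
# Before (pre-1.7.10 plain string form):
location:
    repositories:
        - user@backupserver:sourcehostname.borg
        - /var/local/backups/local.borg
# After normalize() (dict form; the remote path gains ssh:// syntax and a warning is logged):
location:
    repositories:
        - path: ssh://user@backupserver/./sourcehostname.borg
        - path: /var/local/backups/local.borg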

View File

@ -3,6 +3,17 @@ required:
- location
additionalProperties: false
properties:
constants:
type: object
description: |
Constants to use in the configuration file. All occurrences of the
constant name within curly braces will be replaced with the value.
For example, if you have a constant named "hostname" with the value
"myhostname", then the string "{hostname}" will be replaced with
"myhostname" in the configuration file.
example:
hostname: myhostname
prefix: myprefix
location:
type: object
description: |
@ -29,19 +40,32 @@ properties:
repositories:
type: array
items:
type: string
type: object
required:
- path
properties:
path:
type: string
example: ssh://user@backupserver/./{fqdn}
label:
type: string
example: backupserver
description: |
Paths to local or remote repositories (required). Tildes are
expanded. Multiple repositories are backed up to in
sequence. Borg placeholders can be used. See the output of
"borg help placeholders" for details. See ssh_command for
SSH options like identity file or port. If systemd service
is used, then add local repository paths in the systemd
service file to the ReadWritePaths list.
A required list of local or remote repositories with paths
and optional labels (which can be used with the --repository
flag to select a repository). Tildes are expanded. Multiple
repositories are backed up to in sequence. Borg placeholders
can be used. See the output of "borg help placeholders" for
details. See ssh_command for SSH options like identity file
or port. If systemd service is used, then add local
repository paths in the systemd service file to the
ReadWritePaths list. Prior to borgmatic 1.7.10, repositories
was just a list of plain path strings.
example:
- ssh://user@backupserver/./sourcehostname.borg
- ssh://user@backupserver/./{fqdn}
- /var/local/backups/local.borg
- path: ssh://user@backupserver/./sourcehostname.borg
label: backupserver
- path: /mnt/backup
label: local
working_directory:
type: string
description: |
@ -202,6 +226,12 @@ properties:
path prevents "borgmatic restore" from finding any database
dumps created before the change. Defaults to ~/.borgmatic
example: /tmp/borgmatic
source_directories_must_exist:
type: boolean
description: |
If true, then source directories must exist, otherwise an
error is raised. Defaults to false.
example: true
storage:
type: object
description: |
@ -240,6 +270,16 @@ properties:
for details. Defaults to checkpoints every 1800 seconds (30
minutes).
example: 1800
checkpoint_volume:
type: integer
description: |
Number of backed up bytes between each checkpoint during a
long-running backup. Only supported with Borg 2+. See
https://borgbackup.readthedocs.io/en/stable/faq.html
for details. Defaults to only time-based checkpointing (see
"checkpoint_interval") instead of volume-based
checkpointing.
example: 1048576
chunker_params:
type: string
description: |
@ -305,6 +345,12 @@ properties:
Path for Borg cache files. Defaults to
$borg_base_directory/.cache/borg
example: /path/to/base/cache
borg_files_cache_ttl:
type: integer
description: |
Maximum time to live (ttl) for entries in the Borg files
cache.
example: 20
borg_security_directory:
type: string
description: |
@ -359,6 +405,11 @@ properties:
description: |
Extra command-line options to pass to "borg init".
example: "--extra-option"
create:
type: string
description: |
Extra command-line options to pass to "borg create".
example: "--extra-option"
prune:
type: string
description: |
@ -369,11 +420,6 @@ properties:
description: |
Extra command-line options to pass to "borg compact".
example: "--extra-option"
create:
type: string
description: |
Extra command-line options to pass to "borg create".
example: "--extra-option"
check:
type: string
description: |
@ -492,12 +538,12 @@ properties:
items:
type: string
description: |
Paths to a subset of the repositories in the location
section on which to run consistency checks. Handy in case
some of your repositories are very large, and so running
consistency checks on them would take too long. Defaults to
running consistency checks on all repositories configured in
the location section.
Paths or labels for a subset of the repositories in the
location section on which to run consistency checks. Handy
in case some of your repositories are very large, and so
running consistency checks on them would take too long.
Defaults to running consistency checks on all repositories
configured in the location section.
example:
- user@backupserver:sourcehostname.borg
check_last:
@ -653,11 +699,11 @@ properties:
type: string
description: |
List of one or more shell commands or scripts to execute
when an exception occurs during a "prune", "compact",
"create", or "check" action or an associated before/after
when an exception occurs during a "create", "prune",
"compact", or "check" action or an associated before/after
hook.
example:
- echo "Error during prune/compact/create/check."
- echo "Error during create/prune/compact/check."
before_everything:
type: array
items:
@ -691,10 +737,13 @@ properties:
type: string
description: |
Database name (required if using this hook). Or
"all" to dump all databases on the host. Note
that using this database hook implicitly enables
both read_special and one_file_system (see
above) to support dump and restore streaming.
"all" to dump all databases on the host. (Also
set the "format" to dump each database to a
separate file instead of one combined file.)
Note that using this database hook implicitly
enables both read_special and one_file_system
(see above) to support dump and restore
streaming.
example: users
hostname:
type: string
@ -729,9 +778,14 @@ properties:
description: |
Database dump output format. One of "plain",
"custom", "directory", or "tar". Defaults to
"custom" (unlike raw pg_dump). See pg_dump
documentation for details. Note that format is
ignored when the database name is "all".
"custom" (unlike raw pg_dump) for a single
database. Or, when database name is "all" and
format is blank, dumps all databases to a single
file. But if a format is specified with an "all"
database name, dumps each database to a separate
file of that format, allowing more convenient
restores of individual databases. See the
pg_dump documentation for more about formats.
example: directory
ssl_mode:
type: string
@ -764,6 +818,32 @@ properties:
description: |
Path to a certificate revocation list.
example: "/root/.postgresql/root.crl"
pg_dump_command:
type: string
description: |
Command to use instead of "pg_dump" or
"pg_dumpall". This can be used to run a specific
pg_dump version (e.g., one inside a running
docker container). Defaults to "pg_dump" for
single database dump or "pg_dumpall" to dump
all databases.
example: docker exec my_pg_container pg_dump
pg_restore_command:
type: string
description: |
Command to use instead of "pg_restore". This
can be used to run a specific pg_restore
version (e.g., one inside a running docker
container). Defaults to "pg_restore".
example: docker exec my_pg_container pg_restore
psql_command:
type: string
description: |
Command to use instead of "psql". This can be
used to run a specific psql version (e.g.,
one inside a running docker container).
Defaults to "psql".
example: docker exec my_pg_container psql
options:
type: string
description: |
@ -772,6 +852,30 @@ properties:
any validation on them. See pg_dump
documentation for details.
example: --role=someone
list_options:
type: string
description: |
Additional psql options to pass directly to the
psql command that lists available databases,
without performing any validation on them. See
psql documentation for details.
example: --role=someone
restore_options:
type: string
description: |
Additional pg_restore/psql options to pass
directly to the restore command, without
performing any validation on them. See
pg_restore/psql documentation for details.
example: --role=someone
analyze_options:
type: string
description: |
Additional psql options to pass directly to the
analyze command run after a restore, without
performing any validation on them. See psql
documentation for details.
example: --role=someone
description: |
List of one or more PostgreSQL databases to dump before
creating a backup, run once per configuration file. The
@ -821,14 +925,26 @@ properties:
configured to trust the configured username
without a password.
example: trustsome1
list_options:
format:
type: string
enum: ['sql']
description: |
Additional mysql options to pass directly to
the mysql command that lists available
databases, without performing any validation on
them. See mysql documentation for details.
example: --defaults-extra-file=my.cnf
Database dump output format. Currently only "sql"
is supported. Defaults to "sql" for a single
database. Or, when database name is "all" and
format is blank, dumps all databases to a single
file. But if a format is specified with an "all"
database name, dumps each database to a separate
file of that format, allowing more convenient
restores of individual databases.
example: sql
add_drop_database:
type: boolean
description: |
Use the "--add-drop-database" flag with
mysqldump, causing the database to be dropped
right before restore. Defaults to true.
example: false
options:
type: string
description: |
@ -837,6 +953,22 @@ properties:
validation on them. See mysqldump documentation
for details.
example: --skip-comments
list_options:
type: string
description: |
Additional mysql options to pass directly to
the mysql command that lists available
databases, without performing any validation on
them. See mysql documentation for details.
example: --defaults-extra-file=my.cnf
restore_options:
type: string
description: |
Additional mysql options to pass directly to
the mysql command that restores database dumps,
without performing any validation on them. See
mysql documentation for details.
example: --defaults-extra-file=my.cnf
description: |
List of one or more MySQL/MariaDB databases to dump before
creating a backup, run once per configuration file. The
@ -845,6 +977,31 @@ properties:
mysqldump/mysql commands (from either MySQL or MariaDB). See
https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or
https://mariadb.com/kb/en/library/mysqldump/ for details.
sqlite_databases:
type: array
items:
type: object
required: ['path','name']
additionalProperties: false
properties:
name:
type: string
description: |
This is used to tag the database dump file
with a name. It is not the path to the database
file itself. The name "all" has no special
meaning for SQLite databases.
example: users
path:
type: string
description: |
Path to the SQLite database file to dump. If
relative, it is relative to the current working
directory. Note that using this
database hook implicitly enables both
read_special and one_file_system (see above) to
support dump and restore streaming.
example: /var/lib/sqlite/users.db
mongodb_databases:
type: array
items:
@ -909,7 +1066,15 @@ properties:
directly to the dump command, without performing
any validation on them. See mongodump
documentation for details.
example: --role=someone
example: --dumpDbUsersAndRoles
restore_options:
type: string
description: |
Additional mongorestore options to pass
directly to the dump command, without performing
any validation on them. See mongorestore
documentation for details.
example: --restoreDbUsersAndRoles
description: |
List of one or more MongoDB databases to dump before
creating a backup, run once per configuration file. The
@ -935,6 +1100,16 @@ properties:
description: |
The address of your self-hosted ntfy.sh instance.
example: https://ntfy.your-domain.com
username:
type: string
description: |
The username used for authentication.
example: testuser
password:
type: string
description: |
The password used for authentication.
example: fakepassword
start:
type: object
properties:
@ -1004,6 +1179,29 @@ properties:
description: |
Tags to attach to the message.
example: incoming_envelope
warn:
type: object
properties:
title:
type: string
description: |
The title of the message.
example: Ping!
message:
type: string
description: |
The message body to publish.
example: Your backups ended with warnings
priority:
type: string
description: |
The priority to set.
example: urgent
tags:
type: string
description: |
Tags to attach to the message.
example: incoming_envelope
states:
type: array
items:
@ -1012,6 +1210,7 @@ properties:
- start
- finish
- fail
- warn
uniqueItems: true
description: |
List of one or more monitoring states to ping for:
@ -1029,7 +1228,7 @@ properties:
type: string
description: |
Healthchecks ping URL or UUID to notify when a
backup begins, ends, or errors.
backup begins, ends, or errors, or just to send logs.
example: https://hc-ping.com/your-uuid-here
verify_tls:
type: boolean
@ -1041,7 +1240,8 @@ properties:
type: boolean
description: |
Send borgmatic logs to Healthchecks as part of the
"finish" state. Defaults to true.
"finish", "fail", and "log" states. Defaults to
true.
example: false
ping_body_limit:
type: integer
@ -1060,10 +1260,11 @@ properties:
- start
- finish
- fail
- log
uniqueItems: true
description: |
List of one or more monitoring states to ping for:
"start", "finish", and/or "fail". Defaults to
"start", "finish", "fail", and/or "log". Defaults to
pinging for all states.
example:
- finish

View File

@ -20,9 +20,9 @@ def format_json_error_path_element(path_element):
Given a path element into a JSON data structure, format it for display as a string.
'''
if isinstance(path_element, int):
return str('[{}]'.format(path_element))
return str(f'[{path_element}]')
return str('.{}'.format(path_element))
return str(f'.{path_element}')
def format_json_error(error):
@ -30,10 +30,10 @@ def format_json_error(error):
Given an instance of jsonschema.exceptions.ValidationError, format it for display as a string.
'''
if not error.path:
return 'At the top level: {}'.format(error.message)
return f'At the top level: {error.message}'
formatted_path = ''.join(format_json_error_path_element(element) for element in error.path)
return "At '{}': {}".format(formatted_path.lstrip('.'), error.message)
return f"At '{formatted_path.lstrip('.')}': {error.message}"
class Validation_error(ValueError):
@ -54,9 +54,10 @@ class Validation_error(ValueError):
'''
Render a validation error as a user-facing string.
'''
return 'An error occurred while parsing a configuration file at {}:\n'.format(
self.config_filename
) + '\n'.join(error for error in self.errors)
return (
f'An error occurred while parsing a configuration file at {self.config_filename}:\n'
+ '\n'.join(error for error in self.errors)
)
def apply_logical_validation(config_filename, parsed_configuration):
@ -68,13 +69,14 @@ def apply_logical_validation(config_filename, parsed_configuration):
location_repositories = parsed_configuration.get('location', {}).get('repositories')
check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', [])
for repository in check_repositories:
if repository not in location_repositories:
if not any(
repositories_match(repository, config_repository)
for config_repository in location_repositories
):
raise Validation_error(
config_filename,
(
'Unknown repository in the "consistency" section\'s "check_repositories": {}'.format(
repository
),
f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}',
),
)
@ -126,18 +128,29 @@ def normalize_repository_path(repository):
'''
Given a repository path, return the absolute path of it (for local repositories).
'''
# A colon in the repository indicates it's a remote repository. Bail.
if ':' in repository:
return repository
return os.path.abspath(repository)
# A colon in the repository could mean that it's either a file:// URL or a remote repository.
# If it's a remote repository, we don't want to normalize it. If it's a file:// URL, we do.
if ':' not in repository:
return os.path.abspath(repository)
elif repository.startswith('file://'):
return os.path.abspath(repository.partition('file://')[-1])
else:
return repository
def repositories_match(first, second):
'''
Given two repository paths (relative and/or absolute), return whether they match.
Given two repository dicts with keys 'path' (relative and/or absolute),
and 'label', or two repository paths, return whether they match.
'''
return normalize_repository_path(first) == normalize_repository_path(second)
if isinstance(first, str):
first = {'path': first, 'label': first}
if isinstance(second, str):
second = {'path': second, 'label': second}
return (first.get('label') == second.get('label')) or (
normalize_repository_path(first.get('path'))
== normalize_repository_path(second.get('path'))
)
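A quick sketch of these matching semantics (paths and labels invented):
repositories_match('/mnt/backup', '/mnt/./backup')                                # True: paths normalize equal
repositories_match({'path': '/mnt/backup', 'label': 'local'}, 'local')            # True: a bare string matches on label
repositories_match({'path': '/a', 'label': 'x'}, {'path': '/b', 'label': 'y'})    # False: neither label nor path matches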
def guard_configuration_contains_repository(repository, configurations):
@ -157,14 +170,14 @@ def guard_configuration_contains_repository(repository, configurations):
config_repository
for config in configurations.values()
for config_repository in config['location']['repositories']
if repositories_match(repository, config_repository)
if repositories_match(config_repository, repository)
)
)
if count == 0:
raise ValueError('Repository {} not found in configuration files'.format(repository))
raise ValueError(f'Repository {repository} not found in configuration files')
if count > 1:
raise ValueError('Repository {} found in multiple configuration files'.format(repository))
raise ValueError(f'Repository {repository} found in multiple configuration files')
def guard_single_repository_selected(repository, configurations):
@ -186,5 +199,5 @@ def guard_single_repository_selected(repository, configurations):
if count != 1:
raise ValueError(
'Can\'t determine which repository to use. Use --repository to disambiguate'
"Can't determine which repository to use. Use --repository to disambiguate"
)

View File

@ -3,15 +3,17 @@ import logging
import os
import select
import subprocess
from borgmatic.commands import warning
logger = logging.getLogger(__name__)
ERROR_OUTPUT_MAX_LINE_COUNT = 25
BORG_ERROR_EXIT_CODE = 2
BORG_WARNING_EXIT_CODE = 1
def exit_code_indicates_error(process, exit_code, borg_local_path=None):
def exit_code_indicates_error(command, exit_code, borg_local_path=None):
'''
Return True if the given exit code from running a command corresponds to an error. If a Borg
local path is given and matches the process' command, then treat exit code 1 as a warning
@ -20,13 +22,24 @@ def exit_code_indicates_error(process, exit_code, borg_local_path=None):
if exit_code is None:
return False
command = process.args.split(' ') if isinstance(process.args, str) else process.args
if borg_local_path and command[0] == borg_local_path:
return bool(exit_code < 0 or exit_code >= BORG_ERROR_EXIT_CODE)
return bool(exit_code != 0)
def exit_code_indicates_borg_warning(process, exit_code, borg_local_path=None):
'''
Return True if the given exit code from running a borg command corresponds to an borg warning.
'''
if exit_code is None:
return False
command = process.args.split(' ') if isinstance(process.args, str) else process.args
if borg_local_path and command[0] == borg_local_path:
return bool(exit_code == BORG_WARNING_EXIT_CODE)
return False
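A hedged illustration of that split between warnings and errors, assuming a Borg local path of 'borg':
exit_code_indicates_error(['borg', 'create'], 1, 'borg')   # False: Borg exit code 1 is only a warning
exit_code_indicates_error(['borg', 'create'], 2, 'borg')   # True: Borg exit codes >= 2 (or negative) are errors
exit_code_indicates_error(['pg_dump'], 1)                  # True: any non-zero exit code from other commands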
def command_for_process(process):
'''
@ -45,11 +58,29 @@ def output_buffer_for_process(process, exclude_stdouts):
return process.stderr if process.stdout in exclude_stdouts else process.stdout
def append_last_lines(last_lines, captured_output, line, output_log_level):
'''
Given a rolling list of last lines, a list of captured output, a line to append, and an output
log level, append the line to the last lines and (if necessary) the captured output. Then log
the line at the requested output log level.
'''
last_lines.append(line)
if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT:
last_lines.pop(0)
if output_log_level is None:
captured_output.append(line)
else:
logger.log(output_log_level, line)
def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
'''
Given a sequence of subprocess.Popen() instances for multiple processes, log the output for each
process with the requested log level. Additionally, raise a CalledProcessError if a process
exits with an error (or a warning for exit code 1, if that process matches the Borg local path).
exits with an error (or a warning for exit code 1, if that process does not match the Borg local
path).
If output log level is None, then instead of logging, capture output for each process and return
it as a dict from the process to its output.
@ -99,15 +130,12 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
# Keep the last few lines of output in case the process errors, and we need the output for
# the exception below.
last_lines = buffer_last_lines[ready_buffer]
last_lines.append(line)
if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT:
last_lines.pop(0)
if output_log_level is None:
captured_outputs[ready_process].append(line)
else:
logger.log(output_log_level, line)
append_last_lines(
buffer_last_lines[ready_buffer],
captured_outputs[ready_process],
line,
output_log_level,
)
if not still_running:
break
@ -120,13 +148,28 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
if exit_code is None:
still_running = True
command = process.args.split(' ') if isinstance(process.args, str) else process.args
if exit_code_indicates_borg_warning(process, exit_code, borg_local_path):
raise warning.BorgmaticWarning()
# If any process errors, then raise accordingly.
if exit_code_indicates_error(process, exit_code, borg_local_path):
if exit_code_indicates_error(command, exit_code, borg_local_path):
# If an error occurs, include its output in the raised exception so that we don't
# inadvertently hide error output.
output_buffer = output_buffer_for_process(process, exclude_stdouts)
last_lines = buffer_last_lines[output_buffer] if output_buffer else []
# Collect any straggling output lines that came in since we last gathered output.
while output_buffer: # pragma: no cover
line = output_buffer.readline().rstrip().decode()
if not line:
break
append_last_lines(
last_lines, captured_outputs[process], line, output_log_level=logging.ERROR
)
if len(last_lines) == ERROR_OUTPUT_MAX_LINE_COUNT:
last_lines.insert(0, '...')
@ -154,8 +197,8 @@ def log_command(full_command, input_file=None, output_file=None):
'''
logger.debug(
' '.join(full_command)
+ (' < {}'.format(getattr(input_file, 'name', '')) if input_file else '')
+ (' > {}'.format(getattr(output_file, 'name', '')) if output_file else '')
+ (f" < {getattr(input_file, 'name', '')}" if input_file else '')
+ (f" > {getattr(output_file, 'name', '')}" if output_file else '')
)
@ -194,6 +237,7 @@ def execute_command(
do_not_capture = bool(output_file is DO_NOT_CAPTURE)
command = ' '.join(full_command) if shell else full_command
""" logging.warning(f"{do_not_capture, output_file, shell=}") """
process = subprocess.Popen(
command,
stdin=input_file,
@ -203,6 +247,13 @@ def execute_command(
env=environment,
cwd=working_directory,
)
if not run_to_completion:
return process
@ -227,13 +278,18 @@ def execute_command_and_capture_output(
environment = {**os.environ, **extra_environment} if extra_environment else None
command = ' '.join(full_command) if shell else full_command
output = subprocess.check_output(
command,
stderr=subprocess.STDOUT if capture_stderr else None,
shell=shell,
env=environment,
cwd=working_directory,
)
try:
output = subprocess.check_output(
command,
stderr=subprocess.STDOUT if capture_stderr else None,
shell=shell,
env=environment,
cwd=working_directory,
)
except subprocess.CalledProcessError as error:
if exit_code_indicates_error(command, error.returncode):
raise
output = error.output
return output.decode() if output is not None else None

View File

@ -16,7 +16,7 @@ def interpolate_context(config_filename, hook_description, command, context):
names/values, interpolate the values by "{name}" into the command and return the result.
'''
for name, value in context.items():
command = command.replace('{%s}' % name, str(value))
command = command.replace(f'{{{name}}}', str(value))
for unsupported_variable in re.findall(r'{\w+}', command):
logger.warning(
@ -38,7 +38,7 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
Raise subprocesses.CalledProcessError if an error occurs in a hook.
'''
if not commands:
logger.debug('{}: No commands to run for {} hook'.format(config_filename, description))
logger.debug(f'{config_filename}: No commands to run for {description} hook')
return
dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''
@ -49,19 +49,15 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **conte
]
if len(commands) == 1:
logger.info(
'{}: Running command for {} hook{}'.format(config_filename, description, dry_run_label)
)
logger.info(f'{config_filename}: Running command for {description} hook{dry_run_label}')
else:
logger.info(
'{}: Running {} commands for {} hook{}'.format(
config_filename, len(commands), description, dry_run_label
)
f'{config_filename}: Running {len(commands)} commands for {description} hook{dry_run_label}',
)
if umask:
parsed_umask = int(str(umask), 8)
logger.debug('{}: Set hook umask to {}'.format(config_filename, oct(parsed_umask)))
logger.debug(f'{config_filename}: Set hook umask to {oct(parsed_umask)}')
original_umask = os.umask(parsed_umask)
else:
original_umask = None
@ -93,9 +89,7 @@ def considered_soft_failure(config_filename, error):
if exit_code == SOFT_FAIL_EXIT_CODE:
logger.info(
'{}: Command hook exited with soft failure exit code ({}); skipping remaining actions'.format(
config_filename, SOFT_FAIL_EXIT_CODE
)
f'{config_filename}: Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining actions',
)
return True

View File

@ -27,18 +27,22 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration
filename in any log entries. If this is a dry run, then don't actually ping anything.
'''
if state not in MONITOR_STATE_TO_CRONHUB:
logger.debug(
f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in Cronhub hook'
)
return
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
formatted_state = '/{}/'.format(MONITOR_STATE_TO_CRONHUB[state])
formatted_state = f'/{MONITOR_STATE_TO_CRONHUB[state]}/'
ping_url = (
hook_config['ping_url']
.replace('/start/', formatted_state)
.replace('/ping/', formatted_state)
)
logger.info(
'{}: Pinging Cronhub {}{}'.format(config_filename, state.name.lower(), dry_run_label)
)
logger.debug('{}: Using Cronhub ping URL {}'.format(config_filename, ping_url))
logger.info(f'{config_filename}: Pinging Cronhub {state.name.lower()}{dry_run_label}')
logger.debug(f'{config_filename}: Using Cronhub ping URL {ping_url}')
if not dry_run:
logging.getLogger('urllib3').setLevel(logging.ERROR)

View File

@ -27,13 +27,17 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration
filename in any log entries. If this is a dry run, then don't actually ping anything.
'''
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
ping_url = '{}/{}'.format(hook_config['ping_url'], MONITOR_STATE_TO_CRONITOR[state])
if state not in MONITOR_STATE_TO_CRONITOR:
logger.debug(
f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in Cronitor hook'
)
return
logger.info(
'{}: Pinging Cronitor {}{}'.format(config_filename, state.name.lower(), dry_run_label)
)
logger.debug('{}: Using Cronitor ping URL {}'.format(config_filename, ping_url))
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
ping_url = f"{hook_config['ping_url']}/{MONITOR_STATE_TO_CRONITOR[state]}"
logger.info(f'{config_filename}: Pinging Cronitor {state.name.lower()}{dry_run_label}')
logger.debug(f'{config_filename}: Using Cronitor ping URL {ping_url}')
if not dry_run:
logging.getLogger('urllib3').setLevel(logging.ERROR)

View File

@ -9,6 +9,7 @@ from borgmatic.hooks import (
ntfy,
pagerduty,
postgresql,
sqlite,
)
logger = logging.getLogger(__name__)
@ -22,6 +23,7 @@ HOOK_NAME_TO_MODULE = {
'ntfy': ntfy,
'pagerduty': pagerduty,
'postgresql_databases': postgresql,
'sqlite_databases': sqlite,
}
@ -41,9 +43,9 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
try:
module = HOOK_NAME_TO_MODULE[hook_name]
except KeyError:
raise ValueError('Unknown hook name: {}'.format(hook_name))
raise ValueError(f'Unknown hook name: {hook_name}')
logger.debug('{}: Calling {} hook function {}'.format(log_prefix, hook_name, function_name))
logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')
return getattr(module, function_name)(config, log_prefix, *args, **kwargs)

View File

@ -6,7 +6,12 @@ from borgmatic.borg.state import DEFAULT_BORGMATIC_SOURCE_DIRECTORY
logger = logging.getLogger(__name__)
DATABASE_HOOK_NAMES = ('postgresql_databases', 'mysql_databases', 'mongodb_databases')
DATABASE_HOOK_NAMES = (
'postgresql_databases',
'mysql_databases',
'mongodb_databases',
'sqlite_databases',
)
def make_database_dump_path(borgmatic_source_directory, database_hook_name):
@ -28,7 +33,7 @@ def make_database_dump_filename(dump_path, name, hostname=None):
Raise ValueError if the database name is invalid.
'''
if os.path.sep in name:
raise ValueError('Invalid database name {}'.format(name))
raise ValueError(f'Invalid database name {name}')
return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name)
@ -55,9 +60,7 @@ def remove_database_dumps(dump_path, database_type_name, log_prefix, dry_run):
'''
dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
logger.debug(
'{}: Removing {} database dumps{}'.format(log_prefix, database_type_name, dry_run_label)
)
logger.debug(f'{log_prefix}: Removing {database_type_name} database dumps{dry_run_label}')
expanded_path = os.path.expanduser(dump_path)
@ -73,4 +76,4 @@ def convert_glob_patterns_to_borg_patterns(patterns):
Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
patterns like "sh:etc/*".
'''
return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]
return [f'sh:{pattern.lstrip(os.path.sep)}' for pattern in patterns]

View File

@ -10,6 +10,7 @@ MONITOR_STATE_TO_HEALTHCHECKS = {
monitor.State.START: 'start',
monitor.State.FINISH: None, # Healthchecks doesn't append to the URL for the finished state.
monitor.State.FAIL: 'fail',
monitor.State.LOG: 'log',
}
PAYLOAD_TRUNCATION_INDICATOR = '...\n'
@ -98,7 +99,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
ping_url = (
hook_config['ping_url']
if hook_config['ping_url'].startswith('http')
else 'https://hc-ping.com/{}'.format(hook_config['ping_url'])
else f"https://hc-ping.com/{hook_config['ping_url']}"
)
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
@ -110,14 +111,12 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
healthchecks_state = MONITOR_STATE_TO_HEALTHCHECKS.get(state)
if healthchecks_state:
ping_url = '{}/{}'.format(ping_url, healthchecks_state)
ping_url = f'{ping_url}/{healthchecks_state}'
logger.info(
'{}: Pinging Healthchecks {}{}'.format(config_filename, state.name.lower(), dry_run_label)
)
logger.debug('{}: Using Healthchecks ping URL {}'.format(config_filename, ping_url))
logger.info(f'{config_filename}: Pinging Healthchecks {state.name.lower()}{dry_run_label}')
logger.debug(f'{config_filename}: Using Healthchecks ping URL {ping_url}')
if state in (monitor.State.FINISH, monitor.State.FAIL):
if state in (monitor.State.FINISH, monitor.State.FAIL, monitor.State.LOG):
payload = format_buffered_logs_for_payload()
else:
payload = ''
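As a rough illustration of what the URL construction above produces (the UUID here is hypothetical), each monitoring state maps to a suffix on the base ping URL, with the new `log` state included:

```python
# Illustrative mapping; the real code keys this off monitor.State enum members.
MONITOR_STATE_TO_HEALTHCHECKS = {'start': 'start', 'finish': None, 'fail': 'fail', 'log': 'log'}

ping_url = 'https://hc-ping.com/0000-1111-2222'  # bare UUIDs get this prefix added

for state, suffix in MONITOR_STATE_TO_HEALTHCHECKS.items():
    # The finish state pings the bare URL; other states append a suffix.
    print(state, f'{ping_url}/{suffix}' if suffix else ping_url)
```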

View File

@ -27,7 +27,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
'''
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
logger.info('{}: Dumping MongoDB databases{}'.format(log_prefix, dry_run_label))
logger.info(f'{log_prefix}: Dumping MongoDB databases{dry_run_label}')
processes = []
for database in databases:
@ -38,20 +38,19 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
dump_format = database.get('format', 'archive')
logger.debug(
'{}: Dumping MongoDB database {} to {}{}'.format(
log_prefix, name, dump_filename, dry_run_label
)
f'{log_prefix}: Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
)
if dry_run:
continue
command = build_dump_command(database, dump_filename, dump_format)
if dump_format == 'directory':
dump.create_parent_directory_for_dump(dump_filename)
execute_command(command, shell=True)
else:
dump.create_named_pipe_for_dump(dump_filename)
command = build_dump_command(database, dump_filename, dump_format)
processes.append(execute_command(command, shell=True, run_to_completion=False))
processes.append(execute_command(command, shell=True, run_to_completion=False))
return processes
@ -61,9 +60,9 @@ def build_dump_command(database, dump_filename, dump_format):
Return the mongodump command from a single database configuration.
'''
all_databases = database['name'] == 'all'
command = ['mongodump', '--archive']
command = ['mongodump']
if dump_format == 'directory':
command.append(dump_filename)
command.extend(('--out', dump_filename))
if 'hostname' in database:
command.extend(('--host', database['hostname']))
if 'port' in database:
@ -79,7 +78,7 @@ def build_dump_command(database, dump_filename, dump_format):
if 'options' in database:
command.extend(database['options'].split(' '))
if dump_format != 'directory':
command.extend(('>', dump_filename))
command.extend(('--archive', '>', dump_filename))
return command
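An abbreviated sketch of the revised `build_dump_command()` logic (example values, most options omitted): directory dumps now use `--out`, while all other formats keep `--archive` but place it alongside the shell redirection:

```python
def build_dump_command(database, dump_filename, dump_format):
    # Abbreviated: the real code also handles port, auth, and extra options.
    command = ['mongodump']
    if dump_format == 'directory':
        command.extend(('--out', dump_filename))
    if 'hostname' in database:
        command.extend(('--host', database['hostname']))
    if database['name'] != 'all':
        command.extend(('--db', database['name']))
    if dump_format != 'directory':
        # Stream via --archive plus shell redirection to the named pipe.
        command.extend(('--archive', '>', dump_filename))
    return command

print(build_dump_command({'name': 'messages', 'hostname': 'db1'}, '/tmp/dump', 'archive'))
# ['mongodump', '--host', 'db1', '--db', 'messages', '--archive', '>', '/tmp/dump']
```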
@ -125,9 +124,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
)
restore_command = build_restore_command(extract_process, database, dump_filename)
logger.debug(
'{}: Restoring MongoDB database {}{}'.format(log_prefix, database['name'], dry_run_label)
)
logger.debug(f"{log_prefix}: Restoring MongoDB database {database['name']}{dry_run_label}")
if dry_run:
return
@ -145,9 +142,11 @@ def build_restore_command(extract_process, database, dump_filename):
'''
Return the mongorestore command from a single database configuration.
'''
command = ['mongorestore', '--archive']
if not extract_process:
command.append(dump_filename)
command = ['mongorestore']
if extract_process:
command.append('--archive')
else:
command.extend(('--dir', dump_filename))
if database['name'] != 'all':
command.extend(('--drop', '--db', database['name']))
if 'hostname' in database:
@ -160,4 +159,6 @@ def build_restore_command(extract_process, database, dump_filename):
command.extend(('--password', database['password']))
if 'authentication_database' in database:
command.extend(('--authenticationDatabase', database['authentication_database']))
if 'restore_options' in database:
command.extend(database['restore_options'].split(' '))
return command

View File

@ -7,3 +7,5 @@ class State(Enum):
START = 1
FINISH = 2
FAIL = 3
LOG = 4
WARN = 5

View File

@ -1,4 +1,6 @@
import copy
import logging
import os
from borgmatic.execute import (
execute_command,
@ -22,16 +24,16 @@ def make_dump_path(location_config): # pragma: no cover
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
def database_names_to_dump(database, extra_environment, log_prefix, dry_run_label):
def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
'''
Given a requested database name, return the corresponding sequence of database names to dump.
Given a requested database config, return the corresponding sequence of database names to dump.
In the case of "all", query for the names of databases on the configured host and return them,
excluding any system databases that will cause problems during restore.
'''
requested_name = database['name']
if requested_name != 'all':
return (requested_name,)
if database['name'] != 'all':
return (database['name'],)
if dry_run:
return ()
show_command = (
('mysql',)
@ -43,9 +45,7 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run_labe
+ ('--skip-column-names', '--batch')
+ ('--execute', 'show schemas')
)
logger.debug(
'{}: Querying for "all" MySQL databases to dump{}'.format(log_prefix, dry_run_label)
)
logger.debug(f'{log_prefix}: Querying for "all" MySQL databases to dump')
show_output = execute_command_and_capture_output(
show_command, extra_environment=extra_environment
)
@ -57,6 +57,53 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run_labe
)
def execute_dump_command(
database, log_prefix, dump_path, database_names, extra_environment, dry_run, dry_run_label
):
'''
Kick off a dump for the given MySQL/MariaDB database (provided as a configuration dict) to a
named pipe constructed from the given dump path and database names. Use the given log prefix in
any log entries.
Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
this is a dry run, then don't actually dump anything and return None.
'''
database_name = database['name']
dump_filename = dump.make_database_dump_filename(
dump_path, database['name'], database.get('hostname')
)
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}'
)
return None
dump_command = (
('mysqldump',)
+ (tuple(database['options'].split(' ')) if 'options' in database else ())
+ (('--add-drop-database',) if database.get('add_drop_database', True) else ())
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
+ (('--user', database['username']) if 'username' in database else ())
+ ('--databases',)
+ database_names
+ ('--result-file', dump_filename)
)
logger.debug(
f'{log_prefix}: Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
)
if dry_run:
return None
dump.create_named_pipe_for_dump(dump_filename)
return execute_command(
dump_command, extra_environment=extra_environment, run_to_completion=False,
)
def dump_databases(databases, log_prefix, location_config, dry_run):
'''
Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
@ -70,55 +117,50 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = []
logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))
logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')
for database in databases:
requested_name = database['name']
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), requested_name, database.get('hostname')
)
dump_path = make_dump_path(location_config)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run_label
database, extra_environment, log_prefix, dry_run
)
if not dump_database_names:
if dry_run:
continue
raise ValueError('Cannot find any MySQL databases to dump.')
dump_command = (
('mysqldump',)
+ (tuple(database['options'].split(' ')) if 'options' in database else ())
+ ('--add-drop-database',)
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
+ (('--user', database['username']) if 'username' in database else ())
+ ('--databases',)
+ dump_database_names
# Use shell redirection rather than execute_command(output_file=open(...)) to prevent
# the open() call on a named pipe from hanging the main borgmatic process.
+ ('>', dump_filename)
)
logger.debug(
'{}: Dumping MySQL database {} to {}{}'.format(
log_prefix, requested_name, dump_filename, dry_run_label
if database['name'] == 'all' and database.get('format'):
for dump_name in dump_database_names:
renamed_database = copy.copy(database)
renamed_database['name'] = dump_name
processes.append(
execute_dump_command(
renamed_database,
log_prefix,
dump_path,
(dump_name,),
extra_environment,
dry_run,
dry_run_label,
)
)
else:
processes.append(
execute_dump_command(
database,
log_prefix,
dump_path,
dump_database_names,
extra_environment,
dry_run,
dry_run_label,
)
)
)
if dry_run:
continue
dump.create_named_pipe_for_dump(dump_filename)
processes.append(
execute_command(
dump_command,
shell=True,
extra_environment=extra_environment,
run_to_completion=False,
)
)
return processes
return [process for process in processes if process]
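The comment about shell redirection deserves a concrete demonstration: opening a named pipe blocks until both a reader and a writer attach, so the redirection has to happen in the dump child process rather than via `open()` in the main borgmatic process. A small POSIX-only sketch of that behavior:

```python
import os
import subprocess
import tempfile

pipe_path = os.path.join(tempfile.mkdtemp(), 'dump')
os.mkfifo(pipe_path)

# The child shell (not this process) blocks on the redirection until a reader appears.
process = subprocess.Popen(f'echo dump-data > {pipe_path}', shell=True)

# Attaching a reader unblocks the writer; this process was never stuck.
with open(pipe_path) as reader:
    print(reader.read())

process.wait()
```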
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
@ -157,6 +199,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
database = database_config[0]
restore_command = (
('mysql', '--batch')
+ (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
@ -164,9 +207,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
logger.debug(
'{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
)
logger.debug(f"{log_prefix}: Restoring MySQL database {database['name']}{dry_run_label}")
if dry_run:
return

View File

@ -2,16 +2,8 @@ import logging
import requests
from borgmatic.hooks import monitor
logger = logging.getLogger(__name__)
MONITOR_STATE_TO_NTFY = {
monitor.State.START: None,
monitor.State.FINISH: None,
monitor.State.FAIL: None,
}
def initialize_monitor(
ping_url, config_filename, monitoring_log_level, dry_run
@ -56,14 +48,30 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
'X-Tags': state_config.get('tags'),
}
username = hook_config.get('username')
password = hook_config.get('password')
auth = None
if username and password:
auth = requests.auth.HTTPBasicAuth(username, password)
logger.info(f'{config_filename}: Using basic auth with user {username} for ntfy')
elif username is not None:
logger.warning(
f'{config_filename}: Password missing for ntfy authentication, defaulting to no auth'
)
elif password is not None:
logger.warning(
f'{config_filename}: Username missing for ntfy authentication, defaulting to no auth'
)
if not dry_run:
logging.getLogger('urllib3').setLevel(logging.ERROR)
try:
response = requests.post(f'{base_url}/{topic}', headers=headers)
response = requests.post(f'{base_url}/{topic}', headers=headers, auth=auth)
if not response.ok:
response.raise_for_status()
except requests.exceptions.RequestException as error:
logger.warning(f'{config_filename}: Ntfy error: {error}')
logger.warning(f'{config_filename}: ntfy error: {error}')
def destroy_monitor(
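A minimal sketch of the request the new ntfy authentication support ends up making (the topic and credentials here are hypothetical): basic auth is attached only when both a username and a password are configured.

```python
import requests

username, password = 'backup-bot', 'hunter2'  # hypothetical credentials

# Only authenticate when both halves are present, matching the hook's intent.
auth = requests.auth.HTTPBasicAuth(username, password) if username and password else None

response = requests.post(
    'https://ntfy.sh/my-borgmatic-topic',  # hypothetical topic URL
    headers={'X-Title': 'borgmatic: backup failed'},
    auth=auth,
)
response.raise_for_status()
```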

View File

@ -29,14 +29,12 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
'''
if state != monitor.State.FAIL:
logger.debug(
'{}: Ignoring unsupported monitoring {} in PagerDuty hook'.format(
config_filename, state.name.lower()
)
f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in PagerDuty hook',
)
return
dry_run_label = ' (dry run; not actually sending)' if dry_run else ''
logger.info('{}: Sending failure event to PagerDuty {}'.format(config_filename, dry_run_label))
logger.info(f'{config_filename}: Sending failure event to PagerDuty {dry_run_label}')
if dry_run:
return
@ -50,7 +48,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
'routing_key': hook_config['integration_key'],
'event_action': 'trigger',
'payload': {
'summary': 'backup failed on {}'.format(hostname),
'summary': f'backup failed on {hostname}',
'severity': 'error',
'source': hostname,
'timestamp': local_timestamp,
@ -65,7 +63,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
},
}
)
logger.debug('{}: Using PagerDuty payload: {}'.format(config_filename, payload))
logger.debug(f'{config_filename}: Using PagerDuty payload: {payload}')
logging.getLogger('urllib3').setLevel(logging.ERROR)
try:

View File

@ -1,6 +1,12 @@
import csv
import logging
import os
from borgmatic.execute import execute_command, execute_command_with_processes
from borgmatic.execute import (
execute_command,
execute_command_and_capture_output,
execute_command_with_processes,
)
from borgmatic.hooks import dump
logger = logging.getLogger(__name__)
@ -34,6 +40,44 @@ def make_extra_environment(database):
return extra
EXCLUDED_DATABASE_NAMES = ('template0', 'template1')
def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
'''
Given a requested database config, return the corresponding sequence of database names to dump.
In the case of "all" when a database format is given, query for the names of databases on the
configured host and return them. For "all" without a database format, just return a sequence
containing "all".
'''
requested_name = database['name']
if requested_name != 'all':
return (requested_name,)
if not database.get('format'):
return ('all',)
if dry_run:
return ()
list_command = (
('psql', '--list', '--no-password', '--csv', '--tuples-only')
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--username', database['username']) if 'username' in database else ())
+ (tuple(database['list_options'].split(' ')) if 'list_options' in database else ())
)
logger.debug(f'{log_prefix}: Querying for "all" PostgreSQL databases to dump')
list_output = execute_command_and_capture_output(
list_command, extra_environment=extra_environment
)
return tuple(
row[0]
for row in csv.reader(list_output.splitlines(), delimiter=',', quotechar='"')
if row[0] not in EXCLUDED_DATABASE_NAMES
)
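Since `psql --list --csv --tuples-only` emits one CSV row per database, the parsing above reduces to standard `csv` handling. A self-contained example with sample output (illustrative values):

```python
import csv

EXCLUDED_DATABASE_NAMES = ('template0', 'template1')

# Sample psql --list --csv --tuples-only output: name,owner,encoding,...
list_output = 'users,postgres,UTF8\norders,postgres,UTF8\ntemplate0,postgres,UTF8\n'

names = tuple(
    row[0]
    for row in csv.reader(list_output.splitlines(), delimiter=',', quotechar='"')
    if row[0] not in EXCLUDED_DATABASE_NAMES
)
print(names)  # ('users', 'orders')
```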
def dump_databases(databases, log_prefix, location_config, dry_run):
'''
Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
@ -43,58 +87,76 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
Raise ValueError if the databases to dump cannot be determined.
'''
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = []
logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label))
logger.info(f'{log_prefix}: Dumping PostgreSQL databases{dry_run_label}')
for database in databases:
name = database['name']
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), name, database.get('hostname')
)
all_databases = bool(name == 'all')
dump_format = database.get('format', 'custom')
command = (
(
'pg_dumpall' if all_databases else 'pg_dump',
'--no-password',
'--clean',
'--if-exists',
)
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--username', database['username']) if 'username' in database else ())
+ (() if all_databases else ('--format', dump_format))
+ (('--file', dump_filename) if dump_format == 'directory' else ())
+ (tuple(database['options'].split(' ')) if 'options' in database else ())
+ (() if all_databases else (name,))
# Use shell redirection rather than the --file flag to sidestep synchronization issues
# when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
# format in particular, a named destination is required, and redirection doesn't work.
+ (('>', dump_filename) if dump_format != 'directory' else ())
)
extra_environment = make_extra_environment(database)
logger.debug(
'{}: Dumping PostgreSQL database {} to {}{}'.format(
log_prefix, name, dump_filename, dry_run_label
)
dump_path = make_dump_path(location_config)
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
)
if dry_run:
continue
if dump_format == 'directory':
dump.create_parent_directory_for_dump(dump_filename)
else:
dump.create_named_pipe_for_dump(dump_filename)
if not dump_database_names:
if dry_run:
continue
processes.append(
execute_command(
command, shell=True, extra_environment=extra_environment, run_to_completion=False
raise ValueError('Cannot find any PostgreSQL databases to dump.')
for database_name in dump_database_names:
dump_format = database.get('format', None if database_name == 'all' else 'custom')
default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
dump_command = database.get('pg_dump_command') or default_dump_command
dump_filename = dump.make_database_dump_filename(
dump_path, database_name, database.get('hostname')
)
)
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
)
continue
command = (
(dump_command, '--no-password', '--clean', '--if-exists',)
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--username', database['username']) if 'username' in database else ())
+ (('--format', dump_format) if dump_format else ())
+ (('--file', dump_filename) if dump_format == 'directory' else ())
+ (tuple(database['options'].split(' ')) if 'options' in database else ())
+ (() if database_name == 'all' else (database_name,))
# Use shell redirection rather than the --file flag to sidestep synchronization issues
# when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
# format in particular, a named destination is required, and redirection doesn't work.
+ (('>', dump_filename) if dump_format != 'directory' else ())
)
logger.debug(
f'{log_prefix}: Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
)
if dry_run:
continue
if dump_format == 'directory':
dump.create_parent_directory_for_dump(dump_filename)
execute_command(
command, shell=True, extra_environment=extra_environment,
)
else:
dump.create_named_pipe_for_dump(dump_filename)
processes.append(
execute_command(
command,
shell=True,
extra_environment=extra_environment,
run_to_completion=False,
)
)
return processes
@ -140,16 +202,19 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), database['name'], database.get('hostname')
)
psql_command = database.get('psql_command') or 'psql'
analyze_command = (
('psql', '--no-password', '--quiet')
(psql_command, '--no-password', '--quiet')
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--username', database['username']) if 'username' in database else ())
+ (('--dbname', database['name']) if not all_databases else ())
+ (tuple(database['analyze_options'].split(' ')) if 'analyze_options' in database else ())
+ ('--command', 'ANALYZE')
)
pg_restore_command = database.get('pg_restore_command') or 'pg_restore'
restore_command = (
('psql' if all_databases else 'pg_restore', '--no-password')
(psql_command if all_databases else pg_restore_command, '--no-password')
+ (
('--if-exists', '--exit-on-error', '--clean', '--dbname', database['name'])
if not all_databases
@ -158,13 +223,12 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--username', database['username']) if 'username' in database else ())
+ (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
+ (() if extract_process else (dump_filename,))
)
extra_environment = make_extra_environment(database)
logger.debug(
'{}: Restoring PostgreSQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
)
logger.debug(f"{log_prefix}: Restoring PostgreSQL database {database['name']}{dry_run_label}")
if dry_run:
return

125
borgmatic/hooks/sqlite.py Normal file
View File

@ -0,0 +1,125 @@
import logging
import os
from borgmatic.execute import execute_command, execute_command_with_processes
from borgmatic.hooks import dump
logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover
'''
Make the dump path from the given location configuration and the name of this hook.
'''
return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'sqlite_databases'
)
def dump_databases(databases, log_prefix, location_config, dry_run):
'''
Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of
configuration dicts, as per the configuration schema. Use the given log prefix in any log
entries. Use the given location configuration dict to construct the destination path. If this
is a dry run, then don't actually dump anything.
'''
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = []
logger.info(f'{log_prefix}: Dumping SQLite databases{dry_run_label}')
for database in databases:
database_path = database['path']
if database['name'] == 'all':
logger.warning('The "all" database name has no meaning for SQLite3 databases')
if not os.path.exists(database_path):
logger.warning(
f'{log_prefix}: No SQLite database at {database_path}; an empty database will be created and dumped'
)
dump_path = make_dump_path(location_config)
dump_filename = dump.make_database_dump_filename(dump_path, database['name'])
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
)
continue
command = (
'sqlite3',
database_path,
'.dump',
'>',
dump_filename,
)
logger.debug(
f'{log_prefix}: Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
)
if dry_run:
continue
dump.create_parent_directory_for_dump(dump_filename)
processes.append(execute_command(command, shell=True, run_to_completion=False))
return processes
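Because the dump runs with `shell=True`, the command tuple above gets joined into a single shell invocation. A standalone equivalent (paths are illustrative), assuming the `sqlite3` CLI is installed:

```python
import subprocess

# Roughly what the hook executes per configured database:
#   sqlite3 /var/lib/sqlite3/mydb.sqlite .dump > <dump_filename>
subprocess.run(
    'sqlite3 /var/lib/sqlite3/mydb.sqlite .dump > /tmp/mydb.dump',
    shell=True,
    check=True,
)
```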
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
'''
Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a
sequence of configuration dicts, as per the configuration schema. Use the given log prefix in
any log entries. Use the given location configuration dict to construct the destination path.
If this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(location_config), 'SQLite', log_prefix, dry_run)
def make_database_dump_pattern(
databases, log_prefix, location_config, name=None
): # pragma: no cover
'''
Make a pattern that matches the given SQLite3 databases. The databases are supplied as a
sequence of configuration dicts, as per the configuration schema.
'''
return dump.make_database_dump_filename(make_dump_path(location_config), name)
def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process):
'''
Restore the given SQLite3 database from an extract stream. The database is supplied as a
one-element sequence containing a dict describing the database, as per the configuration schema.
Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
if len(database_config) != 1:
raise ValueError('The database configuration value is invalid')
database_path = database_config[0]['path']
logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}')
if dry_run:
return
try:
os.remove(database_path)
logger.warning(f'{log_prefix}: Removed existing SQLite database at {database_path}')
except FileNotFoundError: # pragma: no cover
pass
restore_command = (
'sqlite3',
database_path,
)
# Don't give Borg a local path, so as to error on warnings, as "borg extract" only gives a warning
# if the restore paths don't exist in the archive.
execute_command_with_processes(
restore_command,
[extract_process],
output_log_level=logging.DEBUG,
input_file=extract_process.stdout,
)

View File

@ -85,18 +85,19 @@ class Multi_stream_handler(logging.Handler):
handler.setLevel(level)
LOG_LEVEL_TO_COLOR = {
logging.CRITICAL: colorama.Fore.RED,
logging.ERROR: colorama.Fore.RED,
logging.WARN: colorama.Fore.YELLOW,
logging.INFO: colorama.Fore.GREEN,
logging.DEBUG: colorama.Fore.CYAN,
}
class Console_color_formatter(logging.Formatter):
def format(self, record):
color = LOG_LEVEL_TO_COLOR.get(record.levelno)
add_custom_log_levels()
color = {
logging.CRITICAL: colorama.Fore.RED,
logging.ERROR: colorama.Fore.RED,
logging.WARN: colorama.Fore.YELLOW,
logging.ANSWER: colorama.Fore.MAGENTA,
logging.INFO: colorama.Fore.GREEN,
logging.DEBUG: colorama.Fore.CYAN,
}.get(record.levelno)
return color_text(color, record.msg)
@ -107,7 +108,46 @@ def color_text(color, message):
if not color:
return message
return '{}{}{}'.format(color, message, colorama.Style.RESET_ALL)
return f'{color}{message}{colorama.Style.RESET_ALL}'
def add_logging_level(level_name, level_number):
'''
Globally add a custom logging level based on the given (all uppercase) level name and number.
Do this idempotently.
Inspired by https://stackoverflow.com/questions/2183233/how-to-add-a-custom-loglevel-to-pythons-logging-facility/35804945#35804945
'''
method_name = level_name.lower()
if not hasattr(logging, level_name):
logging.addLevelName(level_number, level_name)
setattr(logging, level_name, level_number)
if not hasattr(logging, method_name):
def log_for_level(self, message, *args, **kwargs): # pragma: no cover
if self.isEnabledFor(level_number):
self._log(level_number, message, args, **kwargs)
setattr(logging.getLoggerClass(), method_name, log_for_level)
if not hasattr(logging.getLoggerClass(), method_name):
def log_to_root(message, *args, **kwargs): # pragma: no cover
logging.log(level_number, message, *args, **kwargs)
setattr(logging, method_name, log_to_root)
ANSWER = logging.WARN - 5
def add_custom_log_levels(): # pragma: no cover
'''
Add a custom log level between WARN and INFO for user-requested answers.
'''
add_logging_level('ANSWER', ANSWER)
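A quick usage sketch for the custom level machinery above, assuming `add_custom_log_levels()` from this module is in scope: once registered, `ANSWER` sits between `INFO` and `WARNING`, and loggers gain an `.answer()` method.

```python
import logging

add_custom_log_levels()  # registers logging.ANSWER and Logger.answer()
logging.basicConfig(level=logging.ANSWER)

logger = logging.getLogger(__name__)
logger.answer('archive listing the user explicitly asked for')  # emitted
logger.info('chatty detail')  # suppressed below the ANSWER threshold
```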
def configure_logging(
@ -130,6 +170,8 @@ def configure_logging(
if monitoring_log_level is None:
monitoring_log_level = console_log_level
add_custom_log_levels()
# Log certain log levels to console stderr and others to stdout. This supports use cases like
# grepping (non-error) output.
console_error_handler = logging.StreamHandler(sys.stderr)
@ -138,7 +180,8 @@ def configure_logging(
{
logging.CRITICAL: console_error_handler,
logging.ERROR: console_error_handler,
logging.WARN: console_standard_handler,
logging.WARN: console_error_handler,
logging.ANSWER: console_standard_handler,
logging.INFO: console_standard_handler,
logging.DEBUG: console_standard_handler,
}

View File

@ -1,7 +1,9 @@
import logging
import borgmatic.logger
VERBOSITY_ERROR = -1
VERBOSITY_WARNING = 0
VERBOSITY_ANSWER = 0
VERBOSITY_SOME = 1
VERBOSITY_LOTS = 2
@ -10,9 +12,11 @@ def verbosity_to_log_level(verbosity):
'''
Given a borgmatic verbosity value, return the corresponding Python log level.
'''
borgmatic.logger.add_custom_log_levels()
return {
VERBOSITY_ERROR: logging.ERROR,
VERBOSITY_WARNING: logging.WARNING,
VERBOSITY_ANSWER: logging.ANSWER,
VERBOSITY_SOME: logging.INFO,
VERBOSITY_LOTS: logging.DEBUG,
}.get(verbosity, logging.WARNING)
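Illustrating the new mapping, assuming this module is importable as `borgmatic.verbosity`: the default verbosity of 0 now corresponds to the custom `ANSWER` level rather than `WARNING`.

```python
import logging

import borgmatic.logger
import borgmatic.verbosity

borgmatic.logger.add_custom_log_levels()

assert borgmatic.verbosity.verbosity_to_log_level(0) == logging.ANSWER
assert borgmatic.verbosity.verbosity_to_log_level(-1) == logging.ERROR
assert borgmatic.verbosity.verbosity_to_log_level(99) == logging.WARNING  # unknown values fall back
```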

View File

@ -1,14 +1,14 @@
FROM alpine:3.16.0 as borgmatic
FROM alpine:3.17.1 as borgmatic
COPY . /app
RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib
RUN pip install --no-cache /app && generate-borgmatic-config && chmod +r /etc/borgmatic/config.yaml
RUN borgmatic --help > /command-line.txt \
&& for action in rcreate transfer prune compact create check extract export-tar mount umount restore rlist list rinfo info break-lock borg; do \
&& for action in rcreate transfer create prune compact check extract export-tar mount umount restore rlist list rinfo info break-lock borg; do \
echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \
&& borgmatic "$action" --help >> /command-line.txt; done
FROM node:18.4.0-alpine as html
FROM node:19.5.0-alpine as html
ARG ENVIRONMENT=production
@ -18,6 +18,7 @@ RUN npm install @11ty/eleventy \
@11ty/eleventy-plugin-syntaxhighlight \
@11ty/eleventy-plugin-inclusive-language \
@11ty/eleventy-navigation \
eleventy-plugin-code-clipboard \
markdown-it \
markdown-it-anchor \
markdown-it-replace-link
@ -27,7 +28,7 @@ COPY . /source
RUN NODE_ENV=${ENVIRONMENT} npx eleventy --input=/source/docs --output=/output/docs \
&& mv /output/docs/index.html /output/index.html
FROM nginx:1.22.0-alpine
FROM nginx:1.22.1-alpine
COPY --from=html /output /usr/share/nginx/html
COPY --from=borgmatic /etc/borgmatic/config.yaml /usr/share/nginx/html/docs/reference/config.yaml

View File

@ -63,11 +63,6 @@
top: -2px;
bottom: 2px;
}
@media (prefers-color-scheme: dark) {
.inlinelist .inlinelist-item code:before {
border-left-color: rgba(0,0,0,.8);
}
}
}
a.buzzword {
text-decoration: underline;
@ -91,26 +86,9 @@ a.buzzword {
.buzzword {
background-color: #f7f7f7;
}
@media (prefers-color-scheme: dark) {
.buzzword-list li,
.buzzword {
background-color: #080808;
}
}
.inlinelist .inlinelist-item {
background-color: #e9e9e9;
}
@media (prefers-color-scheme: dark) {
.inlinelist .inlinelist-item {
background-color: #000;
}
.inlinelist .inlinelist-item a {
color: #fff;
}
.inlinelist .inlinelist-item code {
color: inherit;
}
}
.inlinelist .inlinelist-item:hover,
.inlinelist .inlinelist-item:focus,
.buzzword-list li:hover,
@ -217,12 +195,6 @@ main p a.buzzword {
height: 1.75em;
font-weight: 600;
}
@media (prefers-color-scheme: dark) {
.numberflag {
background-color: #00bcd4;
color: #222;
}
}
h1 .numberflag,
h2 .numberflag,
h3 .numberflag,
@ -244,11 +216,6 @@ h2 .numberflag:after {
background-color: #fff;
width: calc(100% + 0.4em); /* 16px /40 */
}
@media (prefers-color-scheme: dark) {
h2 .numberflag:after {
background-color: #222;
}
}
/* Super featured list on home page */
.list-superfeatured .avatar {

View File

@ -12,16 +12,6 @@
line-height: 1.285714285714; /* 18px /14 */
font-family: system-ui, -apple-system, sans-serif;
}
@media (prefers-color-scheme: dark) {
.minilink {
background-color: #222;
/*
!important to override .elv-callout a
see _includes/components/callout.css
*/
color: #fff !important;
}
}
table .minilink {
margin-top: 6px;
}
@ -32,12 +22,6 @@ table .minilink {
.minilink[href]:focus {
background-color: #bbb;
}
@media (prefers-color-scheme: dark) {
.minilink[href]:hover,
.minilink[href]:focus {
background-color: #444;
}
}
pre + .minilink {
color: #fff;
border-radius: 0 0 0.2857142857143em 0.2857142857143em; /* 4px /14 */
@ -74,11 +58,6 @@ h4 .minilink {
text-transform: none;
box-shadow: 0 0 0 1px rgba(0,0,0,0.3);
}
@media (prefers-color-scheme: dark) {
.minilink-addedin {
box-shadow: 0 0 0 1px rgba(255,255,255,0.3);
}
}
.minilink-addedin:not(:first-child) {
margin-left: .5em;
}

View File

@ -79,22 +79,11 @@
border-bottom: 1px solid #ddd;
margin-bottom: 0.25em; /* 4px /16 */
}
@media (prefers-color-scheme: dark) {
.elv-toc-list > li > a {
color: #fff;
border-color: #444;
}
}
/* Active links */
.elv-toc-list li.elv-toc-active > a {
background-color: #dff7ff;
}
@media (prefers-color-scheme: dark) {
.elv-toc-list li.elv-toc-active > a {
background-color: #353535;
}
}
.elv-toc-list ul .elv-toc-active > a:after {
content: "";
}

View File

@ -285,11 +285,6 @@ footer.elv-layout {
.elv-hero {
background-color: #222;
}
@media (prefers-color-scheme: dark) {
.elv-hero {
background-color: #292929;
}
}
.elv-hero img,
.elv-hero svg {
width: 42.95774646vh;
@ -538,3 +533,18 @@ main .elv-toc + h1 .direct-link {
.header-anchor:hover::after {
content: " 🔗";
}
.mdi {
display: inline-block;
width: 1em;
height: 1em;
background-color: currentColor;
-webkit-mask: no-repeat center / 100%;
mask: no-repeat center / 100%;
-webkit-mask-image: var(--svg);
mask-image: var(--svg);
}
.mdi.mdi-content-copy {
--svg: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' width='24' height='24'%3E%3Cpath fill='black' d='M19 21H8V7h11m0-2H8a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h11a2 2 0 0 0 2-2V7a2 2 0 0 0-2-2m-3-4H4a2 2 0 0 0-2 2v14h2V3h12V1Z'/%3E%3C/svg%3E");
}

View File

@ -22,6 +22,6 @@
<body>
{{ content | safe }}
{% initClipboardJS %}
</body>
</html>

View File

@ -49,9 +49,12 @@ location:
- /home
repositories:
- /mnt/removable/backup.borg
- path: /mnt/removable/backup.borg
```
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
Then, write a `before_backup` hook in that same configuration file that uses
the external `findmnt` utility to see whether the drive is mounted before
proceeding.
@ -68,6 +71,9 @@ borgmatic. borgmatic logs the soft failure, skips all further actions in that
configuration file, and proceeds onward to any other borgmatic configuration
files you may have.
Note that `before_backup` only runs on the `create` action. See below about
optionally using `before_actions` instead.
You can imagine a similar check for the sometimes-online server case:
```yaml
@ -76,13 +82,16 @@ location:
- /home
repositories:
- ssh://me@buddys-server.org/./backup.borg
- path: ssh://me@buddys-server.org/./backup.borg
hooks:
before_backup:
- ping -q -c 1 buddys-server.org > /dev/null || exit 75
```
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
Or to only run backups if the battery level is high enough:
```yaml
@ -93,6 +102,12 @@ hooks:
(Writing the battery script is left as an exercise to the reader.)
<span class="minilink minilink-addedin">New in version 1.7.0</span> The
`before_actions` and `after_actions` hooks run before/after all the actions
(like `create`, `prune`, etc.) for each repository. So if you'd like your soft
failure command hook to run regardless of action, consider using
`before_actions` instead of `before_backup`.
## Caveats and details

View File

@ -15,8 +15,7 @@ consistent snapshot that is more suited for backups.
Fortunately, borgmatic includes built-in support for creating database dumps
prior to running backups. For example, here is everything you need to dump and
backup a couple of local PostgreSQL databases, a MySQL/MariaDB database, and a
MongoDB database:
backup a couple of local PostgreSQL databases and a MySQL/MariaDB database:
```yaml
hooks:
@ -25,10 +24,27 @@ hooks:
- name: orders
mysql_databases:
- name: posts
```
<span class="minilink minilink-addedin">New in version 1.5.22</span> You can
also dump MongoDB databases. For example:
```yaml
hooks:
mongodb_databases:
- name: messages
```
<span class="minilink minilink-addedin">New in version 1.7.9</span>
Additionally, you can dump SQLite databases. For example:
```yaml
hooks:
sqlite_databases:
- name: mydb
path: /var/lib/sqlite3/mydb.sqlite
```
As part of each backup, borgmatic streams a database dump for each configured
database directly to Borg, so it's included in the backup without consuming
additional disk space. (The exceptions are the PostgreSQL/MongoDB "directory"
@ -74,8 +90,19 @@ hooks:
password: trustsome1
authentication_database: mongousers
options: "--ssl"
sqlite_databases:
- name: mydb
path: /var/lib/sqlite3/mydb.sqlite
```
See your [borgmatic configuration
file](https://torsion.org/borgmatic/docs/reference/configuration/) for
additional customization of the options passed to database commands (when
listing databases, restoring databases, etc.).
### All databases
If you want to dump all databases on a host, use `all` for the database name:
```yaml
@ -91,9 +118,79 @@ hooks:
Note that you may need to use a `username` of the `postgres` superuser for
this to work with PostgreSQL.
If you would like to backup databases only and not source directories, you can
specify an empty `source_directories` value (as it is a mandatory field prior
to borgmatic 1.7.1):
The SQLite hook in particular does not consider "all" a special database name.
<span class="minilink minilink-addedin">New in version 1.7.6</span> With
PostgreSQL and MySQL, you can optionally dump "all" databases to separate
files instead of one combined dump file, allowing more convenient restores of
individual databases. Enable this by specifying your desired database dump
`format`:
```yaml
hooks:
postgresql_databases:
- name: all
format: custom
mysql_databases:
- name: all
format: sql
```
### Containers
If your database is running within a Docker container and borgmatic is too, no
problem—simply configure borgmatic to connect to the container's name on its
exposed port. For instance:
```yaml
hooks:
postgresql_databases:
- name: users
hostname: your-database-container-name
port: 5433
username: postgres
password: trustsome1
```
But what if borgmatic is running on the host? You can still connect to a
database container if its ports are properly exposed to the host. For
instance, when running the database container with Docker, you can specify
`--publish 127.0.0.1:5433:5432` so that it exposes the container's port 5432
to port 5433 on the host (only reachable on localhost, in this case). Or the
same thing with Docker Compose:
```yaml
services:
your-database-container-name:
image: postgres
ports:
- 127.0.0.1:5433:5432
```
And then you can connect to the database from borgmatic running on the host:
```yaml
hooks:
postgresql_databases:
- name: users
hostname: 127.0.0.1
port: 5433
username: postgres
password: trustsome1
```
Of course, alter the ports in these examples to suit your particular database
system.
### No source directories
<span class="minilink minilink-addedin">New in version 1.7.1</span> If you
would like to backup databases only and not source directories, you can omit
`source_directories` entirely.
In older versions of borgmatic, instead specify an empty `source_directories`
value, as it is a mandatory option prior to version 1.7.1:
```yaml
location:
@ -103,8 +200,6 @@ hooks:
- name: all
```
<span class="minilink minilink-addedin">New in version 1.7.1</span> You can
omit `source_directories` entirely.
### External passwords
@ -126,11 +221,11 @@ bring back any missing configuration files in order to restore a database.
## Supported databases
As of now, borgmatic supports PostgreSQL, MySQL/MariaDB, and MongoDB databases
directly. But see below about general-purpose preparation and cleanup hooks as
a work-around with other database systems. Also, please [file a
ticket](https://torsion.org/borgmatic/#issues) for additional database systems
that you'd like supported.
As of now, borgmatic supports PostgreSQL, MySQL/MariaDB, MongoDB, and SQLite
databases directly. But see below about general-purpose preparation and
cleanup hooks as a work-around with other database systems. Also, please [file
a ticket](https://torsion.org/borgmatic/#issues) for additional database
systems that you'd like supported.
## Database restoration
@ -148,15 +243,15 @@ borgmatic rlist
That should yield output looking something like:
```text
host-2019-01-01T04:05:06.070809 Tue, 2019-01-01 04:05:06 [...]
host-2019-01-02T04:06:07.080910 Wed, 2019-01-02 04:06:07 [...]
host-2023-01-01T04:05:06.070809 Tue, 2023-01-01 04:05:06 [...]
host-2023-01-02T04:06:07.080910 Wed, 2023-01-02 04:06:07 [...]
```
Assuming that you want to restore all database dumps from the archive with the
most up-to-date files and therefore the latest timestamp, run a command like:
```bash
borgmatic restore --archive host-2019-01-02T04:06:07.080910
borgmatic restore --archive host-2023-01-02T04:06:07.080910
```
(No borgmatic `restore` action? Upgrade borgmatic!)
@ -182,10 +277,11 @@ If you have a single repository in your borgmatic configuration file(s), no
problem: the `restore` action figures out which repository to use.
But if you have multiple repositories configured, then you'll need to specify
the repository path containing the archive to restore. Here's an example:
the repository to use via the `--repository` flag. This can be done either
with the repository's path or its label as configured in your borgmatic configuration file.
```bash
borgmatic restore --repository repo.borg --archive host-2019-...
borgmatic restore --repository repo.borg --archive host-2023-...
```
### Restore particular databases
@ -195,9 +291,39 @@ restore one of them, use the `--database` flag to select one or more
databases. For instance:
```bash
borgmatic restore --archive host-2019-... --database users
borgmatic restore --archive host-2023-... --database users
```
<span class="minilink minilink-addedin">New in version 1.7.6</span> You can
also restore individual databases even if you dumped them as "all"—as long as
you dumped them into separate files via use of the "format" option. See above
for more information.
### Restore all databases
To restore all databases:
```bash
borgmatic restore --archive host-2023-... --database all
```
Or omit the `--database` flag entirely:
```bash
borgmatic restore --archive host-2023-...
```
Prior to borgmatic version 1.7.6, this restores a combined "all" database
dump from the archive.
<span class="minilink minilink-addedin">New in version 1.7.6</span> Restoring
"all" databases restores each database found in the selected archive. That
includes any combined dump file named "all" and any other individual database
dumps found in the archive.
### Limitations
There are a few important limitations with borgmatic's current database
@ -237,7 +363,10 @@ user and you're extracting to `/tmp`, then the dump will be in
`/tmp/root/.borgmatic`.
After extraction, you can manually restore the dump file using native database
commands like `pg_restore`, `mysql`, `mongorestore` or similar.
commands like `pg_restore`, `mysql`, `mongorestore`, `sqlite3`, or similar.
Also see the documentation on [listing database
dumps](https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#listing-database-dumps).
## Preparation and cleanup hooks
@ -252,6 +381,23 @@ dumps with any database system.
## Troubleshooting
### PostgreSQL/MySQL authentication errors
With PostgreSQL and MySQL/MariaDB, if you're getting authentication errors
when borgmatic tries to connect to your database, a natural reaction is to
increase your borgmatic verbosity with `--verbosity 2` and go looking in the
logs. You'll notice, however, that your database password does not show up in
the logs. Unless you mistyped your password, this is likely not the cause of
the authentication problem; borgmatic passes your password to the database
via an environment variable that does not appear in the logs.
The cause of an authentication error is often on the database side—in the
configuration of which users are allowed to connect and how they are
authenticated. For instance, with PostgreSQL, check your
[pg_hba.conf](https://www.postgresql.org/docs/current/auth-pg-hba-conf.html)
file for that configuration.
### MySQL table lock errors
If you encounter table lock errors during a database dump with MySQL/MariaDB,

View File

@ -9,44 +9,56 @@ eleventyNavigation:
Borg itself is great for efficiently de-duplicating data across successive
backup archives, even when dealing with very large repositories. But you may
find that while borgmatic's default mode of `prune`, `compact`, `create`, and
`check` works well on small repositories, it's not so great on larger ones.
That's because running the default pruning, compact, and consistency checks
take a long time on large repositories.
find that while borgmatic's default actions of `create`, `prune`, `compact`,
and `check` work well on small repositories, they're not so great on larger
ones. That's because running the default pruning, compaction, and consistency
checks takes a long time on large repositories.
<span class="minilink minilink-addedin">Prior to version 1.7.9</span> The
default action ordering was `prune`, `compact`, `create`, and `check`.
### A la carte actions
If you find yourself in this situation, you have some options. First, you can
run borgmatic's `prune`, `compact`, `create`, or `check` actions separately.
For instance, the following optional actions are available:
If you find yourself wanting to customize the actions, you have some options.
First, you can run borgmatic's `prune`, `compact`, `create`, or `check`
actions separately. For instance, the following optional actions are
available (among others):
```bash
borgmatic create
borgmatic prune
borgmatic compact
borgmatic create
borgmatic check
```
You can run with only one of these actions provided, or you can mix and match
any number of them in a single borgmatic run. This supports approaches like
skipping certain actions while running others. For instance, this skips
`prune` and `compact` and only runs `create` and `check`:
You can run borgmatic with only one of these actions provided, or you can mix
and match any number of them in a single borgmatic run. This supports
approaches like skipping certain actions while running others. For instance,
this skips `prune` and `compact` and only runs `create` and `check`:
```bash
borgmatic create check
```
Or, you can make backups with `create` on a frequent schedule (e.g. with
`borgmatic create` called from one cron job), while only running expensive
consistency checks with `check` on a much less frequent basis (e.g. with
`borgmatic check` called from a separate cron job).
<span class="minilink minilink-addedin">New in version 1.7.9</span> borgmatic
now respects your specified command-line action order, running actions in the
order you specify. In previous versions, borgmatic ran your specified actions
in a fixed ordering regardless of the order they appeared on the command-line.
But instead of running actions together, another option is to run backups with
`create` on a frequent schedule (e.g. with `borgmatic create` called from one
cron job), while only running expensive consistency checks with `check` on a
much less frequent basis (e.g. with `borgmatic check` called from a separate
cron job).
### Consistency check configuration
Another option is to customize your consistency checks. The default
consistency checks run both full-repository checks and per-archive checks
within each repository no more than once a month.
Another option is to customize your consistency checks. By default, if you
omit consistency checks from configuration, borgmatic runs full-repository
checks (`repository`) and per-archive checks (`archives`) within each
repository. (Although see below about check frequency.) This is equivalent to
what `borg check` does if run without options.
But if you find that archive checks are too slow, for example, you can
configure borgmatic to run repository checks only. Configure this in the
@ -58,14 +70,26 @@ consistency:
- name: repository
```
(Prior to borgmatic 1.6.2, `checks` was a plain list of strings without the `name:` part.)
<span class="minilink minilink-addedin">Prior to version 1.6.2</span> The
`checks` option was a plain list of strings without the `name:` part, and
borgmatic ran each configured check every time checks were run. For example:
```yaml
consistency:
checks:
- repository
```
Here are the available checks from fastest to slowest:
* `repository`: Checks the consistency of the repository itself.
* `archives`: Checks all of the archives in the repository.
* `extract`: Performs an extraction dry-run of the most recent archive.
* `data`: Verifies the data integrity of all archives contents, decrypting and decompressing all data (implies `archives` as well).
* `data`: Verifies the data integrity of all archives' contents, decrypting and decompressing all data.
Note that the `data` check is a more thorough version of the `archives` check,
so enabling the `data` check implicitly enables the `archives` check as well.
See [Borg's check
documentation](https://borgbackup.readthedocs.io/en/stable/usage/check.html)
@ -89,8 +113,13 @@ consistency:
This tells borgmatic to run the `repository` consistency check at most once
every two weeks for a given repository and the `archives` check at most once a
month. The `frequency` value is a number followed by a unit of time, e.g. "3
days", "1 week", "2 months", etc. The `frequency` defaults to `always`, which
means run this check every time checks run.
days", "1 week", "2 months", etc.
The `frequency` defaults to `always` for a check configured without a
`frequency`, which means run this check every time checks run. But if you omit
consistency checks from configuration entirely, borgmatic runs full-repository
checks (`repository`) and per-archive checks (`archives`) within each
repository, at most once a month.
Unlike a real scheduler like cron, borgmatic only makes a best effort to run
checks on the configured frequency. It compares that frequency with how long
@ -120,7 +149,16 @@ consistency:
- name: disabled
```
Or, if you have multiple repositories in your borgmatic configuration file,
<span class="minilink minilink-addedin">Prior to version 1.6.2</span> `checks`
was a plain list of strings without the `name:` part. For instance:
```yaml
consistency:
checks:
- disabled
```
If you have multiple repositories in your borgmatic configuration file,
you can keep running consistency checks, but only against a subset of the
repositories:

View File

@ -25,8 +25,8 @@ so that you can run borgmatic commands while you're hacking on them to
make sure your changes work.
```bash
cd borgmatic/
pip3 install --editable --user .
cd borgmatic
pip3 install --user --editable .
```
Note that this will typically install the borgmatic commands into
@ -51,7 +51,6 @@ pip3 install --user tox
Finally, to actually run tests, run:
```bash
cd borgmatic
tox
```
@ -87,7 +86,7 @@ If you would like to run the full test suite, first install Docker and [Docker
Compose](https://docs.docker.com/compose/install/). Then run:
```bash
scripts/run-full-dev-tests
scripts/run-end-to-end-dev-tests
```
Note that this script assumes you have permission to run Docker. If you

View File

@ -20,15 +20,15 @@ borgmatic rlist
That should yield output looking something like:
```text
host-2019-01-01T04:05:06.070809 Tue, 2019-01-01 04:05:06 [...]
host-2019-01-02T04:06:07.080910 Wed, 2019-01-02 04:06:07 [...]
host-2023-01-01T04:05:06.070809 Tue, 2023-01-01 04:05:06 [...]
host-2023-01-02T04:06:07.080910 Wed, 2023-01-02 04:06:07 [...]
```
Assuming that you want to extract the archive with the most up-to-date files
and therefore the latest timestamp, run a command like:
```bash
borgmatic extract --archive host-2019-01-02T04:06:07.080910
borgmatic extract --archive host-2023-01-02T04:06:07.080910
```
(No borgmatic `extract` action? Upgrade borgmatic!)
@ -51,10 +51,11 @@ If you have a single repository in your borgmatic configuration file(s), no
problem: the `extract` action figures out which repository to use.
But if you have multiple repositories configured, then you'll need to specify
the repository path containing the archive to extract. Here's an example:
the repository to use via the `--repository` flag. This can be done either
with the repository's path or its label as configured in your borgmatic configuration file.
```bash
borgmatic extract --repository repo.borg --archive host-2019-...
borgmatic extract --repository repo.borg --archive host-2023-...
```
## Extract particular files
@ -74,6 +75,13 @@ run the `extract` command above, borgmatic will extract `/var/path/1` and
`/var/path/2`.
### Searching for files
If you're not sure which archive contains the files you're looking for, you
can [search across
archives](https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#searching-for-a-file).
## Extract to a particular destination
By default, borgmatic extracts files into the current directory. To instead

View File

@ -91,6 +91,19 @@ example, to search only the last five archives:
borgmatic list --find foo.txt --last 5
```
## Listing database dumps
If you have enabled borgmatic's [database
hooks](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/), you
can list backed up database dumps via borgmatic. For example:
```bash
borgmatic list --archive latest --find .borgmatic/*_databases
```
This gives you a listing of all database dump files contained in the latest
archive, complete with file sizes.
## Logging

View File

@ -20,11 +20,13 @@ location:
# Paths of local or remote repositories to backup to.
repositories:
- ssh://1234@usw-s001.rsync.net/./backups.borg
- ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
- /var/lib/backups/local.borg
- path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
- path: /var/lib/backups/local.borg
```
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
When you run borgmatic with this configuration, it invokes Borg once for each
configured repository in sequence. (So, not in parallel.) That means—in each
repository—borgmatic creates a single new backup archive containing all of
@ -32,9 +34,8 @@ your source directories.
Here's a way of visualizing what borgmatic does with the above configuration:
1. Backup `/home` and `/etc` to `1234@usw-s001.rsync.net:backups.borg`
2. Backup `/home` and `/etc` to `k8pDxu32@k8pDxu32.repo.borgbase.com:repo`
3. Backup `/home` and `/etc` to `/var/lib/backups/local.borg`
1. Backup `/home` and `/etc` to `k8pDxu32@k8pDxu32.repo.borgbase.com:repo`
2. Backup `/home` and `/etc` to `/var/lib/backups/local.borg`
This gives you redundancy of your data across repositories and even
potentially across providers.
@ -42,3 +43,13 @@ potentially across providers.
See [Borg repository URLs
documentation](https://borgbackup.readthedocs.io/en/stable/usage/general.html#repository-urls)
for more information on how to specify local and remote repository paths.
### Different options per repository
What if you want borgmatic to backup to multiple repositories—while also
setting different options for each one? In that case, you'll need to use
[a separate borgmatic configuration file for each
repository](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/)
instead of the multiple repositories in one configuration file as described
above. That's because all of the repositories in a particular configuration
file get the same options applied.

View File

@ -106,11 +106,60 @@ But if you do want to merge in a YAML key *and* its values, keep reading!
## Include merging
If you need to get even fancier and pull in common configuration options while
potentially overriding individual options, you can perform a YAML merge of
included configuration using the YAML `<<` key. For instance, here's an
example of a main configuration file that pulls in two retention options via
an include and then overrides one of them locally:
If you need to get even fancier and merge in common configuration options, you
can perform a YAML merge of included configuration using the YAML `<<` key.
For instance, here's an example of a main configuration file that pulls in
retention and consistency options via a single include:
```yaml
<<: !include /etc/borgmatic/common.yaml
location:
...
```
This is what `common.yaml` might look like:
```yaml
retention:
    keep_hourly: 24
    keep_daily: 7

consistency:
    checks:
        - name: repository
```
Once this include gets merged in, the resulting configuration would have all
of the `location` options from the original configuration file *and* the
`retention` and `consistency` options from the include.
Prior to borgmatic version 1.6.0, when there's a section collision between the
local file and the merged include, the local file's section takes precedence.
So if the `retention` section appears in both the local file and the include
file, the included `retention` is ignored in favor of the local `retention`.
But see below about deep merge in version 1.6.0+.
Note that this `<<` include merging syntax is only for merging in mappings
(configuration options and their values). But if you'd like to include a
single value directly, please see the section above about standard includes.
Additionally, there is a limitation preventing multiple `<<` include merges
per section. So, for instance, you can do one `<<` merge at the global level,
another `<<` within each configuration section, and so on, but not two `<<`
merges in the same section. (This is a YAML limitation.)
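For instance, this combination stays within that limitation, because each `<<`
merge lives at a different level; the include paths are illustrative:
```yaml
<<: !include /etc/borgmatic/common.yaml

retention:
    <<: !include /etc/borgmatic/retention.yaml
    keep_daily: 5
```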
### Deep merge
<span class="minilink minilink-addedin">New in version 1.6.0</span> borgmatic
performs a deep merge of merged include files, meaning that values are merged
at all levels in the two configuration files. This allows you to include
common configuration—up to full borgmatic configuration files—while overriding
only the parts you want to customize.
For instance, here's an example of a main configuration file that pulls in two
retention options via an include and then overrides one of them locally:
```yaml
<<: !include /etc/borgmatic/common.yaml
@@ -136,24 +185,8 @@ Once this include gets merged in, the resulting configuration would have a
When there's an option collision between the local file and the merged
include, the local file's option takes precedence.
Note that this `<<` include merging syntax is only for merging in mappings
(configuration options and their values). But if you'd like to include a
single value directly, please see the section above about standard includes.
Additionally, there is a limitation preventing multiple `<<` include merges
per section. So for instance, that means you can do one `<<` merge at the
global level, another `<<` within each configuration section, etc. (This is a
YAML limitation.)
### Deep merge
<span class="minilink minilink-addedin">New in version 1.6.0</span> borgmatic
performs a deep merge of merged include files, meaning that values are merged
at all levels in the two configuration files. Colliding list values are
appended together. This allows you to include common configuration—up to full
borgmatic configuration files—while overriding only the parts you want to
customize.
<span class="minilink minilink-addedin">New in version 1.6.1</span> Colliding
list values are appended together.
## Configuration overrides
@@ -222,3 +255,63 @@ Be sure to quote your overrides if they contain spaces or other characters
that your shell may interpret.
An alternative to command-line overrides is to pass in your values via [environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
## Constant interpolation
<span class="minilink minilink-addedin">New in version 1.7.10</span> Another
tool is borgmatic's support for defining custom constants. This is similar to
the [variable interpolation
feature](https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/#variable-interpolation)
for command hooks, but the constants feature lets you substitute your own
custom values anywhere in the entire configuration file. (Constants don't
work across includes or separate configuration files, though.)
Here's an example usage:
```yaml
constants:
    user: foo
    my_prefix: bar-

location:
    source_directories:
        - /home/{user}/.config
        - /home/{user}/.ssh
    ...

storage:
    archive_name_format: '{my_prefix}{now}'

retention:
    prefix: {my_prefix}

consistency:
    prefix: {my_prefix}
```
In this example, when borgmatic runs, all instances of `{user}` get replaced
with `foo` and all instances of `{my_prefix}` get replaced with `bar-`. (And
in this particular example, `{now}` doesn't get replaced with anything, but
gets passed directly to Borg.) After substitution, the logical result looks
something like this:
```yaml
location:
    source_directories:
        - /home/foo/.config
        - /home/foo/.ssh
    ...

storage:
    archive_name_format: 'bar-{now}'

retention:
    prefix: bar-

consistency:
    prefix: bar-
```
An alternative to constants is to pass in your values via [environment
variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).

View File

@@ -83,7 +83,7 @@ tests](https://torsion.org/borgmatic/docs/how-to/extract-a-backup/).
## Error hooks
When an error occurs during a `prune`, `compact`, `create`, or `check` action,
When an error occurs during a `create`, `prune`, `compact`, or `check` action,
borgmatic can run configurable shell commands to fire off custom error
notifications or take other actions, so you can get alerted as soon as
something goes wrong. Here's a not-so-useful example:
@@ -116,8 +116,8 @@ the repository. Here's the full set of supported variables you can use here:
* `output`: output of the command that failed (may be blank if an error
occurred without running a command)
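To make those variables concrete, here's a sketch of an `on_error` hook that
uses them; the `send-alert` command is hypothetical:
```yaml
hooks:
    on_error:
        - send-alert "borgmatic failed on {repository}: {error}"
```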
Note that borgmatic runs the `on_error` hooks only for `prune`, `compact`,
`create`, or `check` actions or hooks in which an error occurs, and not other
Note that borgmatic runs the `on_error` hooks only for `create`, `prune`,
`compact`, or `check` actions or hooks in which an error occurs, and not other
actions. borgmatic does not run `on_error` hooks if an error occurs within a
`before_everything` or `after_everything` hook. For more about hooks, see the
[borgmatic hooks
@@ -144,7 +144,7 @@ With this hook in place, borgmatic pings your Healthchecks project when a
backup begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
hooks</a> run, borgmatic lets Healthchecks know that it has started if any of
the `prune`, `compact`, `create`, or `check` actions are run.
the `create`, `prune`, `compact`, or `check` actions are run.
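The hook itself is configured under `hooks:` with your project's ping URL.
Here's a sketch of the bare-URL form, where the UUID is a placeholder for your
own check's URL:
```yaml
hooks:
    healthchecks: https://hc-ping.com/your-uuid-here
```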
Then, if the actions complete successfully, borgmatic notifies Healthchecks of
the success after the `after_backup` hooks run, and includes borgmatic logs in
@@ -154,8 +154,8 @@ in the Healthchecks UI, although be aware that Healthchecks currently has a
If an error occurs during any action or hook, borgmatic notifies Healthchecks
after the `on_error` hooks run, also tacking on logs including the error
itself. But the logs are only included for errors that occur when a `prune`,
`compact`, `create`, or `check` action is run.
itself. But the logs are only included for errors that occur when a `create`,
`prune`, `compact`, or `check` action is run.
You can customize the verbosity of the logs that are sent to Healthchecks with
borgmatic's `--monitoring-verbosity` flag. The `--list` and `--stats` flags

View File

@@ -53,7 +53,8 @@ This runs Borg's `rlist` command once on each configured borgmatic repository.
(The native `borgmatic rlist` action should be preferred for most use.)
What if you only want to run Borg on a single configured borgmatic repository
when you've got several configured? Not a problem.
when you've got several configured? Not a problem. The `--repository` argument
lets you specify the repository to use, either by its path or its label:
```bash
borgmatic borg --repository repo.borg break-lock
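# Or select the repository by its configured label instead of its path. The
# "yourlabel" name here is illustrative:
borgmatic borg --repository yourlabel break-lock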

View File

@@ -90,9 +90,11 @@ installing borgmatic:
* [Fedora unofficial](https://copr.fedorainfracloud.org/coprs/heffer/borgmatic/)
* [Arch Linux](https://www.archlinux.org/packages/community/any/borgmatic/)
* [Alpine Linux](https://pkgs.alpinelinux.org/packages?name=borgmatic)
* [OpenBSD](http://ports.su/sysutils/borgmatic)
* [OpenBSD](https://openports.pl/path/sysutils/borgmatic)
* [openSUSE](https://software.opensuse.org/package/borgmatic)
* [macOS (via Homebrew)](https://formulae.brew.sh/formula/borgmatic)
* [macOS (via MacPorts)](https://ports.macports.org/port/borgmatic/)
* [NixOS](https://search.nixos.org/packages?show=borgmatic&sort=relevance&type=packages&query=borgmatic)
* [Ansible role](https://github.com/borgbase/ansible-role-borgbackup)
* [virtualenv](https://virtualenv.pypa.io/en/stable/)
@@ -257,9 +259,9 @@ See `borgmatic --help` and `borgmatic create --help` for more information.
If you omit `create` and other actions, borgmatic runs through a set of
default actions: `prune` any old backups as per the configured retention
policy, `compact` segments to free up space (with Borg 1.2+), `create` a
backup, *and* `check` backups for consistency problems due to things like file
damage. For instance:
policy, `compact` segments to free up space (with Borg 1.2+, borgmatic
1.5.23+), `create` a backup, *and* `check` backups for consistency problems
due to things like file damage. For instance:
```bash
sudo borgmatic --verbosity 1 --list --stats

View File

@@ -145,15 +145,18 @@ like this:
```yaml
location:
    repositories:
        - original.borg
        - path: original.borg
```
<span class="minilink minilink-addedin">Prior to version 1.7.10</span> Omit
the `path:` portion of the `repositories` list.
Change it to a new (not yet created) repository path:
```yaml
location:
    repositories:
        - upgraded.borg
        - path: upgraded.borg
```
Then, run the `rcreate` action (formerly `init`) to create that new Borg 2
@@ -169,12 +172,21 @@ The `--source-repository` flag is necessary to reuse key material from your
Borg 1 repository so that the subsequent data transfer can work.
The `--encryption` value above selects the same chunk ID algorithm (`blake2`)
used in Borg 1, thereby making deduplication work across transferred archives
and new archives. Note that `repokey-blake2-chacha20-poly1305` may be faster
than `repokey-blake2-aes-ocb` on certain platforms like ARM64. Read about
[Borg encryption
modes](https://borgbackup.readthedocs.io/en/2.0.0b3/usage/rcreate.html#encryption-mode-tldr)
for the menu of available encryption modes.
commonly used in Borg 1, thereby making deduplication work across transferred
archives and new archives.
If you get an error about "You must keep the same ID hash" from Borg, that
means the encryption value you specified doesn't correspond to your source
repository's chunk ID algorithm. In that case, try not using `blake2`:
```bash
borgmatic rcreate --verbosity 1 --encryption repokey-aes-ocb \
    --source-repository original.borg --repository upgraded.borg
```
Read about [Borg encryption
modes](https://borgbackup.readthedocs.io/en/2.0.0b5/usage/rcreate.html#encryption-mode-tldr)
for more details.
To transfer data from your original Borg 1 repository to your newly created
Borg 2 repository:
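In outline, the commands use borgmatic's `transfer` action with Borg's
`From12To20` upgrader, run first as a dry run. Here's a sketch, assuming the
repository paths from above:
```bash
borgmatic transfer --verbosity 1 --upgrader From12To20 \
    --source-repository original.borg --repository upgraded.borg --dry-run
```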
@@ -194,9 +206,9 @@ might take a while), and the final command with `--dry-run` again provides
confirmation of success—or tells you if something hasn't been transferred yet.
Note that by omitting the `--upgrader` flag, you can also do archive transfers
between Borg 2 repositories without upgrading, even down to individual
between related Borg 2 repositories without upgrading, even down to individual
archives. For more on that functionality, see the [Borg transfer
documentation](https://borgbackup.readthedocs.io/en/2.0.0b3/usage/transfer.html).
documentation](https://borgbackup.readthedocs.io/en/2.0.0b5/usage/transfer.html).
That's it! Now you can use your new Borg 2 repository as normal with
borgmatic. If you've got multiple repositories, repeat the above process for

View File

@@ -7,15 +7,11 @@ eleventyNavigation:
---
## borgmatic options
Here are all of the available borgmatic command-line options. This includes the separate options for
each action sub-command:
Here are all of the available borgmatic command-line options, including the
separate options for each action sub-command. Note that most of the
flags listed here do not have equivalents in borgmatic's [configuration
file](https://torsion.org/borgmatic/docs/reference/configuration/).
```
{% include borgmatic/command-line.txt %}
```
## Related documentation
* [Set up backups with borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/)
* [borgmatic configuration reference](https://torsion.org/borgmatic/docs/reference/configuration/)

View File

@@ -15,9 +15,3 @@ Here is a full sample borgmatic configuration file including all available optio
Note that you can also [download this configuration
file](https://torsion.org/borgmatic/docs/reference/config.yaml) for use locally.
## Related documentation
* [Set up backups with borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/)
* [borgmatic command-line reference](https://torsion.org/borgmatic/docs/reference/command-line/)

BIN docs/static/sqlite.png (new binary file, 4.6 KiB; not shown)

View File

@@ -1,7 +1,7 @@
#!/bin/sh
# This script is for running all tests, including end-to-end tests, on a developer machine. It sets
# up database containers to run tests against, runs the tests, and then tears down the containers.
# This script is for running end-to-end tests on a developer machine. It sets up database containers
# to run tests against, runs the tests, and then tears down the containers.
#
# Run this script from the root directory of the borgmatic source.
#

View File

@@ -3,19 +3,30 @@
# This script installs test dependencies and runs all tests, including end-to-end tests. It
# is designed to run inside a test container, and presumes that other test infrastructure like
# databases are already running. Therefore, on a developer machine, you should not run this script
# directly. Instead, run scripts/run-full-dev-tests
# directly. Instead, run scripts/run-end-to-end-dev-tests
#
# For more information, see:
# https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/
set -e
if [ -z "$TEST_CONTAINER" ] ; then
    echo "This script is designed to work inside a test container and is not intended to"
    echo "be run manually. If you're trying to run borgmatic's end-to-end tests, execute"
    echo "scripts/run-end-to-end-dev-tests instead."
    exit 1
fi
apk add --no-cache python3 py3-pip borgbackup postgresql-client mariadb-client mongodb-tools \
    py3-ruamel.yaml py3-ruamel.yaml.clib bash
    py3-ruamel.yaml py3-ruamel.yaml.clib bash sqlite
# If certain dependencies of black are available in this version of Alpine, install them.
apk add --no-cache py3-typed-ast py3-regex || true
python3 -m pip install --no-cache --upgrade pip==22.2.2 setuptools==64.0.1
pip3 install --ignore-installed tox==3.25.1
export COVERAGE_FILE=/tmp/.coverage
tox --workdir /tmp/.tox --sitepackages
if [ "$1" != "--end-to-end-only" ] ; then
tox --workdir /tmp/.tox --sitepackages
fi
tox --workdir /tmp/.tox --sitepackages -e end-to-end

View File

@@ -5,11 +5,13 @@ description_file=README.md
testpaths = tests
addopts = --cov-report term-missing:skip-covered --cov=borgmatic --ignore=tests/end-to-end
filterwarnings =
    ignore:Coverage disabled.*:pytest.PytestWarning
    ignore:Deprecated call to `pkg_resources.declare_namespace\('ruamel'\)`.*:DeprecationWarning
[flake8]
ignore = E501,W503
exclude = *.*/*
multiline-quotes = '''
docstring-quotes = '''
[tool:isort]
force_single_line = False

View File

@@ -1,6 +1,6 @@
from setuptools import find_packages, setup
VERSION = '1.7.5.dev0'
VERSION = '1.7.11.dev0'
setup(

View File

@@ -5,6 +5,9 @@ click==7.1.2; python_version >= '3.8'
colorama==0.4.4
coverage==5.3
flake8==4.0.1
flake8-quotes==3.3.2
flake8-use-fstring==1.4
flake8-variables-names==0.0.5
flexmock==0.10.4
isort==5.9.1
mccabe==0.6.1
@@ -14,8 +17,8 @@ py==1.10.0
pycodestyle==2.8.0
pyflakes==2.4.0
jsonschema==3.2.0
pytest==6.2.5
pytest-cov==3.0.0
pytest==7.2.0
pytest-cov==4.0.0
regex; python_version >= '3.8'
requests==2.25.0
ruamel.yaml>0.15.0,<0.18.0

View File

@@ -17,14 +17,17 @@ services:
      MONGO_INITDB_ROOT_PASSWORD: test
  tests:
    image: alpine:3.13
    environment:
      TEST_CONTAINER: true
    volumes:
      - "../..:/app:ro"
    tmpfs:
      - "/app/borgmatic.egg-info"
    tty: true
    working_dir: /app
    command:
      - /app/scripts/run-full-tests
    entrypoint: /app/scripts/run-full-tests
    command: --end-to-end-only
    depends_on:
      - postgresql
      - mysql
      - mongodb

View File

@@ -12,17 +12,14 @@ def generate_configuration(config_path, repository_path):
to work for testing (including injecting the given repository path and tacking on an encryption
passphrase).
'''
    subprocess.check_call(
        'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
    )
    subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
    config = (
        open(config_path)
        .read()
        .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path)
        .replace('- ssh://user@backupserver/./{fqdn}', '')
        .replace('- /var/local/backups/local.borg', '')
        .replace('- /home/user/path with spaces', '')
        .replace('- /home', '- {}'.format(config_path))
        .replace('- path: /mnt/backup', '')
        .replace('label: local', '')
        .replace('- /home', f'- {config_path}')
        .replace('- /etc', '')
        .replace('- /var/log/syslog*', '')
        + 'storage:\n encryption_passphrase: "test"'
@@ -47,13 +44,13 @@ def test_borgmatic_command():
        generate_configuration(config_path, repository_path)
        subprocess.check_call(
            'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ')
            f'borgmatic -v 2 --config {config_path} init --encryption repokey'.split(' ')
        )
        # Run borgmatic to generate a backup archive, and then list it to make sure it exists.
        subprocess.check_call('borgmatic --config {}'.format(config_path).split(' '))
        subprocess.check_call(f'borgmatic --config {config_path}'.split(' '))
        output = subprocess.check_output(
            'borgmatic --config {} list --json'.format(config_path).split(' ')
            f'borgmatic --config {config_path} list --json'.split(' ')
        ).decode(sys.stdout.encoding)
        parsed_output = json.loads(output)
@@ -64,16 +61,14 @@ def test_borgmatic_command():
        # Extract the created archive into the current (temporary) directory, and confirm that the
        # extracted file looks right.
        output = subprocess.check_output(
            'borgmatic --config {} extract --archive {}'.format(config_path, archive_name).split(
                ' '
            )
            f'borgmatic --config {config_path} extract --archive {archive_name}'.split(' '),
        ).decode(sys.stdout.encoding)
        extracted_config_path = os.path.join(extract_path, config_path)
        assert open(extracted_config_path).read() == open(config_path).read()
        # Exercise the info action.
        output = subprocess.check_output(
            'borgmatic --config {} info --json'.format(config_path).split(' ')
            f'borgmatic --config {config_path} info --json'.split(' '),
        ).decode(sys.stdout.encoding)
        parsed_output = json.loads(output)

View File

@@ -14,6 +14,7 @@ def write_configuration(
    repository_path,
    borgmatic_source_directory,
    postgresql_dump_format='custom',
    mongodb_dump_format='archive',
):
    '''
    Write out borgmatic configuration into a file at the config path. Set the options so as to work
@@ -42,6 +43,11 @@ hooks:
          hostname: postgresql
          username: postgres
          password: test
        - name: all
          format: custom
          hostname: postgresql
          username: postgres
          password: test
    mysql_databases:
        - name: test
          hostname: mysql
@@ -51,16 +57,25 @@ hooks:
          hostname: mysql
          username: root
          password: test
        - name: all
          format: sql
          hostname: mysql
          username: root
          password: test
    mongodb_databases:
        - name: test
          hostname: mongodb
          username: root
          password: test
          authentication_database: admin
          format: {mongodb_dump_format}
        - name: all
          hostname: mongodb
          username: root
          password: test
    sqlite_databases:
        - name: sqlite_test
          path: /tmp/sqlite_test.db
'''
    with open(config_path, 'w') as config_file:
@@ -126,6 +141,7 @@ def test_database_dump_and_restore_with_directory_format():
        repository_path,
        borgmatic_source_directory,
        postgresql_dump_format='directory',
        mongodb_dump_format='directory',
    )
    subprocess.check_call(
@@ -173,7 +189,7 @@ def test_database_dump_with_error_causes_borgmatic_to_exit():
                '-v',
                '2',
                '--override',
                "hooks.postgresql_databases=[{'name': 'nope'}]",
                "hooks.postgresql_databases=[{'name': 'nope'}]",  # noqa: FS003
            ]
        )
    finally:

Some files were not shown because too many files have changed in this diff.