forked from borgmatic-collective/borgmatic
Compare commits
1523 Commits
.dockerignore (new file, 2 lines)
@@ -0,0 +1,2 @@
.git
.tox
.drone.yml (new file, 56 lines)
@@ -0,0 +1,56 @@
kind: pipeline
name: python-3-8-alpine-3-13

services:
  - name: postgresql
    image: postgres:13.1-alpine
    environment:
      POSTGRES_PASSWORD: test
      POSTGRES_DB: test
  - name: mysql
    image: mariadb:10.5
    environment:
      MYSQL_ROOT_PASSWORD: test
      MYSQL_DATABASE: test
  - name: mongodb
    image: mongo:5.0.5
    environment:
      MONGO_INITDB_ROOT_USERNAME: root
      MONGO_INITDB_ROOT_PASSWORD: test

clone:
  skip_verify: true

steps:
  - name: build
    image: alpine:3.13
    pull: always
    commands:
      - scripts/run-full-tests

---
kind: pipeline
name: documentation

clone:
  skip_verify: true

steps:
  - name: build
    image: plugins/docker
    settings:
      username:
        from_secret: docker_username
      password:
        from_secret: docker_password
      registry: projects.torsion.org
      repo: projects.torsion.org/borgmatic-collective/borgmatic
      tags: docs
      dockerfile: docs/Dockerfile

trigger:
  repo:
    - borgmatic-collective/borgmatic
  branch:
    - master
  event:
    - push
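Note that `.drone.yml` holds two pipeline documents in a single file, split on `---`. After editing it, a quick way to sanity-check the result is to parse both documents with PyYAML. This is a minimal sketch, not part of the change itself; it assumes PyYAML is installed:

```python
# Minimal sanity check for the two-document .drone.yml above.
# Not part of the repository; assumes PyYAML (pip install pyyaml).
import yaml

with open('.drone.yml') as drone_file:
    pipelines = list(yaml.safe_load_all(drone_file))

for pipeline in pipelines:
    assert pipeline['kind'] == 'pipeline'
    print(pipeline['name'], '->', [step['name'] for step in pipeline['steps']])

# Expected output, given the file above:
#   python-3-8-alpine-3-13 -> ['build']
#   documentation -> ['build']
```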
.eleventy.js (new file, 47 lines)
@@ -0,0 +1,47 @@
const pluginSyntaxHighlight = require("@11ty/eleventy-plugin-syntaxhighlight");
const inclusiveLangPlugin = require("@11ty/eleventy-plugin-inclusive-language");
const navigationPlugin = require("@11ty/eleventy-navigation");

module.exports = function(eleventyConfig) {
    eleventyConfig.addPlugin(pluginSyntaxHighlight);
    eleventyConfig.addPlugin(inclusiveLangPlugin);
    eleventyConfig.addPlugin(navigationPlugin);

    let markdownIt = require("markdown-it");
    let markdownItAnchor = require("markdown-it-anchor");
    let markdownItReplaceLink = require("markdown-it-replace-link");

    let markdownItOptions = {
        html: true,
        breaks: false,
        linkify: true,
        replaceLink: function (link, env) {
            if (process.env.NODE_ENV == "production") {
                return link;
            }
            return link.replace('https://torsion.org/borgmatic/', 'http://localhost:8080/');
        }
    };
    let markdownItAnchorOptions = {
        permalink: markdownItAnchor.permalink.headerLink()
    };

    eleventyConfig.setLibrary(
        "md",
        markdownIt(markdownItOptions)
            .use(markdownItAnchor, markdownItAnchorOptions)
            .use(markdownItReplaceLink)
    );

    eleventyConfig.addPassthroughCopy({"docs/static": "static"});

    eleventyConfig.setLiquidOptions({dynamicPartials: false});

    return {
        templateFormats: [
            "md",
            "txt"
        ]
    };
};
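The `replaceLink` hook above is what lets the same Markdown sources work both on the live site and in local development: outside of production builds, absolute documentation links are rewritten to the local dev server. A hedged Python sketch of the same rewrite rule, purely to illustrate the transformation (the function name is mine, not project code):

```python
# Illustration of the .eleventy.js replaceLink rule above, in Python.
# rewrite_link() is a hypothetical name for this sketch only.
def rewrite_link(link, production):
    if production:
        return link
    return link.replace('https://torsion.org/borgmatic/', 'http://localhost:8080/')

assert rewrite_link(
    'https://torsion.org/borgmatic/docs/how-to/set-up-backups/', production=False
) == 'http://localhost:8080/docs/how-to/set-up-backups/'
```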
.gitea/issue_template.md (new file, 35 lines)
@@ -0,0 +1,35 @@
#### What I'm trying to do and why

#### Steps to reproduce (if a bug)

Include (sanitized) borgmatic configuration files if applicable.

#### Actual behavior (if a bug)

Include (sanitized) `--verbosity 2` output if applicable.

#### Expected behavior (if a bug)

#### Other notes / implementation ideas

#### Environment

**borgmatic version:** [version here]

Use `sudo borgmatic --version` or `sudo pip show borgmatic | grep ^Version`

**borgmatic installation method:** [e.g., Debian package, Docker container, etc.]

**Borg version:** [version here]

Use `sudo borg --version`

**Python version:** [version here]

Use `python3 --version`

**Database version (if applicable):** [version here]

Use `psql --version` or `mysql --version` on client and server.

**operating system and version:** [OS here]
.gitignore (new file, 11 lines, vendored)
@@ -0,0 +1,11 @@
*.egg-info
*.pyc
*.swp
.cache
.coverage*
.pytest_cache
.tox
__pycache__
build/
dist/
pip-wheel-metadata/
.hgtags (deleted, 10 lines)
@@ -1,10 +0,0 @@
467d3a3ce9185e84ee51ca9156499162efd94f9a 0.0.2
7730ae34665c0dedf46deab90b32780abf6dbaff 0.0.3
4bb2e81fc77038be4499b7ea6797ab7d109460e0 0.0.4
b31d51b633701554e84f996cc0c73bad2990780b 0.0.5
b31d51b633701554e84f996cc0c73bad2990780b 0.0.5
aa8a807f4ba28f0652764ed14713ffea2fd6922d 0.0.5
aa8a807f4ba28f0652764ed14713ffea2fd6922d 0.0.5
569aef47a9b25c55b13753f94706f5d330219995 0.0.5
569aef47a9b25c55b13753f94706f5d330219995 0.0.5
a03495a8e8b471da63b5e2ae79d3ff9065839c2a 0.0.5
AUTHORS (modified)
@@ -1,4 +1,14 @@
 Dan Helfman <witten@torsion.org>: Main developer
 Alexander Görtz: Python 3 compatibility
+Florian Lindner: Logging rewrite
 Henning Schroeder: Copy editing
+Johannes Feichtner: Support for user hooks
+Michele Lazzeri: Custom archive names
+Nick Whyte: Support prefix filtering for archive consistency checks
+newtonne: Read encryption password from external file
+Robin `ypid` Schneider: Support additional options of Borg and add validate-borgmatic-config command
+Scott Squires: Custom archive names
+Thomas LÉVEIL: Support for a keep_minutely prune option. Support for the --json option
+
+And many others! See the output of "git log".
MANIFEST.in (new file, 2 lines)
@@ -0,0 +1,2 @@
include borgmatic/config/schema.yaml
graft sample/systemd
README.md (rewritten)
@@ -1,123 +1,168 @@

Removed (the old atticmatic README):

title: Atticmatic
date:
save_as: atticmatic/index.html
---

## Overview

atticmatic is a simple Python wrapper script for the [Attic backup
software](https://attic-backup.org/) that initiates a backup, prunes any old
backups according to a retention policy, and validates backups for
consistency. The script supports specifying your settings in a declarative
configuration file rather than having to put them all on the command-line, and
handles common errors.

Here's an example config file:

    [location]
    # Space-separated list of source directories to backup.
    source_directories: /home /etc

    # Path to local or remote Attic repository.
    repository: user@backupserver:sourcehostname.attic

    [retention]
    # Retention policy for how many backups to keep in each category.
    keep_daily: 7
    keep_weekly: 4
    keep_monthly: 6

    [consistency]
    checks: repository archives

Additionally, exclude patterns can be specified in a separate excludes config
file, one pattern per line.

atticmatic is hosted at <https://torsion.org/atticmatic> with [source code
available](https://torsion.org/hg/atticmatic). It's also mirrored on
[GitHub](https://github.com/witten/atticmatic) and
[BitBucket](https://bitbucket.org/dhelfman/atticmatic) for convenience.

## Setup

To get up and running with Attic, follow the [Attic Quick
Start](https://attic-backup.org/quickstart.html) guide to create an Attic
repository on a local or remote host. Note that if you plan to run atticmatic
on a schedule with cron, and you encrypt your attic repository with a
passphrase instead of a key file, you'll need to set the `ATTIC_PASSPHRASE`
environment variable. See [attic's repository encryption
documentation](https://attic-backup.org/quickstart.html#encrypted-repos) for
more info.

If the repository is on a remote host, make sure that your local root user has
key-based ssh access to the desired user account on the remote host.

To install atticmatic, run the following command to download and install it:

    sudo pip install --upgrade hg+https://torsion.org/hg/atticmatic

Then copy the following configuration files:

    sudo cp sample/atticmatic.cron /etc/cron.d/atticmatic
    sudo mkdir /etc/atticmatic/
    sudo cp sample/config sample/excludes /etc/atticmatic/

Lastly, modify those files with your desired configuration.

## Usage

You can run atticmatic and start a backup simply by invoking it without
arguments:

    atticmatic

This will also prune any old backups as per the configured retention policy,
and check backups for consistency problems due to things like file damage.

By default, the backup will proceed silently except in the case of errors. But
if you'd like to get additional information about the progress of the
backup as it proceeds, use the verbose option instead:

    atticmatic --verbose

If you'd like to see the available command-line arguments, view the help:

    atticmatic --help

## Running tests

First install tox, which is used for setting up testing environments:

    pip install tox

Then, to actually run tests, run:

    tox

## Troubleshooting

### Broken pipe with remote repository

When running atticmatic on a large remote repository, you may receive errors
like the following, particularly while "attic check" is validating backups for
consistency:

    Write failed: Broken pipe
    attic: Error: Connection closed by remote host

This error can be caused by an ssh timeout, which you can rectify by adding
the following to the ~/.ssh/config file on the client:

    Host *
        ServerAliveInterval 120

This should make the client keep the connection alive while validating
backups.

## Feedback

Questions? Comments? Got a patch? Contact <mailto:witten@torsion.org>.

Added (the new borgmatic README):

---
title: borgmatic
permalink: index.html
---

## It's your data. Keep it that way.

<img src="docs/static/borgmatic.png" alt="borgmatic logo" width="150px" style="float: right; padding-left: 1em;">

borgmatic is simple, configuration-driven backup software for servers and
workstations. Protect your files with client-side encryption. Backup your
databases too. Monitor it all with integrated third-party services.

The canonical home of borgmatic is at <a href="https://torsion.org/borgmatic">https://torsion.org/borgmatic</a>.

Here's an example configuration file:

```yaml
location:
    # List of source directories to backup.
    source_directories:
        - /home
        - /etc

    # Paths of local or remote repositories to backup to.
    repositories:
        - ssh://1234@usw-s001.rsync.net/./backups.borg
        - ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
        - /var/lib/backups/local.borg

retention:
    # Retention policy for how many backups to keep.
    keep_daily: 7
    keep_weekly: 4
    keep_monthly: 6

consistency:
    # List of checks to run to validate your backups.
    checks:
        - name: repository
        - name: archives
          frequency: 2 weeks

hooks:
    # Custom preparation scripts to run.
    before_backup:
        - prepare-for-backup.sh

    # Databases to dump and include in backups.
    postgresql_databases:
        - name: users

    # Third-party services to notify you if backups aren't happening.
    healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c
```

Want to see borgmatic in action? Check out the <a
href="https://asciinema.org/a/203761?autoplay=1" target="_blank">screencast</a>.

<a href="https://asciinema.org/a/203761?autoplay=1" target="_blank"><img src="https://asciinema.org/a/203761.png" width="480"></a>

borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).

## Integrations

<a href="https://www.postgresql.org/"><img src="docs/static/postgresql.png" alt="PostgreSQL" height="60px" style="margin-bottom:20px;"></a>
<a href="https://www.mysql.com/"><img src="docs/static/mysql.png" alt="MySQL" height="60px" style="margin-bottom:20px;"></a>
<a href="https://mariadb.com/"><img src="docs/static/mariadb.png" alt="MariaDB" height="60px" style="margin-bottom:20px;"></a>
<a href="https://www.mongodb.com/"><img src="docs/static/mongodb.png" alt="MongoDB" height="60px" style="margin-bottom:20px;"></a>
<a href="https://sqlite.org/"><img src="docs/static/sqlite.png" alt="SQLite" height="60px" style="margin-bottom:20px;"></a>
<a href="https://healthchecks.io/"><img src="docs/static/healthchecks.png" alt="Healthchecks" height="60px" style="margin-bottom:20px;"></a>
<a href="https://cronitor.io/"><img src="docs/static/cronitor.png" alt="Cronitor" height="60px" style="margin-bottom:20px;"></a>
<a href="https://cronhub.io/"><img src="docs/static/cronhub.png" alt="Cronhub" height="60px" style="margin-bottom:20px;"></a>
<a href="https://www.pagerduty.com/"><img src="docs/static/pagerduty.png" alt="PagerDuty" height="60px" style="margin-bottom:20px;"></a>
<a href="https://ntfy.sh/"><img src="docs/static/ntfy.png" alt="ntfy" height="60px" style="margin-bottom:20px;"></a>
<a href="https://www.borgbase.com/?utm_source=borgmatic"><img src="docs/static/borgbase.png" alt="BorgBase" height="60px" style="margin-bottom:20px;"></a>

## Getting started

Your first step is to [install and configure
borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/).

For additional documentation, check out the links above for <a
href="https://torsion.org/borgmatic/#documentation">borgmatic how-to and
reference guides</a>.

## Hosting providers

Need somewhere to store your encrypted off-site backups? The following hosting
providers include specific support for Borg/borgmatic—and fund borgmatic
development and hosting when you use these links to sign up. (These are
referral links, but without any tracking scripts or cookies.)

<ul>
 <li class="referral"><a href="https://www.borgbase.com/?utm_source=borgmatic">BorgBase</a>: Borg hosting service with support for monitoring, 2FA, and append-only repos</li>
</ul>

Additionally, [rsync.net](https://www.rsync.net/products/borg.html) and
[Hetzner](https://www.hetzner.com/storage/storage-box) have compatible storage
offerings, but do not currently fund borgmatic development or hosting.

## Support and contributing

### Issues

Are you experiencing an issue with borgmatic? Or do you have an idea for a
feature enhancement? Head on over to our [issue
tracker](https://projects.torsion.org/borgmatic-collective/borgmatic/issues).
In order to create a new issue or add a comment, you'll need to
[register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic)
first. If you prefer to use an existing GitHub account, you can skip account
creation and [login directly](https://projects.torsion.org/user/login).

Also see the [security
policy](https://torsion.org/borgmatic/docs/security-policy/) for any security
issues.

### Social

Check out the [Borg subreddit](https://www.reddit.com/r/BorgBackup/) for
general Borg and borgmatic discussion and support.

Also follow [borgmatic on Mastodon](https://fosstodon.org/@borgmatic).

### Chat

To chat with borgmatic developers or users, check out the `#borgmatic`
IRC channel on Libera Chat, either via <a
href="https://web.libera.chat/#borgmatic">web chat</a> or a native <a
href="ircs://irc.libera.chat:6697">IRC client</a>. If you don't get a response
right away, please hang around a while—or file a ticket instead.

### Other

Other questions or comments? Contact
[witten@torsion.org](mailto:witten@torsion.org).

### Contributing

borgmatic [source code is
available](https://projects.torsion.org/borgmatic-collective/borgmatic) and is also mirrored
on [GitHub](https://github.com/borgmatic-collective/borgmatic) for convenience.

borgmatic is licensed under the GNU General Public License version 3 or any
later version.

If you'd like to contribute to borgmatic development, please feel free to
submit a [Pull
Request](https://projects.torsion.org/borgmatic-collective/borgmatic/pulls) or
open an
[issue](https://projects.torsion.org/borgmatic-collective/borgmatic/issues) to
discuss your idea. Note that you'll need to
[register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic)
first. We also accept Pull Requests on GitHub, if that's more your thing. In
general, contributions are very welcome. We don't bite!

Also, please check out the [borgmatic development
how-to](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/) for
info on cloning source code, running tests, etc.

<a href="https://build.torsion.org/borgmatic-collective/borgmatic" alt="build status">![Build Status](https://build.torsion.org/api/badges/borgmatic-collective/borgmatic/status.svg?ref=refs/heads/master)</a>
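Since the new README's example configuration is plain YAML, it can be loaded and inspected with a few lines of Python. A minimal sketch, assuming the example is saved as `config.yaml` and PyYAML is available; this is not borgmatic's own loader, which also validates against the `borgmatic/config/schema.yaml` shipped per the MANIFEST.in above:

```python
# Minimal sketch: load the README's example configuration with PyYAML.
# Not borgmatic's own config loader, which additionally validates against
# borgmatic/config/schema.yaml.
import yaml

with open('config.yaml') as config_file:
    config = yaml.safe_load(config_file)

for repository in config['location']['repositories']:
    print('backing up to', repository)

print('keep_daily =', config['retention']['keep_daily'])
```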
SECURITY.md (new file, 18 lines)
@@ -0,0 +1,18 @@
---
title: Security policy
permalink: security-policy/index.html
---

## Supported versions

While we want to hear about security vulnerabilities in all versions of
borgmatic, security fixes are only made to the most recently released version.
It's simply not practical for our small volunteer effort to maintain multiple
release branches and put out separate security patches for each.

## Reporting a vulnerability

If you find a security vulnerability, please [file a
ticket](https://torsion.org/borgmatic/#issues) or [send email
directly](mailto:witten@torsion.org) as appropriate. You should expect to hear
back within a few days at most and generally sooner.
atticmatic/attic.py (deleted, 136 lines; the unit tests below import it as atticmatic.attic)
@@ -1,136 +0,0 @@
from datetime import datetime
import os
import platform
import subprocess


def create_archive(excludes_filename, verbose, source_directories, repository):
    '''
    Given an excludes filename, a verbosity flag, a space-separated list of source directories, and
    a local or remote repository path, create an attic archive.
    '''
    sources = tuple(source_directories.split(' '))

    command = (
        'attic', 'create',
        '--exclude-from', excludes_filename,
        '{repo}::{hostname}-{timestamp}'.format(
            repo=repository,
            hostname=platform.node(),
            timestamp=datetime.now().isoformat(),
        ),
    ) + sources + (
        ('--verbose', '--stats') if verbose else ()
    )

    subprocess.check_call(command)


def _make_prune_flags(retention_config):
    '''
    Given a retention config dict mapping from option name to value, transform it into an iterable
    of command-line name-value flag pairs.

    For example, given a retention config of:

        {'keep_weekly': 4, 'keep_monthly': 6}

    This will be returned as an iterable of:

        (
            ('--keep-weekly', '4'),
            ('--keep-monthly', '6'),
        )
    '''
    return (
        ('--' + option_name.replace('_', '-'), str(retention_config[option_name]))
        for option_name, value in retention_config.items()
    )


def prune_archives(verbose, repository, retention_config):
    '''
    Given a verbosity flag, a local or remote repository path, and a retention config dict, prune
    attic archives according to the retention policy specified in that configuration.
    '''
    command = (
        'attic', 'prune',
        repository,
    ) + tuple(
        element
        for pair in _make_prune_flags(retention_config)
        for element in pair
    ) + (('--verbose',) if verbose else ())

    subprocess.check_call(command)


DEFAULT_CHECKS = ('repository', 'archives')


def _parse_checks(consistency_config):
    '''
    Given a consistency config with a space-separated "checks" option, transform it to a tuple of
    named checks to run.

    For example, given a consistency config of:

        {'checks': 'repository archives'}

    This will be returned as:

        ('repository', 'archives')

    If no "checks" option is present, return the DEFAULT_CHECKS. If the checks value is the string
    "disabled", return an empty tuple, meaning that no checks should be run.
    '''
    checks = consistency_config.get('checks', '').strip()
    if not checks:
        return DEFAULT_CHECKS

    return tuple(
        check for check in consistency_config['checks'].split(' ')
        if check.lower() not in ('disabled', '')
    )


def _make_check_flags(checks):
    '''
    Given a parsed sequence of checks, transform it into a tuple of command-line flags.

    For example, given parsed checks of:

        ('repository',)

    This will be returned as:

        ('--repository-only',)
    '''
    if checks == DEFAULT_CHECKS:
        return ()

    return tuple(
        '--{}-only'.format(check) for check in checks
    )


def check_archives(verbose, repository, consistency_config):
    '''
    Given a verbosity flag, a local or remote repository path, and a consistency config dict, check
    the contained attic archives for consistency.

    If there are no consistency checks to run, skip running them.
    '''
    checks = _parse_checks(consistency_config)
    if not checks:
        return

    command = (
        'attic', 'check',
        repository,
    ) + _make_check_flags(checks) + (('--verbose',) if verbose else ())

    # Attic's check command spews to stdout even without the verbose flag. Suppress it.
    stdout = None if verbose else open(os.devnull, 'w')

    subprocess.check_call(command, stdout=stdout)
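To make the flag assembly concrete: feeding the old README's example retention policy through `_make_prune_flags` and flattening the pairs, exactly as `prune_archives` does above, yields the command below. A worked illustration only, assuming the pre-deletion module is importable as `atticmatic.attic`:

```python
# Worked illustration of the deleted module's prune command assembly,
# using the old README's example retention policy. Assumes the
# pre-deletion module is importable as atticmatic.attic.
from collections import OrderedDict

from atticmatic.attic import _make_prune_flags

retention_config = OrderedDict(
    (('keep_daily', 7), ('keep_weekly', 4), ('keep_monthly', 6))
)

# Flatten the name-value pairs the same way prune_archives() does.
flags = tuple(
    element
    for pair in _make_prune_flags(retention_config)
    for element in pair
)
command = ('attic', 'prune', 'user@backupserver:sourcehostname.attic') + flags

assert command == (
    'attic', 'prune', 'user@backupserver:sourcehostname.attic',
    '--keep-daily', '7', '--keep-weekly', '4', '--keep-monthly', '6',
)
```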
atticmatic/command.py (deleted, 51 lines; the unit tests below import it as atticmatic.command)
@@ -1,51 +0,0 @@
from __future__ import print_function
from argparse import ArgumentParser
from subprocess import CalledProcessError
import sys

from atticmatic.attic import check_archives, create_archive, prune_archives
from atticmatic.config import parse_configuration


DEFAULT_CONFIG_FILENAME = '/etc/atticmatic/config'
DEFAULT_EXCLUDES_FILENAME = '/etc/atticmatic/excludes'


def parse_arguments(*arguments):
    '''
    Parse the given command-line arguments and return them as an argparse.Namespace instance.
    '''
    parser = ArgumentParser()
    parser.add_argument(
        '-c', '--config',
        dest='config_filename',
        default=DEFAULT_CONFIG_FILENAME,
        help='Configuration filename',
    )
    parser.add_argument(
        '--excludes',
        dest='excludes_filename',
        default=DEFAULT_EXCLUDES_FILENAME,
        help='Excludes filename',
    )
    parser.add_argument(
        '-v', '--verbose',
        action='store_true',
        help='Display verbose progress information',
    )

    return parser.parse_args(arguments)


def main():
    try:
        args = parse_arguments(*sys.argv[1:])
        config = parse_configuration(args.config_filename)
        repository = config.location['repository']

        create_archive(args.excludes_filename, args.verbose, **config.location)
        prune_archives(args.verbose, repository, config.retention)
        check_archives(args.verbose, repository, config.consistency)
    except (ValueError, IOError, CalledProcessError) as error:
        print(error, file=sys.stderr)
        sys.exit(1)
Unit tests for atticmatic.command (deleted, 40 lines)
@@ -1,40 +0,0 @@
import sys

from nose.tools import assert_raises

from atticmatic import command as module


def test_parse_arguments_with_no_arguments_uses_defaults():
    parser = module.parse_arguments()

    assert parser.config_filename == module.DEFAULT_CONFIG_FILENAME
    assert parser.excludes_filename == module.DEFAULT_EXCLUDES_FILENAME
    assert parser.verbose == False


def test_parse_arguments_with_filename_arguments_overrides_defaults():
    parser = module.parse_arguments('--config', 'myconfig', '--excludes', 'myexcludes')

    assert parser.config_filename == 'myconfig'
    assert parser.excludes_filename == 'myexcludes'
    assert parser.verbose == False


def test_parse_arguments_with_verbose_flag_overrides_default():
    parser = module.parse_arguments('--verbose')

    assert parser.config_filename == module.DEFAULT_CONFIG_FILENAME
    assert parser.excludes_filename == module.DEFAULT_EXCLUDES_FILENAME
    assert parser.verbose == True


def test_parse_arguments_with_invalid_arguments_exits():
    original_stderr = sys.stderr
    sys.stderr = sys.stdout

    try:
        with assert_raises(SystemExit):
            module.parse_arguments('--posix-me-harder')
    finally:
        sys.stderr = original_stderr
@@ -1,208 +0,0 @@ (entire file deleted)
from collections import OrderedDict

from flexmock import flexmock

from atticmatic import attic as module


def insert_subprocess_mock(check_call_command, **kwargs):
    subprocess = flexmock()
    subprocess.should_receive('check_call').with_args(check_call_command, **kwargs).once()
    flexmock(module).subprocess = subprocess


def insert_subprocess_never():
    subprocess = flexmock()
    subprocess.should_receive('check_call').never()
    flexmock(module).subprocess = subprocess


def insert_platform_mock():
    flexmock(module).platform = flexmock().should_receive('node').and_return('host').mock


def insert_datetime_mock():
    flexmock(module).datetime = flexmock().should_receive('now').and_return(
        flexmock().should_receive('isoformat').and_return('now').mock
    ).mock


def test_create_archive_should_call_attic_with_parameters():
    insert_subprocess_mock(
        ('attic', 'create', '--exclude-from', 'excludes', 'repo::host-now', 'foo', 'bar'),
    )
    insert_platform_mock()
    insert_datetime_mock()

    module.create_archive(
        excludes_filename='excludes',
        verbose=False,
        source_directories='foo bar',
        repository='repo',
    )


def test_create_archive_with_verbose_should_call_attic_with_verbose_parameters():
    insert_subprocess_mock(
        (
            'attic', 'create', '--exclude-from', 'excludes', 'repo::host-now', 'foo', 'bar',
            '--verbose', '--stats',
        ),
    )
    insert_platform_mock()
    insert_datetime_mock()

    module.create_archive(
        excludes_filename='excludes',
        verbose=True,
        source_directories='foo bar',
        repository='repo',
    )


BASE_PRUNE_FLAGS = (
    ('--keep-daily', '1'),
    ('--keep-weekly', '2'),
    ('--keep-monthly', '3'),
)


def test_make_prune_flags_should_return_flags_from_config():
    retention_config = OrderedDict(
        (
            ('keep_daily', 1),
            ('keep_weekly', 2),
            ('keep_monthly', 3),
        )
    )

    result = module._make_prune_flags(retention_config)

    assert tuple(result) == BASE_PRUNE_FLAGS


def test_prune_archives_should_call_attic_with_parameters():
    retention_config = flexmock()
    flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
        BASE_PRUNE_FLAGS,
    )
    insert_subprocess_mock(
        (
            'attic', 'prune', 'repo', '--keep-daily', '1', '--keep-weekly', '2', '--keep-monthly',
            '3',
        ),
    )

    module.prune_archives(
        verbose=False,
        repository='repo',
        retention_config=retention_config,
    )


def test_prune_archives_with_verbose_should_call_attic_with_verbose_parameters():
    retention_config = flexmock()
    flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
        BASE_PRUNE_FLAGS,
    )
    insert_subprocess_mock(
        (
            'attic', 'prune', 'repo', '--keep-daily', '1', '--keep-weekly', '2', '--keep-monthly',
            '3', '--verbose',
        ),
    )

    module.prune_archives(
        repository='repo',
        verbose=True,
        retention_config=retention_config,
    )


def test_parse_checks_returns_them_as_tuple():
    checks = module._parse_checks({'checks': 'foo disabled bar'})

    assert checks == ('foo', 'bar')


def test_parse_checks_with_missing_value_returns_defaults():
    checks = module._parse_checks({})

    assert checks == module.DEFAULT_CHECKS


def test_parse_checks_with_blank_value_returns_defaults():
    checks = module._parse_checks({'checks': ''})

    assert checks == module.DEFAULT_CHECKS


def test_parse_checks_with_disabled_returns_no_checks():
    checks = module._parse_checks({'checks': 'disabled'})

    assert checks == ()


def test_make_check_flags_with_checks_returns_flags():
    flags = module._make_check_flags(('foo', 'bar'))

    assert flags == ('--foo-only', '--bar-only')


def test_make_check_flags_with_default_checks_returns_no_flags():
    flags = module._make_check_flags(module.DEFAULT_CHECKS)

    assert flags == ()


def test_check_archives_should_call_attic_with_parameters():
    consistency_config = flexmock()
    flexmock(module).should_receive('_parse_checks').and_return(flexmock())
    flexmock(module).should_receive('_make_check_flags').and_return(())
    stdout = flexmock()
    insert_subprocess_mock(
        ('attic', 'check', 'repo'),
        stdout=stdout,
    )
    insert_platform_mock()
    insert_datetime_mock()
    flexmock(module).open = lambda filename, mode: stdout
    flexmock(module).os = flexmock().should_receive('devnull').mock

    module.check_archives(
        verbose=False,
        repository='repo',
        consistency_config=consistency_config,
    )


def test_check_archives_with_verbose_should_call_attic_with_verbose_parameters():
    consistency_config = flexmock()
    flexmock(module).should_receive('_parse_checks').and_return(flexmock())
    flexmock(module).should_receive('_make_check_flags').and_return(())
    insert_subprocess_mock(
        ('attic', 'check', 'repo', '--verbose'),
        stdout=None,
    )
    insert_platform_mock()
    insert_datetime_mock()

    module.check_archives(
        verbose=True,
        repository='repo',
        consistency_config=consistency_config,
    )


def test_check_archives_without_any_checks_should_bail():
    consistency_config = flexmock()
    flexmock(module).should_receive('_parse_checks').and_return(())
    insert_subprocess_never()

    module.check_archives(
        verbose=False,
        repository='repo',
        consistency_config=consistency_config,
    )

borgmatic/actions/borg.py (new file, 36 lines)
@@ -0,0 +1,36 @@
import logging

import borgmatic.borg.borg
import borgmatic.borg.rlist
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_borg(
    repository, storage, local_borg_version, borg_arguments, local_path, remote_path,
):
    '''
    Run the "borg" action for the given repository.
    '''
    if borg_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, borg_arguments.repository
    ):
        logger.info('{}: Running arbitrary Borg command'.format(repository))
        archive_name = borgmatic.borg.rlist.resolve_archive_name(
            repository,
            borg_arguments.archive,
            storage,
            local_borg_version,
            local_path,
            remote_path,
        )
        borgmatic.borg.borg.run_arbitrary_borg(
            repository,
            storage,
            local_borg_version,
            options=borg_arguments.options,
            archive=archive_name,
            local_path=local_path,
            remote_path=remote_path,
        )

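Each of these new action modules follows the same calling convention: a repository path, config-section dicts, the local Borg version, an argparse.Namespace of action-specific arguments, and local/remote Borg paths. A minimal sketch of driving run_borg directly, with made-up paths and placeholder config values (borgmatic's real entry point builds all of these from its YAML configuration):

import argparse

import borgmatic.actions.borg

# All values below are illustrative placeholders, not borgmatic defaults.
borg_arguments = argparse.Namespace(
    repository=None,    # None: don't filter; act on the repository given below
    archive=None,       # no archive, so flags target the repository itself
    options=['rlist'],  # arbitrary Borg command line to pass through
)
borgmatic.actions.borg.run_borg(
    repository='/mnt/backups/repo.borg',
    storage={},  # storage config dict as parsed from borgmatic's YAML
    local_borg_version='2.0.0b1',
    borg_arguments=borg_arguments,
    local_path='borg',
    remote_path=None,
)
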
borgmatic/actions/break_lock.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import logging

import borgmatic.borg.break_lock
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_break_lock(
    repository, storage, local_borg_version, break_lock_arguments, local_path, remote_path,
):
    '''
    Run the "break-lock" action for the given repository.
    '''
    if break_lock_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, break_lock_arguments.repository
    ):
        logger.info(f'{repository}: Breaking repository and cache locks')
        borgmatic.borg.break_lock.break_lock(
            repository, storage, local_borg_version, local_path=local_path, remote_path=remote_path,
        )

borgmatic/actions/check.py (new file, 55 lines)
@@ -0,0 +1,55 @@
import logging

import borgmatic.borg.check
import borgmatic.hooks.command

logger = logging.getLogger(__name__)


def run_check(
    config_filename,
    repository,
    location,
    storage,
    consistency,
    hooks,
    hook_context,
    local_borg_version,
    check_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "check" action for the given repository.
    '''
    borgmatic.hooks.command.execute_hook(
        hooks.get('before_check'),
        hooks.get('umask'),
        config_filename,
        'pre-check',
        global_arguments.dry_run,
        **hook_context,
    )
    logger.info('{}: Running consistency checks'.format(repository))
    borgmatic.borg.check.check_archives(
        repository,
        location,
        storage,
        consistency,
        local_borg_version,
        local_path=local_path,
        remote_path=remote_path,
        progress=check_arguments.progress,
        repair=check_arguments.repair,
        only_checks=check_arguments.only,
        force=check_arguments.force,
    )
    borgmatic.hooks.command.execute_hook(
        hooks.get('after_check'),
        hooks.get('umask'),
        config_filename,
        'post-check',
        global_arguments.dry_run,
        **hook_context,
    )

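run_check introduces the before/after hook bracketing that run_compact, run_create, run_prune, and run_extract below all repeat: execute the configured pre-command hook, do the real work, then execute the post-command hook. A stripped-down restatement of that control flow, for illustration only (the names here are not borgmatic's actual API; its hook execution lives in borgmatic.hooks.command):

# Illustrative shape of the hook "sandwich" used by the hook-aware actions
# in this diff, not borgmatic's own function.
def run_with_hooks(before_hook, after_hook, action):
    if before_hook:
        before_hook()  # e.g. run a configured pre-check shell command
    result = action()
    if after_hook:
        # Mirrors the code above: the post hook is simply skipped if action()
        # raises, since there's no try/finally around it.
        after_hook()
    return result
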
borgmatic/actions/compact.py (new file, 57 lines)
@@ -0,0 +1,57 @@
import logging

import borgmatic.borg.compact
import borgmatic.borg.feature
import borgmatic.hooks.command

logger = logging.getLogger(__name__)


def run_compact(
    config_filename,
    repository,
    storage,
    retention,
    hooks,
    hook_context,
    local_borg_version,
    compact_arguments,
    global_arguments,
    dry_run_label,
    local_path,
    remote_path,
):
    '''
    Run the "compact" action for the given repository.
    '''
    borgmatic.hooks.command.execute_hook(
        hooks.get('before_compact'),
        hooks.get('umask'),
        config_filename,
        'pre-compact',
        global_arguments.dry_run,
        **hook_context,
    )
    if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version):
        logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
        borgmatic.borg.compact.compact_segments(
            global_arguments.dry_run,
            repository,
            storage,
            local_borg_version,
            local_path=local_path,
            remote_path=remote_path,
            progress=compact_arguments.progress,
            cleanup_commits=compact_arguments.cleanup_commits,
            threshold=compact_arguments.threshold,
        )
    else:  # pragma: nocover
        logger.info('{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository))
    borgmatic.hooks.command.execute_hook(
        hooks.get('after_compact'),
        hooks.get('umask'),
        config_filename,
        'post-compact',
        global_arguments.dry_run,
        **hook_context,
    )

borgmatic/actions/create.py (new file, 90 lines)
@@ -0,0 +1,90 @@
import json
import logging

import borgmatic.borg.create
import borgmatic.hooks.command
import borgmatic.hooks.dispatch
import borgmatic.hooks.dump

logger = logging.getLogger(__name__)


def run_create(
    config_filename,
    repository,
    location,
    storage,
    hooks,
    hook_context,
    local_borg_version,
    create_arguments,
    global_arguments,
    dry_run_label,
    local_path,
    remote_path,
):
    '''
    Run the "create" action for the given repository.

    If create_arguments.json is True, yield the JSON output from creating the archive.
    '''
    borgmatic.hooks.command.execute_hook(
        hooks.get('before_backup'),
        hooks.get('umask'),
        config_filename,
        'pre-backup',
        global_arguments.dry_run,
        **hook_context,
    )
    logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
        'remove_database_dumps',
        hooks,
        repository,
        borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
        location,
        global_arguments.dry_run,
    )
    active_dumps = borgmatic.hooks.dispatch.call_hooks(
        'dump_databases',
        hooks,
        repository,
        borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
        location,
        global_arguments.dry_run,
    )
    stream_processes = [process for processes in active_dumps.values() for process in processes]

    json_output = borgmatic.borg.create.create_archive(
        global_arguments.dry_run,
        repository,
        location,
        storage,
        local_borg_version,
        local_path=local_path,
        remote_path=remote_path,
        progress=create_arguments.progress,
        stats=create_arguments.stats,
        json=create_arguments.json,
        list_files=create_arguments.list_files,
        stream_processes=stream_processes,
    )
    if json_output:  # pragma: nocover
        yield json.loads(json_output)

    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
        'remove_database_dumps',
        hooks,
        config_filename,
        borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
        location,
        global_arguments.dry_run,
    )
    borgmatic.hooks.command.execute_hook(
        hooks.get('after_backup'),
        hooks.get('umask'),
        config_filename,
        'post-backup',
        global_arguments.dry_run,
        **hook_context,
    )

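One subtlety worth noting: because of the yield, run_create is a generator function, so merely calling it runs nothing; the hooks, database dumps, and archive creation only execute once a caller iterates it. A hedged sketch of a caller, with placeholder configuration values:

import argparse

import borgmatic.actions.create

create_arguments = argparse.Namespace(
    progress=False, stats=False, json=True, list_files=False,
)
global_arguments = argparse.Namespace(dry_run=False)

# Draining the generator is what actually performs the backup.
json_results = list(
    borgmatic.actions.create.run_create(
        config_filename='/etc/borgmatic/config.yaml',
        repository='/mnt/backups/repo.borg',
        location={'source_directories': ['/home']},
        storage={},
        hooks={},
        hook_context={},
        local_borg_version='1.2.0',
        create_arguments=create_arguments,
        global_arguments=global_arguments,
        dry_run_label='',
        local_path='borg',
        remote_path=None,
    )
)
# json_results holds at most one parsed "borg create --json" result dict.
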
borgmatic/actions/export_tar.py (new file, 48 lines)
@@ -0,0 +1,48 @@
import logging

import borgmatic.borg.export_tar
import borgmatic.borg.rlist
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_export_tar(
    repository,
    storage,
    local_borg_version,
    export_tar_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "export-tar" action for the given repository.
    '''
    if export_tar_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, export_tar_arguments.repository
    ):
        logger.info(
            '{}: Exporting archive {} as tar file'.format(repository, export_tar_arguments.archive)
        )
        borgmatic.borg.export_tar.export_tar_archive(
            global_arguments.dry_run,
            repository,
            borgmatic.borg.rlist.resolve_archive_name(
                repository,
                export_tar_arguments.archive,
                storage,
                local_borg_version,
                local_path,
                remote_path,
            ),
            export_tar_arguments.paths,
            export_tar_arguments.destination,
            storage,
            local_borg_version,
            local_path=local_path,
            remote_path=remote_path,
            tar_filter=export_tar_arguments.tar_filter,
            list_files=export_tar_arguments.list_files,
            strip_components=export_tar_arguments.strip_components,
        )

borgmatic/actions/extract.py (new file, 67 lines)
@@ -0,0 +1,67 @@
import logging

import borgmatic.borg.extract
import borgmatic.borg.rlist
import borgmatic.config.validate
import borgmatic.hooks.command

logger = logging.getLogger(__name__)


def run_extract(
    config_filename,
    repository,
    location,
    storage,
    hooks,
    hook_context,
    local_borg_version,
    extract_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "extract" action for the given repository.
    '''
    borgmatic.hooks.command.execute_hook(
        hooks.get('before_extract'),
        hooks.get('umask'),
        config_filename,
        'pre-extract',
        global_arguments.dry_run,
        **hook_context,
    )
    if extract_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, extract_arguments.repository
    ):
        logger.info('{}: Extracting archive {}'.format(repository, extract_arguments.archive))
        borgmatic.borg.extract.extract_archive(
            global_arguments.dry_run,
            repository,
            borgmatic.borg.rlist.resolve_archive_name(
                repository,
                extract_arguments.archive,
                storage,
                local_borg_version,
                local_path,
                remote_path,
            ),
            extract_arguments.paths,
            location,
            storage,
            local_borg_version,
            local_path=local_path,
            remote_path=remote_path,
            destination_path=extract_arguments.destination,
            strip_components=extract_arguments.strip_components,
            progress=extract_arguments.progress,
        )
    borgmatic.hooks.command.execute_hook(
        hooks.get('after_extract'),
        hooks.get('umask'),
        config_filename,
        'post-extract',
        global_arguments.dry_run,
        **hook_context,
    )

borgmatic/actions/info.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import json
import logging

import borgmatic.borg.info
import borgmatic.borg.rlist
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_info(
    repository, storage, local_borg_version, info_arguments, local_path, remote_path,
):
    '''
    Run the "info" action for the given repository and archive.

    If info_arguments.json is True, yield the JSON output from the info for the archive.
    '''
    if info_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, info_arguments.repository
    ):
        if not info_arguments.json:  # pragma: nocover
            logger.answer(f'{repository}: Displaying archive summary information')
        info_arguments.archive = borgmatic.borg.rlist.resolve_archive_name(
            repository,
            info_arguments.archive,
            storage,
            local_borg_version,
            local_path,
            remote_path,
        )
        json_output = borgmatic.borg.info.display_archives_info(
            repository,
            storage,
            local_borg_version,
            info_arguments=info_arguments,
            local_path=local_path,
            remote_path=remote_path,
        )
        if json_output:  # pragma: nocover
            yield json.loads(json_output)

borgmatic/actions/list.py (new file, 43 lines)
@@ -0,0 +1,43 @@
import json
import logging

import borgmatic.borg.list
import borgmatic.borg.rlist  # needed by resolve_archive_name() below
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_list(
    repository, storage, local_borg_version, list_arguments, local_path, remote_path,
):
    '''
    Run the "list" action for the given repository and archive.

    If list_arguments.json is True, yield the JSON output from listing the archive.
    '''
    if list_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, list_arguments.repository
    ):
        if not list_arguments.json:  # pragma: nocover
            if list_arguments.find_paths:
                logger.answer(f'{repository}: Searching archives')
            elif not list_arguments.archive:
                logger.answer(f'{repository}: Listing archives')
        list_arguments.archive = borgmatic.borg.rlist.resolve_archive_name(
            repository,
            list_arguments.archive,
            storage,
            local_borg_version,
            local_path,
            remote_path,
        )
        json_output = borgmatic.borg.list.list_archive(
            repository,
            storage,
            local_borg_version,
            list_arguments=list_arguments,
            local_path=local_path,
            remote_path=remote_path,
        )
        if json_output:  # pragma: nocover
            yield json.loads(json_output)

borgmatic/actions/mount.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import logging

import borgmatic.borg.mount
import borgmatic.borg.rlist
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_mount(
    repository, storage, local_borg_version, mount_arguments, local_path, remote_path,
):
    '''
    Run the "mount" action for the given repository.
    '''
    if mount_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, mount_arguments.repository
    ):
        if mount_arguments.archive:
            logger.info('{}: Mounting archive {}'.format(repository, mount_arguments.archive))
        else:  # pragma: nocover
            logger.info('{}: Mounting repository'.format(repository))

        borgmatic.borg.mount.mount_archive(
            repository,
            borgmatic.borg.rlist.resolve_archive_name(
                repository,
                mount_arguments.archive,
                storage,
                local_borg_version,
                local_path,
                remote_path,
            ),
            mount_arguments.mount_point,
            mount_arguments.paths,
            mount_arguments.foreground,
            mount_arguments.options,
            storage,
            local_borg_version,
            local_path=local_path,
            remote_path=remote_path,
        )

borgmatic/actions/prune.py (new file, 53 lines)
@@ -0,0 +1,53 @@
import logging

import borgmatic.borg.prune
import borgmatic.hooks.command

logger = logging.getLogger(__name__)


def run_prune(
    config_filename,
    repository,
    storage,
    retention,
    hooks,
    hook_context,
    local_borg_version,
    prune_arguments,
    global_arguments,
    dry_run_label,
    local_path,
    remote_path,
):
    '''
    Run the "prune" action for the given repository.
    '''
    borgmatic.hooks.command.execute_hook(
        hooks.get('before_prune'),
        hooks.get('umask'),
        config_filename,
        'pre-prune',
        global_arguments.dry_run,
        **hook_context,
    )
    logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
    borgmatic.borg.prune.prune_archives(
        global_arguments.dry_run,
        repository,
        storage,
        retention,
        local_borg_version,
        local_path=local_path,
        remote_path=remote_path,
        stats=prune_arguments.stats,
        list_archives=prune_arguments.list_archives,
    )
    borgmatic.hooks.command.execute_hook(
        hooks.get('after_prune'),
        hooks.get('umask'),
        config_filename,
        'post-prune',
        global_arguments.dry_run,
        **hook_context,
    )

borgmatic/actions/rcreate.py (new file, 40 lines)
@@ -0,0 +1,40 @@
import logging

import borgmatic.borg.rcreate
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_rcreate(
    repository,
    storage,
    local_borg_version,
    rcreate_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "rcreate" action for the given repository.
    '''
    if rcreate_arguments.repository and not borgmatic.config.validate.repositories_match(
        repository, rcreate_arguments.repository
    ):
        return

    logger.info('{}: Creating repository'.format(repository))
    borgmatic.borg.rcreate.create_repository(
        global_arguments.dry_run,
        repository,
        storage,
        local_borg_version,
        rcreate_arguments.encryption_mode,
        rcreate_arguments.source_repository,
        rcreate_arguments.copy_crypt_key,
        rcreate_arguments.append_only,
        rcreate_arguments.storage_quota,
        rcreate_arguments.make_parent_dirs,
        local_path=local_path,
        remote_path=remote_path,
    )

borgmatic/actions/restore.py (new file, 345 lines)
@@ -0,0 +1,345 @@
import copy
import logging
import os

import borgmatic.borg.extract
import borgmatic.borg.list
import borgmatic.borg.mount
import borgmatic.borg.rlist
import borgmatic.borg.state
import borgmatic.config.validate
import borgmatic.hooks.dispatch
import borgmatic.hooks.dump

logger = logging.getLogger(__name__)


UNSPECIFIED_HOOK = object()


def get_configured_database(
    hooks, archive_database_names, hook_name, database_name, configuration_database_name=None
):
    '''
    Find the first database with the given hook name and database name in the configured hooks
    dict and the given archive database names dict (from hook name to database names contained in
    a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
    hooks for the named database. If a configuration database name is given, use that instead of the
    database name to look up the database in the given hooks configuration.

    Return the found database as a tuple of (found hook name, database configuration dict).
    '''
    if not configuration_database_name:
        configuration_database_name = database_name

    if hook_name == UNSPECIFIED_HOOK:
        hooks_to_search = hooks
    else:
        hooks_to_search = {hook_name: hooks[hook_name]}

    return next(
        (
            (name, hook_database)
            for (name, hook) in hooks_to_search.items()
            for hook_database in hook
            if hook_database['name'] == configuration_database_name
            and database_name in archive_database_names.get(name, [])
        ),
        (None, None),
    )


def get_configured_hook_name_and_database(hooks, database_name):
    '''
    Find the hook name and first database dict with the given database name in the configured hooks
    dict. This searches across all database hooks.
    '''


def restore_single_database(
    repository,
    location,
    storage,
    hooks,
    local_borg_version,
    global_arguments,
    local_path,
    remote_path,
    archive_name,
    hook_name,
    database,
):  # pragma: no cover
    '''
    Given (among other things) an archive name, a database hook name, and a configured database
    configuration dict, restore that database from the archive.
    '''
    logger.info(f'{repository}: Restoring database {database["name"]}')

    dump_pattern = borgmatic.hooks.dispatch.call_hooks(
        'make_database_dump_pattern',
        hooks,
        repository,
        borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
        location,
        database['name'],
    )[hook_name]

    # Kick off a single database extract to stdout.
    extract_process = borgmatic.borg.extract.extract_archive(
        dry_run=global_arguments.dry_run,
        repository=repository,
        archive=archive_name,
        paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
        location_config=location,
        storage_config=storage,
        local_borg_version=local_borg_version,
        local_path=local_path,
        remote_path=remote_path,
        destination_path='/',
        # A directory format dump isn't a single file, and therefore can't extract
        # to stdout. In this case, the extract_process return value is None.
        extract_to_stdout=bool(database.get('format') != 'directory'),
    )

    # Run a single database restore, consuming the extract stdout (if any).
    borgmatic.hooks.dispatch.call_hooks(
        'restore_database_dump',
        {hook_name: [database]},
        repository,
        borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
        location,
        global_arguments.dry_run,
        extract_process,
    )


def collect_archive_database_names(
    repository, archive, location, storage, local_borg_version, local_path, remote_path,
):
    '''
    Given a local or remote repository path, a resolved archive name, a location configuration dict,
    a storage configuration dict, the local Borg version, and local and remote Borg paths, query the
    archive for the names of databases it contains and return them as a dict from hook name to a
    sequence of database names.
    '''
    borgmatic_source_directory = os.path.expanduser(
        location.get(
            'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
        )
    ).lstrip('/')
    parent_dump_path = os.path.expanduser(
        borgmatic.hooks.dump.make_database_dump_path(borgmatic_source_directory, '*_databases/*/*')
    )
    dump_paths = borgmatic.borg.list.capture_archive_listing(
        repository,
        archive,
        storage,
        local_borg_version,
        list_path=parent_dump_path,
        local_path=local_path,
        remote_path=remote_path,
    )

    # Determine the database names corresponding to the dumps found in the archive and
    # add them to restore_names.
    archive_database_names = {}

    for dump_path in dump_paths:
        try:
            (hook_name, _, database_name) = dump_path.split(
                borgmatic_source_directory + os.path.sep, 1
            )[1].split(os.path.sep)[0:3]
        except (ValueError, IndexError):
            logger.warning(
                f'{repository}: Ignoring invalid database dump path "{dump_path}" in archive {archive}'
            )
        else:
            if database_name not in archive_database_names.get(hook_name, []):
                archive_database_names.setdefault(hook_name, []).extend([database_name])

    return archive_database_names


def find_databases_to_restore(requested_database_names, archive_database_names):
    '''
    Given a sequence of requested database names to restore and a dict of hook name to the names of
    databases found in an archive, return an expanded sequence of database names to restore,
    replacing "all" with actual database names as appropriate.

    Raise ValueError if any of the requested database names cannot be found in the archive.
    '''
    # A map from database hook name to the database names to restore for that hook.
    restore_names = (
        {UNSPECIFIED_HOOK: requested_database_names}
        if requested_database_names
        else {UNSPECIFIED_HOOK: ['all']}
    )

    # If "all" is in restore_names, then replace it with the names of dumps found within the
    # archive.
    if 'all' in restore_names[UNSPECIFIED_HOOK]:
        restore_names[UNSPECIFIED_HOOK].remove('all')

        for (hook_name, database_names) in archive_database_names.items():
            restore_names.setdefault(hook_name, []).extend(database_names)

            # If a database is to be restored as part of "all", then remove it from restore names so
            # it doesn't get restored twice.
            for database_name in database_names:
                if database_name in restore_names[UNSPECIFIED_HOOK]:
                    restore_names[UNSPECIFIED_HOOK].remove(database_name)

    if not restore_names[UNSPECIFIED_HOOK]:
        restore_names.pop(UNSPECIFIED_HOOK)

    combined_restore_names = set(
        name for database_names in restore_names.values() for name in database_names
    )
    combined_archive_database_names = set(
        name for database_names in archive_database_names.values() for name in database_names
    )

    missing_names = sorted(set(combined_restore_names) - combined_archive_database_names)
    if missing_names:
        joined_names = ', '.join(f'"{name}"' for name in missing_names)
        raise ValueError(
            f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from archive"
        )

    return restore_names
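# Illustrative usage, not part of the committed file: since
# find_databases_to_restore() operates on plain dicts, its "all" expansion is
# easy to see in isolation. The values below are made up.
#
#     archive_database_names = {
#         'postgresql_databases': ['users', 'orders'],
#         'mysql_databases': ['stats'],
#     }
#
#     # No explicit request defaults to "all", which expands to every dump
#     # found in the archive, keyed by its database hook:
#     find_databases_to_restore([], archive_database_names)
#     # => {'postgresql_databases': ['users', 'orders'], 'mysql_databases': ['stats']}
#
#     # A specific request stays under UNSPECIFIED_HOOK, since the caller
#     # didn't say which database hook the name belongs to:
#     find_databases_to_restore(['users'], archive_database_names)
#     # => {UNSPECIFIED_HOOK: ['users']}
#
#     # Requesting a database no dump provides raises ValueError:
#     find_databases_to_restore(['missing'], archive_database_names)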


def ensure_databases_found(restore_names, remaining_restore_names, found_names):
    '''
    Given a dict from hook name to database names to restore, a dict from hook name to remaining
    database names to restore, and a sequence of found (actually restored) database names, raise
    ValueError if requested databases to restore were missing from the archive and/or configuration.
    '''
    combined_restore_names = set(
        name
        for database_names in tuple(restore_names.values())
        + tuple(remaining_restore_names.values())
        for name in database_names
    )

    if not combined_restore_names and not found_names:
        raise ValueError('No databases were found to restore')

    missing_names = sorted(set(combined_restore_names) - set(found_names))
    if missing_names:
        joined_names = ', '.join(f'"{name}"' for name in missing_names)
        raise ValueError(
            f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from borgmatic's configuration"
        )


def run_restore(
    repository,
    location,
    storage,
    hooks,
    local_borg_version,
    restore_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "restore" action for the given repository, but only if the repository matches the
    requested repository in restore arguments.

    Raise ValueError if a configured database could not be found to restore.
    '''
    if restore_arguments.repository and not borgmatic.config.validate.repositories_match(
        repository, restore_arguments.repository
    ):
        return

    logger.info(
        '{}: Restoring databases from archive {}'.format(repository, restore_arguments.archive)
    )
    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
        'remove_database_dumps',
        hooks,
        repository,
        borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
        location,
        global_arguments.dry_run,
    )

    archive_name = borgmatic.borg.rlist.resolve_archive_name(
        repository, restore_arguments.archive, storage, local_borg_version, local_path, remote_path,
    )
    archive_database_names = collect_archive_database_names(
        repository, archive_name, location, storage, local_borg_version, local_path, remote_path,
    )
    restore_names = find_databases_to_restore(restore_arguments.databases, archive_database_names)
    found_names = set()
    remaining_restore_names = {}

    for hook_name, database_names in restore_names.items():
        for database_name in database_names:
            found_hook_name, found_database = get_configured_database(
                hooks, archive_database_names, hook_name, database_name
            )

            if not found_database:
                remaining_restore_names.setdefault(found_hook_name or hook_name, []).append(
                    database_name
                )
                continue

            found_names.add(database_name)
            restore_single_database(
                repository,
                location,
                storage,
                hooks,
                local_borg_version,
                global_arguments,
                local_path,
                remote_path,
                archive_name,
                found_hook_name or hook_name,
                found_database,
            )

    # For any databases that weren't found via exact matches in the hooks configuration, try to
    # fall back to "all" entries.
    for hook_name, database_names in remaining_restore_names.items():
        for database_name in database_names:
            found_hook_name, found_database = get_configured_database(
                hooks, archive_database_names, hook_name, database_name, 'all'
            )

            if not found_database:
                continue

            found_names.add(database_name)
            database = copy.copy(found_database)
            database['name'] = database_name

            restore_single_database(
                repository,
                location,
                storage,
                hooks,
                local_borg_version,
                global_arguments,
                local_path,
                remote_path,
                archive_name,
                found_hook_name or hook_name,
                database,
            )

    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
        'remove_database_dumps',
        hooks,
        repository,
        borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
        location,
        global_arguments.dry_run,
    )

    ensure_databases_found(restore_names, remaining_restore_names, found_names)

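The second loop in run_restore works because get_configured_database can match a configured 'all' entry against a concrete database name taken from the archive. With made-up configuration values:

hooks = {'postgresql_databases': [{'name': 'all'}]}
archive_database_names = {'postgresql_databases': ['users']}

# First pass: an exact lookup for 'users' finds nothing configured...
get_configured_database(hooks, archive_database_names, UNSPECIFIED_HOOK, 'users')
# => (None, None)

# ...second pass: looking up the configured name 'all' instead succeeds, and
# run_restore then copies the entry and renames it to 'users' before restoring.
get_configured_database(
    hooks, archive_database_names, UNSPECIFIED_HOOK, 'users',
    configuration_database_name='all',
)
# => ('postgresql_databases', {'name': 'all'})
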
borgmatic/actions/rinfo.py (new file, 32 lines)
@@ -0,0 +1,32 @@
import json
import logging

import borgmatic.borg.rinfo
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_rinfo(
    repository, storage, local_borg_version, rinfo_arguments, local_path, remote_path,
):
    '''
    Run the "rinfo" action for the given repository.

    If rinfo_arguments.json is True, yield the JSON output from the info for the repository.
    '''
    if rinfo_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, rinfo_arguments.repository
    ):
        if not rinfo_arguments.json:  # pragma: nocover
            logger.answer('{}: Displaying repository summary information'.format(repository))
        json_output = borgmatic.borg.rinfo.display_repository_info(
            repository,
            storage,
            local_borg_version,
            rinfo_arguments=rinfo_arguments,
            local_path=local_path,
            remote_path=remote_path,
        )
        if json_output:  # pragma: nocover
            yield json.loads(json_output)

borgmatic/actions/rlist.py (new file, 32 lines)
@@ -0,0 +1,32 @@
import json
import logging

import borgmatic.borg.rlist
import borgmatic.config.validate

logger = logging.getLogger(__name__)


def run_rlist(
    repository, storage, local_borg_version, rlist_arguments, local_path, remote_path,
):
    '''
    Run the "rlist" action for the given repository.

    If rlist_arguments.json is True, yield the JSON output from listing the repository.
    '''
    if rlist_arguments.repository is None or borgmatic.config.validate.repositories_match(
        repository, rlist_arguments.repository
    ):
        if not rlist_arguments.json:  # pragma: nocover
            logger.answer('{}: Listing repository'.format(repository))
        json_output = borgmatic.borg.rlist.list_repository(
            repository,
            storage,
            local_borg_version,
            rlist_arguments=rlist_arguments,
            local_path=local_path,
            remote_path=remote_path,
        )
        if json_output:  # pragma: nocover
            yield json.loads(json_output)

borgmatic/actions/transfer.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import logging

import borgmatic.borg.transfer

logger = logging.getLogger(__name__)


def run_transfer(
    repository,
    storage,
    local_borg_version,
    transfer_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "transfer" action for the given repository.
    '''
    logger.info(f'{repository}: Transferring archives to repository')
    borgmatic.borg.transfer.transfer_archives(
        global_arguments.dry_run,
        repository,
        storage,
        local_borg_version,
        transfer_arguments,
        local_path=local_path,
        remote_path=remote_path,
    )

borgmatic/borg/borg.py (new file, 68 lines)
@@ -0,0 +1,68 @@
import logging

import borgmatic.logger
from borgmatic.borg import environment, flags
from borgmatic.execute import execute_command

logger = logging.getLogger(__name__)


REPOSITORYLESS_BORG_COMMANDS = {'serve', None}
BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'}
BORG_SUBCOMMANDS_WITHOUT_REPOSITORY = (('debug', 'info'), ('debug', 'convert-profile'), ())


def run_arbitrary_borg(
    repository,
    storage_config,
    local_borg_version,
    options,
    archive=None,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a local or remote repository path, a storage config dict, the local Borg version, a
    sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary
    Borg command on the given repository/archive.
    '''
    borgmatic.logger.add_custom_log_levels()
    lock_wait = storage_config.get('lock_wait', None)

    try:
        options = options[1:] if options[0] == '--' else options

        # Borg commands like "key" have a sub-command ("export", etc.) that must follow it.
        command_options_start_index = 2 if options[0] in BORG_SUBCOMMANDS_WITH_SUBCOMMANDS else 1
        borg_command = tuple(options[:command_options_start_index])
        command_options = tuple(options[command_options_start_index:])
    except IndexError:
        borg_command = ()
        command_options = ()

    if borg_command in BORG_SUBCOMMANDS_WITHOUT_REPOSITORY:
        repository_archive_flags = ()
    elif archive:
        repository_archive_flags = flags.make_repository_archive_flags(
            repository, archive, local_borg_version
        )
    else:
        repository_archive_flags = flags.make_repository_flags(repository, local_borg_version)

    full_command = (
        (local_path,)
        + borg_command
        + repository_archive_flags
        + command_options
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + flags.make_flags('remote-path', remote_path)
        + flags.make_flags('lock-wait', lock_wait)
    )

    return execute_command(
        full_command,
        output_log_level=logging.ANSWER,
        borg_local_path=local_path,
        extra_environment=environment.make_environment(storage_config),
    )

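The interesting part of run_arbitrary_borg is how it decides where the repository/archive flags get spliced into the user's raw options. A standalone restatement of just that splitting logic, for illustration (the real function also handles the no-repository subcommands and builds the final command):

BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'}


def split_options(options):
    # Same splitting rules as run_arbitrary_borg() above, minus the Borg
    # invocation itself; guards replace the original's try/except IndexError.
    options = options[1:] if options and options[0] == '--' else options
    start = 2 if options and options[0] in BORG_SUBCOMMANDS_WITH_SUBCOMMANDS else 1
    return tuple(options[:start]), tuple(options[start:])


# 'key' takes a sub-command, so two tokens form the Borg command:
assert split_options(['key', 'export', '--paper']) == (('key', 'export'), ('--paper',))
# A leading '--' separator is dropped:
assert split_options(['--', 'list', '--short']) == (('list',), ('--short',))
# No options at all yields empty command and option tuples:
assert split_options([]) == ((), ())
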
borgmatic/borg/break_lock.py (new file, 31 lines)
@@ -0,0 +1,31 @@
import logging

from borgmatic.borg import environment, flags
from borgmatic.execute import execute_command

logger = logging.getLogger(__name__)


def break_lock(
    repository, storage_config, local_borg_version, local_path='borg', remote_path=None,
):
    '''
    Given a local or remote repository path, a storage configuration dict, the local Borg version,
    and optional local and remote Borg paths, break any repository and cache locks leftover from Borg
    aborting.
    '''
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)

    full_command = (
        (local_path, 'break-lock')
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + flags.make_repository_flags(repository, local_borg_version)
    )

    borg_environment = environment.make_environment(storage_config)
    execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment)

borgmatic/borg/check.py (new file, 334 lines)
@@ -0,0 +1,334 @@
import argparse
import datetime
import json
import logging
import os
import pathlib

from borgmatic.borg import environment, extract, feature, flags, rinfo, state
from borgmatic.execute import DO_NOT_CAPTURE, execute_command

DEFAULT_CHECKS = (
    {'name': 'repository', 'frequency': '1 month'},
    {'name': 'archives', 'frequency': '1 month'},
)
DEFAULT_PREFIX = '{hostname}-'


logger = logging.getLogger(__name__)


def parse_checks(consistency_config, only_checks=None):
    '''
    Given a consistency config with a "checks" sequence of dicts and an optional list of override
    checks, return a tuple of named checks to run.

    For example, given a retention config of:

        {'checks': ({'name': 'repository'}, {'name': 'archives'})}

    This will be returned as:

        ('repository', 'archives')

    If no "checks" option is present in the config, return the DEFAULT_CHECKS. If a checks value
    has a name of "disabled", return an empty tuple, meaning that no checks should be run.
    '''
    checks = only_checks or tuple(
        check_config['name']
        for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS)
    )
    checks = tuple(check.lower() for check in checks)

    if 'disabled' in checks:
        if len(checks) > 1:
            logger.warning(
                'Multiple checks are configured, but one of them is "disabled"; not running any checks'
            )
        return ()

    return checks


def parse_frequency(frequency):
    '''
    Given a frequency string with a number and a unit of time, return a corresponding
    datetime.timedelta instance or None if the frequency is None or "always".

    For instance, given "3 weeks", return datetime.timedelta(weeks=3)

    Raise ValueError if the given frequency cannot be parsed.
    '''
    if not frequency:
        return None

    frequency = frequency.strip().lower()

    if frequency == 'always':
        return None

    try:
        number, time_unit = frequency.split(' ')
        number = int(number)
    except ValueError:
        raise ValueError(f"Could not parse consistency check frequency '{frequency}'")

    if not time_unit.endswith('s'):
        time_unit += 's'

    if time_unit == 'months':
        number *= 30
        time_unit = 'days'
    elif time_unit == 'years':
        number *= 365
        time_unit = 'days'

    try:
        return datetime.timedelta(**{time_unit: number})
    except TypeError:
        raise ValueError(f"Could not parse consistency check frequency '{frequency}'")
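# Illustrative usage, not part of the committed file: given the unit
# normalization above (months become 30 days, years 365 days), parse_frequency()
# behaves like so:
#
#     parse_frequency('3 weeks')    # datetime.timedelta(weeks=3)
#     parse_frequency('1 month')    # datetime.timedelta(days=30)
#     parse_frequency('2 years')    # datetime.timedelta(days=730)
#     parse_frequency('always')     # None: the check always runs
#     parse_frequency(None)         # None
#     parse_frequency('sometimes')  # raises ValueError: no number/unit pair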


def filter_checks_on_frequency(
    location_config, consistency_config, borg_repository_id, checks, force
):
    '''
    Given a location config, a consistency config with a "checks" sequence of dicts, a Borg
    repository ID, a sequence of checks, and whether to force checks to run, filter down those
    checks based on the configured "frequency" for each check as compared to its check time file.

    In other words, a check whose check time file's timestamp is too new (based on the configured
    frequency) will get cut from the returned sequence of checks. Example:

        consistency_config = {
            'checks': [
                {
                    'name': 'archives',
                    'frequency': '2 weeks',
                },
            ]
        }

    When this function is called with that consistency_config and "archives" in checks, "archives"
    will get filtered out of the returned result if its check time file is newer than 2 weeks old,
    indicating that it's not yet time to run that check again.

    Raise ValueError if a frequency cannot be parsed.
    '''
    filtered_checks = list(checks)

    if force:
        return tuple(filtered_checks)

    for check_config in consistency_config.get('checks', DEFAULT_CHECKS):
        check = check_config['name']
        if checks and check not in checks:
            continue

        frequency_delta = parse_frequency(check_config.get('frequency'))
        if not frequency_delta:
            continue

        check_time = read_check_time(
            make_check_time_path(location_config, borg_repository_id, check)
        )
        if not check_time:
            continue

        # If we've not yet reached the time when the frequency dictates we're ready for another
        # check, skip this check.
        if datetime.datetime.now() < check_time + frequency_delta:
            remaining = check_time + frequency_delta - datetime.datetime.now()
            logger.info(
                f'Skipping {check} check due to configured frequency; {remaining} until next check'
            )
            filtered_checks.remove(check)

    return tuple(filtered_checks)


def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
    '''
    Given the local Borg version and a parsed sequence of checks, transform the checks into a tuple
    of command-line flags.

    For example, given parsed checks of:

        ('repository',)

    This will be returned as:

        ('--repository-only',)

    However, if both "repository" and "archives" are in checks, then omit them from the returned
    flags because Borg does both checks by default. If "data" is in checks, that implies "archives".

    Additionally, if a check_last value is given and "archives" is in checks, then include a
    "--last" flag. And if a prefix value is given and "archives" is in checks, then include a
    "--match-archives" flag.
    '''
    if 'data' in checks:
        data_flags = ('--verify-data',)
        checks += ('archives',)
    else:
        data_flags = ()

    if 'archives' in checks:
        last_flags = ('--last', str(check_last)) if check_last else ()
        if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
            match_archives_flags = ('--match-archives', f'sh:{prefix}*') if prefix else ()
        else:
            match_archives_flags = ('--glob-archives', f'{prefix}*') if prefix else ()
    else:
        last_flags = ()
        match_archives_flags = ()
        if check_last:
            logger.warning(
                'Ignoring check_last option, as "archives" or "data" are not in consistency checks'
            )
        if prefix:
            logger.warning(
                'Ignoring consistency prefix option, as "archives" or "data" are not in consistency checks'
            )

    common_flags = last_flags + match_archives_flags + data_flags

    if {'repository', 'archives'}.issubset(set(checks)):
        return common_flags

    return (
        tuple('--{}-only'.format(check) for check in checks if check in ('repository', 'archives'))
        + common_flags
    )
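# Illustrative usage, not part of the committed file: worked examples of the
# flag mapping above, assuming a Borg version new enough that
# feature.available(...) reports --match-archives support (older Borg gets
# --glob-archives instead):
#
#     make_check_flags('2.0.0', ('repository',))
#     # => ('--repository-only',)
#
#     make_check_flags('2.0.0', ('repository', 'archives'))
#     # => (): Borg runs both checks by default, so no -only flags are needed
#
#     make_check_flags('2.0.0', ('archives',), check_last=3, prefix='app-')
#     # => ('--archives-only', '--last', '3', '--match-archives', 'sh:app-*')
#
#     make_check_flags('2.0.0', ('data',))
#     # => ('--archives-only', '--verify-data'): "data" implies "archives"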
||||||
|
|
||||||
|
|
||||||
|
def make_check_time_path(location_config, borg_repository_id, check_type):
    '''
    Given a location configuration dict, a Borg repository ID, and the name of a check type
    ("repository", "archives", etc.), return a path for recording that check's time (the time of
    that check last occurring).
    '''
    return os.path.join(
        os.path.expanduser(
            location_config.get(
                'borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
            )
        ),
        'checks',
        borg_repository_id,
        check_type,
    )

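As a sketch, with an empty location config (so the default borgmatic source directory, typically ~/.borgmatic, applies) and an illustrative repository ID:

    make_check_time_path({}, 'abc123', 'repository')
    # -> e.g. '/home/user/.borgmatic/checks/abc123/repository'
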
def write_check_time(path):  # pragma: no cover
    '''
    Record a check time of now as the modification time of the given path.
    '''
    logger.debug(f'Writing check time at {path}')

    os.makedirs(os.path.dirname(path), mode=0o700, exist_ok=True)
    pathlib.Path(path).touch(mode=0o600)


def read_check_time(path):
    '''
    Return the check time based on the modification time of the given path. Return None if the
    path doesn't exist.
    '''
    logger.debug(f'Reading check time from {path}')

    try:
        return datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
    except FileNotFoundError:
        return None

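A minimal round trip through these two helpers (the path here is purely illustrative):

    import os.path
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), 'checks', 'abc123', 'repository')
    write_check_time(path)
    assert read_check_time(path) is not None   # Approximately datetime.datetime.now().
    assert read_check_time(path + '.missing') is None
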
def check_archives(
    repository,
    location_config,
    storage_config,
    consistency_config,
    local_borg_version,
    local_path='borg',
    remote_path=None,
    progress=None,
    repair=None,
    only_checks=None,
    force=None,
):
    '''
    Given a local or remote repository path, location/storage/consistency config dicts, the local
    Borg version, local/remote commands to run, whether to include progress information, whether
    to attempt a repair, and an optional list of checks to use instead of configured checks,
    check the contained Borg archives for consistency.

    If there are no consistency checks to run, skip running them.

    Raise ValueError if the Borg repository ID cannot be determined.
    '''
    try:
        borg_repository_id = json.loads(
            rinfo.display_repository_info(
                repository,
                storage_config,
                local_borg_version,
                argparse.Namespace(json=True),
                local_path,
                remote_path,
            )
        )['repository']['id']
    except (json.JSONDecodeError, KeyError):
        raise ValueError(f'Cannot determine Borg repository ID for {repository}')

    checks = filter_checks_on_frequency(
        location_config,
        consistency_config,
        borg_repository_id,
        parse_checks(consistency_config, only_checks),
        force,
    )
    check_last = consistency_config.get('check_last', None)
    lock_wait = None
    extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '')

    if set(checks).intersection({'repository', 'archives', 'data'}):
        lock_wait = storage_config.get('lock_wait', None)

        verbosity_flags = ()
        if logger.isEnabledFor(logging.INFO):
            verbosity_flags = ('--info',)
        if logger.isEnabledFor(logging.DEBUG):
            verbosity_flags = ('--debug', '--show-rc')

        prefix = consistency_config.get('prefix', DEFAULT_PREFIX)

        full_command = (
            (local_path, 'check')
            + (('--repair',) if repair else ())
            + make_check_flags(local_borg_version, checks, check_last, prefix)
            + (('--remote-path', remote_path) if remote_path else ())
            + (('--lock-wait', str(lock_wait)) if lock_wait else ())
            + verbosity_flags
            + (('--progress',) if progress else ())
            + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
            + flags.make_repository_flags(repository, local_borg_version)
        )

        borg_environment = environment.make_environment(storage_config)

        # The Borg repair option triggers an interactive prompt, which won't work when output is
        # captured. And progress messes with the terminal directly.
        if repair or progress:
            execute_command(
                full_command, output_file=DO_NOT_CAPTURE, extra_environment=borg_environment
            )
        else:
            execute_command(full_command, extra_environment=borg_environment)

        for check in checks:
            write_check_time(make_check_time_path(location_config, borg_repository_id, check))

    if 'extract' in checks:
        extract.extract_last_archive_dry_run(
            storage_config, local_borg_version, repository, lock_wait, local_path, remote_path
        )
        write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract'))

borgmatic/borg/compact.py (new file, 51 lines)
@@ -0,0 +1,51 @@
import logging

from borgmatic.borg import environment, flags
from borgmatic.execute import execute_command

logger = logging.getLogger(__name__)


def compact_segments(
    dry_run,
    repository,
    storage_config,
    local_borg_version,
    local_path='borg',
    remote_path=None,
    progress=False,
    cleanup_commits=False,
    threshold=None,
):
    '''
    Given a dry-run flag, a local or remote repository path, a storage config dict, and the local
    Borg version, compact the segments in a repository.
    '''
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)
    extra_borg_options = storage_config.get('extra_borg_options', {}).get('compact', '')

    full_command = (
        (local_path, 'compact')
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--progress',) if progress else ())
        + (('--cleanup-commits',) if cleanup_commits else ())
        + (('--threshold', str(threshold)) if threshold else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
        + flags.make_repository_flags(repository, local_borg_version)
    )

    if dry_run:
        logger.info(f'{repository}: Skipping compact (dry run)')
        return

    execute_command(
        full_command,
        output_log_level=logging.INFO,
        borg_local_path=local_path,
        extra_environment=environment.make_environment(storage_config),
    )

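A hypothetical invocation against a local repository, assuming Borg 1.2+ (where "borg compact" first became available):

    compact_segments(
        dry_run=False,
        repository='/mnt/backups/repo.borg',  # Hypothetical repository path.
        storage_config={'lock_wait': 5},
        local_borg_version='1.2.3',
        threshold=20,  # Passed through as "--threshold 20".
    )
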
borgmatic/borg/create.py (new file, 504 lines)
@@ -0,0 +1,504 @@
import glob
import itertools
import logging
import os
import pathlib
import stat
import tempfile

import borgmatic.logger
from borgmatic.borg import environment, feature, flags, state
from borgmatic.execute import (
    DO_NOT_CAPTURE,
    execute_command,
    execute_command_and_capture_output,
    execute_command_with_processes,
)

logger = logging.getLogger(__name__)


def expand_directory(directory):
    '''
    Given a directory path, expand any tilde (representing a user's home directory) and any globs
    therein. Return a list of one or more resulting paths.
    '''
    expanded_directory = os.path.expanduser(directory)

    return glob.glob(expanded_directory) or [expanded_directory]

def expand_directories(directories):
    '''
    Given a sequence of directory paths, expand tildes and globs in each one. Return all the
    resulting directories as a single flattened tuple.
    '''
    if directories is None:
        return ()

    return tuple(
        itertools.chain.from_iterable(expand_directory(directory) for directory in directories)
    )


def expand_home_directories(directories):
    '''
    Given a sequence of directory paths, expand tildes in each one. Do not perform any globbing.
    Return the results as a tuple.
    '''
    if directories is None:
        return ()

    return tuple(os.path.expanduser(directory) for directory in directories)

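The distinction between the two expansion helpers, sketched (actual results depend on the local filesystem):

    expand_directories(('~/src/*',))       # Tilde plus glob expansion, e.g. ('/home/user/src/app',)
    expand_home_directories(('~/src/*',))  # Tilde only: ('/home/user/src/*',)
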
def map_directories_to_devices(directories):
    '''
    Given a sequence of directories, return a map from directory to an identifier for the device
    on which that directory resides, or None if the path doesn't exist.

    This is handy for determining whether two different directories are on the same filesystem
    (have the same device identifier).
    '''
    return {
        directory: os.stat(directory).st_dev if os.path.exists(directory) else None
        for directory in directories
    }

def deduplicate_directories(directory_devices, additional_directory_devices):
    '''
    Given a map from directory to the identifier for the device on which that directory resides,
    return the directories as a sorted tuple with all duplicate child directories removed. For
    instance, if paths is ('/foo', '/foo/bar'), return just: ('/foo',)

    The one exception to this rule is if two paths are on different filesystems (devices). In
    that case, they won't get de-duplicated in case they both need to be passed to Borg (e.g. the
    location.one_file_system option is true).

    The idea is that if Borg is given a parent directory, then it doesn't also need to be given
    child directories, because it will naturally spider the contents of the parent directory. And
    there are cases where Borg coming across the same file twice will result in duplicate reads
    and even hangs, e.g. when a database hook is using a named pipe for streaming database dumps
    to Borg.

    If any additional directory devices are given, also deduplicate against them, but don't
    include them in the returned directories.
    '''
    deduplicated = set()
    directories = sorted(directory_devices.keys())
    additional_directories = sorted(additional_directory_devices.keys())
    all_devices = {**directory_devices, **additional_directory_devices}

    for directory in directories:
        deduplicated.add(directory)
        parents = pathlib.PurePath(directory).parents

        # If another directory in the given list (or the additional list) is a parent of current
        # directory (even n levels up) and both are on the same filesystem, then the current
        # directory is a duplicate.
        for other_directory in directories + additional_directories:
            for parent in parents:
                if (
                    pathlib.PurePath(other_directory) == parent
                    and all_devices[directory] is not None
                    and all_devices[other_directory] == all_devices[directory]
                ):
                    if directory in deduplicated:
                        deduplicated.remove(directory)
                    break

    return tuple(sorted(deduplicated))

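For example, with hypothetical device identifiers, '/foo/bar' is dropped because '/foo' already covers it on the same device, while '/mnt/data' on a different device survives:

    deduplicate_directories(
        directory_devices={'/foo': 66310, '/foo/bar': 66310, '/mnt/data': 66311},
        additional_directory_devices={},
    )
    # -> ('/foo', '/mnt/data')
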
def write_pattern_file(patterns=None, sources=None, pattern_file=None):
    '''
    Given a sequence of patterns and an optional sequence of source directories, write them to a
    named temporary file (with the source directories as additional roots) and return the file.
    If an optional open pattern file is given, overwrite it instead of making a new temporary
    file. Return None if no patterns are provided.
    '''
    if not patterns and not sources:
        return None

    if pattern_file is None:
        pattern_file = tempfile.NamedTemporaryFile('w')
    else:
        pattern_file.seek(0)

    pattern_file.write(
        '\n'.join(tuple(patterns or ()) + tuple(f'R {source}' for source in (sources or [])))
    )
    pattern_file.flush()

    return pattern_file

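As a sketch, two patterns plus one source directory produce a temporary file containing one entry per line:

    pattern_file = write_pattern_file(
        patterns=['- /home/user/.cache', '+ /home/user'],
        sources=['/home/user'],
    )
    # The file's contents:
    #     - /home/user/.cache
    #     + /home/user
    #     R /home/user
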
def ensure_files_readable(*filename_lists):
    '''
    Given a sequence of filename sequences, ensure that each filename is openable. This prevents
    unreadable files from being passed to Borg, which in certain situations only warns instead of
    erroring.
    '''
    for filename in itertools.chain.from_iterable(
        filename_list for filename_list in filename_lists if filename_list
    ):
        open(filename).close()

def make_pattern_flags(location_config, pattern_filename=None):
    '''
    Given a location config dict with a potential patterns_from option, and a filename containing
    any additional patterns, return the corresponding Borg flags for those files as a tuple.
    '''
    pattern_filenames = tuple(location_config.get('patterns_from') or ()) + (
        (pattern_filename,) if pattern_filename else ()
    )

    return tuple(
        itertools.chain.from_iterable(
            ('--patterns-from', pattern_filename) for pattern_filename in pattern_filenames
        )
    )

def make_exclude_flags(location_config, exclude_filename=None):
    '''
    Given a location config dict with various exclude options, and a filename containing any
    exclude patterns, return the corresponding Borg flags as a tuple.
    '''
    exclude_filenames = tuple(location_config.get('exclude_from') or ()) + (
        (exclude_filename,) if exclude_filename else ()
    )
    exclude_from_flags = tuple(
        itertools.chain.from_iterable(
            ('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames
        )
    )
    caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else ()
    if_present_flags = tuple(
        itertools.chain.from_iterable(
            ('--exclude-if-present', if_present)
            for if_present in location_config.get('exclude_if_present', ())
        )
    )
    keep_exclude_tags_flags = (
        ('--keep-exclude-tags',) if location_config.get('keep_exclude_tags') else ()
    )
    exclude_nodump_flags = ('--exclude-nodump',) if location_config.get('exclude_nodump') else ()

    return (
        exclude_from_flags
        + caches_flag
        + if_present_flags
        + keep_exclude_tags_flags
        + exclude_nodump_flags
    )

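A sketch with a location config that enables a few of these options (the exclude filename is illustrative):

    make_exclude_flags(
        location_config={'exclude_caches': True, 'exclude_if_present': ['.nobackup']},
        exclude_filename='/tmp/excludes',
    )
    # -> ('--exclude-from', '/tmp/excludes', '--exclude-caches',
    #     '--exclude-if-present', '.nobackup')
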
def make_list_filter_flags(local_borg_version, dry_run):
    '''
    Given the local Borg version and whether this is a dry run, return the corresponding flags
    for passing to "--list --filter". The general idea is that excludes are shown for a dry run
    or when the verbosity is debug.
    '''
    base_flags = 'AME'
    show_excludes = logger.isEnabledFor(logging.DEBUG)

    if feature.available(feature.Feature.EXCLUDED_FILES_MINUS, local_borg_version):
        if show_excludes or dry_run:
            return f'{base_flags}+-'
        else:
            return base_flags

    if show_excludes:
        return f'{base_flags}x-'
    else:
        return f'{base_flags}-'


DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'

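Both the Borg version and the current logging verbosity matter here. A sketch:

    make_list_filter_flags('2.0.0b5', dry_run=True)   # -> 'AME+-' (excludes shown with "-")
    make_list_filter_flags('1.2.3', dry_run=False)    # -> 'AME-' at normal verbosity
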
def collect_borgmatic_source_directories(borgmatic_source_directory):
    '''
    Return a list of borgmatic-specific source directories used for state like database backups.
    '''
    if not borgmatic_source_directory:
        borgmatic_source_directory = state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY

    return (
        [borgmatic_source_directory]
        if os.path.exists(os.path.expanduser(borgmatic_source_directory))
        else []
    )

ROOT_PATTERN_PREFIX = 'R '


def pattern_root_directories(patterns=None):
    '''
    Given a sequence of patterns, parse out and return just the root directories.
    '''
    if not patterns:
        return []

    return [
        pattern.split(ROOT_PATTERN_PREFIX, maxsplit=1)[1]
        for pattern in patterns
        if pattern.startswith(ROOT_PATTERN_PREFIX)
    ]

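For instance, only the "R"-prefixed root pattern contributes:

    pattern_root_directories(['R /home', '- /home/user/.cache'])  # -> ['/home']
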
def special_file(path):
    '''
    Return whether the given path is a special file (character device, block device, or named
    pipe / FIFO).
    '''
    try:
        mode = os.stat(path).st_mode
    except OSError:  # Includes FileNotFoundError.
        return False

    return stat.S_ISCHR(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode)

def any_parent_directories(path, candidate_parents):
    '''
    Return whether any of the given candidate parent directories are an actual parent of the
    given path. This includes grandparents, etc.
    '''
    for parent in candidate_parents:
        if pathlib.PurePosixPath(parent) in pathlib.PurePath(path).parents:
            return True

    return False

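Two quick illustrations:

    any_parent_directories('/var/lib/db/dump.fifo', ('/var/lib',))  # -> True
    any_parent_directories('/etc/passwd', ('/var/lib',))            # -> False
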
def collect_special_file_paths(
    create_command, local_path, working_directory, borg_environment, skip_directories
):
    '''
    Given a Borg create command as a tuple, a local Borg path, a working directory, a dict of
    environment variables to pass to Borg, and a sequence of parent directories to skip, collect
    the paths for any special files (character devices, block devices, and named pipes / FIFOs)
    that Borg would encounter during a create. These are all paths that could cause Borg to hang
    if its --read-special flag is used.
    '''
    paths_output = execute_command_and_capture_output(
        create_command + ('--dry-run', '--list'),
        capture_stderr=True,
        working_directory=working_directory,
        extra_environment=borg_environment,
    )

    paths = tuple(
        path_line.split(' ', 1)[1]
        for path_line in paths_output.split('\n')
        if path_line and (path_line.startswith('- ') or path_line.startswith('+ '))
    )

    return tuple(
        path
        for path in paths
        if special_file(path) and not any_parent_directories(path, skip_directories)
    )

def create_archive(
    dry_run,
    repository,
    location_config,
    storage_config,
    local_borg_version,
    local_path='borg',
    remote_path=None,
    progress=False,
    stats=False,
    json=False,
    list_files=False,
    stream_processes=None,
):
    '''
    Given verbosity/dry-run flags, a local or remote repository path, a location config dict, and
    a storage config dict, create a Borg archive and return Borg's JSON output (if any).

    If a sequence of stream processes is given (instances of subprocess.Popen), then execute the
    create command while also triggering the given processes to produce output.
    '''
    borgmatic.logger.add_custom_log_levels()
    borgmatic_source_directories = expand_directories(
        collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
    )
    sources = deduplicate_directories(
        map_directories_to_devices(
            expand_directories(
                tuple(location_config.get('source_directories', ())) + borgmatic_source_directories
            )
        ),
        additional_directory_devices=map_directories_to_devices(
            expand_directories(pattern_root_directories(location_config.get('patterns')))
        ),
    )

    ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))

    try:
        working_directory = os.path.expanduser(location_config.get('working_directory'))
    except TypeError:
        working_directory = None

    pattern_file = (
        write_pattern_file(location_config.get('patterns'), sources)
        if location_config.get('patterns') or location_config.get('patterns_from')
        else None
    )
    exclude_file = write_pattern_file(
        expand_home_directories(location_config.get('exclude_patterns'))
    )
    checkpoint_interval = storage_config.get('checkpoint_interval', None)
    checkpoint_volume = storage_config.get('checkpoint_volume', None)
    chunker_params = storage_config.get('chunker_params', None)
    compression = storage_config.get('compression', None)
    upload_rate_limit = storage_config.get('upload_rate_limit', None)
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)
    list_filter_flags = make_list_filter_flags(local_borg_version, dry_run)
    files_cache = location_config.get('files_cache')
    archive_name_format = storage_config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT)
    extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '')

    if feature.available(feature.Feature.ATIME, local_borg_version):
        atime_flags = ('--atime',) if location_config.get('atime') is True else ()
    else:
        atime_flags = ('--noatime',) if location_config.get('atime') is False else ()

    if feature.available(feature.Feature.NOFLAGS, local_borg_version):
        noflags_flags = ('--noflags',) if location_config.get('flags') is False else ()
    else:
        noflags_flags = ('--nobsdflags',) if location_config.get('flags') is False else ()

    if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
        numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
    else:
        numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()

    if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version):
        upload_ratelimit_flags = (
            ('--upload-ratelimit', str(upload_rate_limit)) if upload_rate_limit else ()
        )
    else:
        upload_ratelimit_flags = (
            ('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else ()
        )

    if stream_processes and location_config.get('read_special') is False:
        logger.warning(
            f'{repository}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
        )

    create_command = (
        tuple(local_path.split(' '))
        + ('create',)
        + make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
        + make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
        + (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
        + (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ())
        + (('--chunker-params', chunker_params) if chunker_params else ())
        + (('--compression', compression) if compression else ())
        + upload_ratelimit_flags
        + (
            ('--one-file-system',)
            if location_config.get('one_file_system') or stream_processes
            else ()
        )
        + numeric_ids_flags
        + atime_flags
        + (('--noctime',) if location_config.get('ctime') is False else ())
        + (('--nobirthtime',) if location_config.get('birthtime') is False else ())
        + (('--read-special',) if location_config.get('read_special') or stream_processes else ())
        + noflags_flags
        + (('--files-cache', files_cache) if files_cache else ())
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (
            ('--list', '--filter', list_filter_flags)
            if list_files and not json and not progress
            else ()
        )
        + (('--dry-run',) if dry_run else ())
        + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
        + flags.make_repository_archive_flags(repository, archive_name_format, local_borg_version)
        + (sources if not pattern_file else ())
    )

    if json:
        output_log_level = None
    elif list_files or (stats and not dry_run):
        output_log_level = logging.ANSWER
    else:
        output_log_level = logging.INFO

    # The progress output isn't compatible with captured and logged output, as progress messes
    # with the terminal directly.
    output_file = DO_NOT_CAPTURE if progress else None

    borg_environment = environment.make_environment(storage_config)

    # If database hooks are enabled (as indicated by streaming processes), exclude files that
    # might cause Borg to hang. But skip this if the user has explicitly set "read_special" to
    # True.
    if stream_processes and not location_config.get('read_special'):
        logger.debug(f'{repository}: Collecting special file paths')
        special_file_paths = collect_special_file_paths(
            create_command,
            local_path,
            working_directory,
            borg_environment,
            skip_directories=borgmatic_source_directories,
        )

        if special_file_paths:
            logger.warning(
                f'{repository}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}'
            )
            exclude_file = write_pattern_file(
                expand_home_directories(
                    tuple(location_config.get('exclude_patterns') or ()) + special_file_paths
                ),
                pattern_file=exclude_file,
            )
            create_command += make_exclude_flags(location_config, exclude_file.name)

    create_command += (
        (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ())
        + (('--stats',) if stats and not json and not dry_run else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not json else ())
        + (('--progress',) if progress else ())
        + (('--json',) if json else ())
    )

    if stream_processes:
        return execute_command_with_processes(
            create_command,
            stream_processes,
            output_log_level,
            output_file,
            borg_local_path=local_path,
            working_directory=working_directory,
            extra_environment=borg_environment,
        )
    elif output_log_level is None:
        return execute_command_and_capture_output(
            create_command, working_directory=working_directory, extra_environment=borg_environment
        )
    else:
        execute_command(
            create_command,
            output_log_level,
            output_file,
            borg_local_path=local_path,
            working_directory=working_directory,
            extra_environment=borg_environment,
        )

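A hypothetical minimal invocation, roughly what borgmatic's create action performs once configuration is parsed (the paths and config values here are illustrative):

    json_output = create_archive(
        dry_run=False,
        repository='ssh://user@backupserver/./repo.borg',
        location_config={'source_directories': ['/etc', '/home']},
        storage_config={'compression': 'lz4'},
        local_borg_version='1.2.3',
        json=True,  # Capture and return Borg's JSON output instead of logging it.
    )
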
borgmatic/borg/environment.py (new file, 39 lines)
@@ -0,0 +1,39 @@
OPTION_TO_ENVIRONMENT_VARIABLE = {
    'borg_base_directory': 'BORG_BASE_DIR',
    'borg_config_directory': 'BORG_CONFIG_DIR',
    'borg_cache_directory': 'BORG_CACHE_DIR',
    'borg_security_directory': 'BORG_SECURITY_DIR',
    'borg_keys_directory': 'BORG_KEYS_DIR',
    'encryption_passcommand': 'BORG_PASSCOMMAND',
    'encryption_passphrase': 'BORG_PASSPHRASE',
    'ssh_command': 'BORG_RSH',
    'temporary_directory': 'TMPDIR',
}

DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE = {
    'relocated_repo_access_is_ok': 'BORG_RELOCATED_REPO_ACCESS_IS_OK',
    'unknown_unencrypted_repo_access_is_ok': 'BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK',
}


def make_environment(storage_config):
    '''
    Given a borgmatic storage configuration dict, return its options converted to a Borg
    environment variable dict.
    '''
    environment = {}

    for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items():
        value = storage_config.get(option_name)

        if value:
            environment[environment_variable_name] = value

    for (
        option_name,
        environment_variable_name,
    ) in DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE.items():
        value = storage_config.get(option_name, False)
        environment[environment_variable_name] = 'yes' if value else 'no'

    return environment

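For example, string options map directly to Borg environment variables, while the boolean options always get an explicit "yes"/"no":

    make_environment({'encryption_passphrase': 'hunter2'})
    # -> {'BORG_PASSPHRASE': 'hunter2',
    #     'BORG_RELOCATED_REPO_ACCESS_IS_OK': 'no',
    #     'BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK': 'no'}
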
borgmatic/borg/export_tar.py (new file, 73 lines)
@@ -0,0 +1,73 @@
import logging
import os

import borgmatic.logger
from borgmatic.borg import environment, flags
from borgmatic.execute import DO_NOT_CAPTURE, execute_command

logger = logging.getLogger(__name__)


def export_tar_archive(
    dry_run,
    repository,
    archive,
    paths,
    destination_path,
    storage_config,
    local_borg_version,
    local_path='borg',
    remote_path=None,
    tar_filter=None,
    list_files=False,
    strip_components=None,
):
    '''
    Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths
    to export from the archive, a destination path to export to, a storage configuration dict,
    the local Borg version, optional local and remote Borg paths, an optional filter program,
    whether to include per-file details, and an optional number of path components to strip,
    export the archive into the given destination path as a tar-formatted file.

    If the destination path is "-", then stream the output to stdout instead of to a file.
    '''
    borgmatic.logger.add_custom_log_levels()
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)

    full_command = (
        (local_path, 'export-tar')
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--list',) if list_files else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + (('--dry-run',) if dry_run else ())
        + (('--tar-filter', tar_filter) if tar_filter else ())
        + (('--strip-components', str(strip_components)) if strip_components else ())
        + flags.make_repository_archive_flags(
            repository if ':' in repository else os.path.abspath(repository),
            archive,
            local_borg_version,
        )
        + (destination_path,)
        + (tuple(paths) if paths else ())
    )

    if list_files:
        output_log_level = logging.ANSWER
    else:
        output_log_level = logging.INFO

    if dry_run:
        logger.info(f'{repository}: Skipping export to tar file (dry run)')
        return

    execute_command(
        full_command,
        output_file=DO_NOT_CAPTURE if destination_path == '-' else None,
        output_log_level=output_log_level,
        borg_local_path=local_path,
        extra_environment=environment.make_environment(storage_config),
    )

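A hypothetical call that streams a gzipped tar of an archive to stdout:

    export_tar_archive(
        dry_run=False,
        repository='/mnt/backups/repo.borg',          # Illustrative path.
        archive='myhost-2023-01-01T00:00:00.000000',  # Illustrative archive name.
        paths=None,
        destination_path='-',  # "-" streams to stdout instead of writing a file.
        storage_config={},
        local_borg_version='1.2.3',
        tar_filter='gzip',
    )
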
borgmatic/borg/extract.py (new file, 143 lines)
@@ -0,0 +1,143 @@
import logging
import os
import subprocess

from borgmatic.borg import environment, feature, flags, rlist
from borgmatic.execute import DO_NOT_CAPTURE, execute_command

logger = logging.getLogger(__name__)


def extract_last_archive_dry_run(
    storage_config,
    local_borg_version,
    repository,
    lock_wait=None,
    local_path='borg',
    remote_path=None,
):
    '''
    Perform an extraction dry-run of the most recent archive. If there are no archives, skip the
    dry-run.
    '''
    remote_path_flags = ('--remote-path', remote_path) if remote_path else ()
    lock_wait_flags = ('--lock-wait', str(lock_wait)) if lock_wait else ()
    verbosity_flags = ()
    if logger.isEnabledFor(logging.DEBUG):
        verbosity_flags = ('--debug', '--show-rc')
    elif logger.isEnabledFor(logging.INFO):
        verbosity_flags = ('--info',)

    try:
        last_archive_name = rlist.resolve_archive_name(
            repository, 'latest', storage_config, local_borg_version, local_path, remote_path
        )
    except ValueError:
        logger.warning('No archives found. Skipping extract consistency check.')
        return

    list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else ()
    borg_environment = environment.make_environment(storage_config)
    full_extract_command = (
        (local_path, 'extract', '--dry-run')
        + remote_path_flags
        + lock_wait_flags
        + verbosity_flags
        + list_flag
        + flags.make_repository_archive_flags(repository, last_archive_name, local_borg_version)
    )

    execute_command(
        full_extract_command, working_directory=None, extra_environment=borg_environment
    )

def extract_archive(
    dry_run,
    repository,
    archive,
    paths,
    location_config,
    storage_config,
    local_borg_version,
    local_path='borg',
    remote_path=None,
    destination_path=None,
    strip_components=None,
    progress=False,
    extract_to_stdout=False,
):
    '''
    Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths
    to restore from the archive, location/storage configuration dicts, the local Borg version
    string, optional local and remote Borg paths, and an optional destination path to extract to,
    extract the archive into the current directory.

    If extract to stdout is True, then start the extraction streaming to stdout, and return that
    extract process as an instance of subprocess.Popen.
    '''
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)

    if progress and extract_to_stdout:
        raise ValueError('progress and extract_to_stdout cannot both be set')

    if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
        numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
    else:
        numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()

    if strip_components == 'all':
        if not paths:
            raise ValueError('The --strip-components flag with "all" requires at least one --path')

        # Calculate the maximum number of leading path components of the given paths.
        strip_components = max(0, *(len(path.split(os.path.sep)) - 1 for path in paths))

    full_command = (
        (local_path, 'extract')
        + (('--remote-path', remote_path) if remote_path else ())
        + numeric_ids_flags
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--list', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + (('--dry-run',) if dry_run else ())
        + (('--strip-components', str(strip_components)) if strip_components else ())
        + (('--progress',) if progress else ())
        + (('--stdout',) if extract_to_stdout else ())
        + flags.make_repository_archive_flags(
            repository if ':' in repository else os.path.abspath(repository),
            archive,
            local_borg_version,
        )
        + (tuple(paths) if paths else ())
    )

    borg_environment = environment.make_environment(storage_config)

    # The progress output isn't compatible with captured and logged output, as progress messes
    # with the terminal directly.
    if progress:
        return execute_command(
            full_command,
            output_file=DO_NOT_CAPTURE,
            working_directory=destination_path,
            extra_environment=borg_environment,
        )

    if extract_to_stdout:
        return execute_command(
            full_command,
            output_file=subprocess.PIPE,
            working_directory=destination_path,
            run_to_completion=False,
            extra_environment=borg_environment,
        )

    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a
    # warning if the restore paths don't exist in the archive.
    execute_command(
        full_command, working_directory=destination_path, extra_environment=borg_environment
    )

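A hypothetical restore of two paths from an archive into a destination directory:

    extract_archive(
        dry_run=False,
        repository='/mnt/backups/repo.borg',
        archive='myhost-2023-01-01T00:00:00.000000',
        paths=['etc/fstab', 'home/user'],
        location_config={},
        storage_config={},
        local_borg_version='1.2.3',
        destination_path='/tmp/restore',
    )
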
borgmatic/borg/feature.py (new file, 40 lines)
@@ -0,0 +1,40 @@
from enum import Enum

from pkg_resources import parse_version


class Feature(Enum):
    COMPACT = 1
    ATIME = 2
    NOFLAGS = 3
    NUMERIC_IDS = 4
    UPLOAD_RATELIMIT = 5
    SEPARATE_REPOSITORY_ARCHIVE = 6
    RCREATE = 7
    RLIST = 8
    RINFO = 9
    MATCH_ARCHIVES = 10
    EXCLUDED_FILES_MINUS = 11


FEATURE_TO_MINIMUM_BORG_VERSION = {
    Feature.COMPACT: parse_version('1.2.0a2'),  # borg compact
    Feature.ATIME: parse_version('1.2.0a7'),  # borg create --atime
    Feature.NOFLAGS: parse_version('1.2.0a8'),  # borg create --noflags
    Feature.NUMERIC_IDS: parse_version('1.2.0b3'),  # borg create/extract/mount --numeric-ids
    Feature.UPLOAD_RATELIMIT: parse_version('1.2.0b3'),  # borg create --upload-ratelimit
    Feature.SEPARATE_REPOSITORY_ARCHIVE: parse_version('2.0.0a2'),  # --repo with separate archive
    Feature.RCREATE: parse_version('2.0.0a2'),  # borg rcreate
    Feature.RLIST: parse_version('2.0.0a2'),  # borg rlist
    Feature.RINFO: parse_version('2.0.0a2'),  # borg rinfo
    Feature.MATCH_ARCHIVES: parse_version('2.0.0b3'),  # borg --match-archives
    Feature.EXCLUDED_FILES_MINUS: parse_version('2.0.0b5'),  # --list --filter uses "-" for excludes
}


def available(feature, borg_version):
    '''
    Given a Borg Feature constant and a Borg version string, return whether that feature is
    available in that version of Borg.
    '''
    return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse_version(borg_version)

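For instance:

    available(Feature.COMPACT, '1.1.17')  # -> False; "borg compact" arrived in 1.2.0a2.
    available(Feature.COMPACT, '1.2.3')   # -> True
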
borgmatic/borg/flags.py (new file, 58 lines)
@@ -0,0 +1,58 @@
import itertools

from borgmatic.borg import feature


def make_flags(name, value):
    '''
    Given a flag name and its value, return it formatted as Borg-compatible flags.
    '''
    if not value:
        return ()

    flag = '--{}'.format(name.replace('_', '-'))

    if value is True:
        return (flag,)

    return (flag, str(value))


def make_flags_from_arguments(arguments, excludes=()):
    '''
    Given borgmatic command-line arguments as an instance of argparse.Namespace, and optionally a
    list of named arguments to exclude, generate and return the corresponding Borg command-line
    flags as a tuple.
    '''
    return tuple(
        itertools.chain.from_iterable(
            make_flags(name, value=getattr(arguments, name))
            for name in sorted(vars(arguments))
            if name not in excludes and not name.startswith('_')
        )
    )


def make_repository_flags(repository, local_borg_version):
    '''
    Given the path of a Borg repository and the local Borg version, return
    Borg-version-appropriate command-line flags (as a tuple) for selecting that repository.
    '''
    return (
        ('--repo',)
        if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
        else ()
    ) + (repository,)


def make_repository_archive_flags(repository, archive, local_borg_version):
    '''
    Given the path of a Borg repository, an archive name or pattern, and the local Borg version,
    return Borg-version-appropriate command-line flags (as a tuple) for selecting that
    repository and archive.
    '''
    return (
        ('--repo', repository, archive)
        if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
        else (f'{repository}::{archive}',)
    )

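A few sketches of the resulting flag tuples:

    make_flags('lock-wait', 5)    # -> ('--lock-wait', '5')
    make_flags('progress', True)  # -> ('--progress',)
    make_flags('progress', None)  # -> ()

    make_repository_archive_flags('repo', 'myarchive', '1.2.3')
    # -> ('repo::myarchive',)
    make_repository_archive_flags('repo', 'myarchive', '2.0.0b5')
    # -> ('--repo', 'repo', 'myarchive')
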
borgmatic/borg/info.py (new file, 70 lines)
@@ -0,0 +1,70 @@
import logging

import borgmatic.logger
from borgmatic.borg import environment, feature, flags
from borgmatic.execute import execute_command, execute_command_and_capture_output

logger = logging.getLogger(__name__)


def display_archives_info(
    repository,
    storage_config,
    local_borg_version,
    info_arguments,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a local or remote repository path, a storage config dict, the local Borg version, and
    the arguments to the info action, display summary information for Borg archives in the
    repository or return JSON summary information.
    '''
    borgmatic.logger.add_custom_log_levels()
    lock_wait = storage_config.get('lock_wait', None)

    full_command = (
        (local_path, 'info')
        + (
            ('--info',)
            if logger.getEffectiveLevel() == logging.INFO and not info_arguments.json
            else ()
        )
        + (
            ('--debug', '--show-rc')
            if logger.isEnabledFor(logging.DEBUG) and not info_arguments.json
            else ()
        )
        + flags.make_flags('remote-path', remote_path)
        + flags.make_flags('lock-wait', lock_wait)
        + (
            (
                flags.make_flags('match-archives', f'sh:{info_arguments.prefix}*')
                if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
                else flags.make_flags('glob-archives', f'{info_arguments.prefix}*')
            )
            if info_arguments.prefix
            else ()
        )
        + flags.make_flags_from_arguments(
            info_arguments, excludes=('repository', 'archive', 'prefix')
        )
        + flags.make_repository_flags(repository, local_borg_version)
        + (
            flags.make_flags('match-archives', info_arguments.archive)
            if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
            else flags.make_flags('glob-archives', info_arguments.archive)
        )
    )

    if info_arguments.json:
        return execute_command_and_capture_output(
            full_command, extra_environment=environment.make_environment(storage_config)
        )
    else:
        execute_command(
            full_command,
            output_log_level=logging.ANSWER,
            borg_local_path=local_path,
            extra_environment=environment.make_environment(storage_config),
        )

borgmatic/borg/list.py (new file, 240 lines)
@@ -0,0 +1,240 @@
import argparse
import copy
import logging
import re

import borgmatic.logger
from borgmatic.borg import environment, feature, flags, rlist
from borgmatic.execute import execute_command, execute_command_and_capture_output

logger = logging.getLogger(__name__)


ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST = ('prefix', 'match_archives', 'sort_by', 'first', 'last')
MAKE_FLAGS_EXCLUDES = (
    'repository',
    'archive',
    'successful',
    'paths',
    'find_paths',
) + ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST


def make_list_command(
    repository,
    storage_config,
    local_borg_version,
    list_arguments,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a local or remote repository path, a storage config dict, the local Borg version, the
    arguments to the list action, and local and remote Borg paths, return a command as a tuple to
    list archives or paths within an archive.
    '''
    lock_wait = storage_config.get('lock_wait', None)

    return (
        (local_path, 'list')
        + (
            ('--info',)
            if logger.getEffectiveLevel() == logging.INFO and not list_arguments.json
            else ()
        )
        + (
            ('--debug', '--show-rc')
            if logger.isEnabledFor(logging.DEBUG) and not list_arguments.json
            else ()
        )
        + flags.make_flags('remote-path', remote_path)
        + flags.make_flags('lock-wait', lock_wait)
        + flags.make_flags_from_arguments(list_arguments, excludes=MAKE_FLAGS_EXCLUDES)
        + (
            flags.make_repository_archive_flags(
                repository, list_arguments.archive, local_borg_version
            )
            if list_arguments.archive
            else flags.make_repository_flags(repository, local_borg_version)
        )
        + (tuple(list_arguments.paths) if list_arguments.paths else ())
    )

def make_find_paths(find_paths):
    '''
    Given a sequence of path fragments or patterns as passed to `--find`, transform all path
    fragments into glob patterns. Pass through existing patterns untouched.

    For example, given find_paths of:

        ['foo.txt', 'pp:root/somedir']

    ... transform that into:

        ['sh:**/*foo.txt*/**', 'pp:root/somedir']
    '''
    if not find_paths:
        return ()

    return tuple(
        find_path
        if re.compile(r'([-!+RrPp] )|(\w\w:)').match(find_path)
        else f'sh:**/*{find_path}*/**'
        for find_path in find_paths
    )

def capture_archive_listing(
    repository,
    archive,
    storage_config,
    local_borg_version,
    list_path=None,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a local or remote repository path, an archive name, a storage config dict, the local
    Borg version, the archive path in which to list files, and local and remote Borg paths,
    capture the output of listing that archive and return it as a list of file paths.
    '''
    borg_environment = environment.make_environment(storage_config)

    return tuple(
        execute_command_and_capture_output(
            make_list_command(
                repository,
                storage_config,
                local_borg_version,
                argparse.Namespace(
                    repository=repository,
                    archive=archive,
                    paths=[f'sh:{list_path}'],
                    find_paths=None,
                    json=None,
                    format='{path}{NL}',
                ),
                local_path,
                remote_path,
            ),
            extra_environment=borg_environment,
        )
        .strip('\n')
        .split('\n')
    )

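A hypothetical use, capturing the paths under one directory of an archive (the repository, archive name, and list path shown are illustrative):

    paths = capture_archive_listing(
        repository='/mnt/backups/repo.borg',
        archive='myhost-2023-01-01T00:00:00.000000',
        storage_config={},
        local_borg_version='1.2.3',
        list_path='etc',  # Expanded internally to the pattern 'sh:etc'.
    )
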
def list_archive(
    repository,
    storage_config,
    local_borg_version,
    list_arguments,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a local or remote repository path, a storage config dict, the local Borg version, the
    arguments to the list action, and local and remote Borg paths, display the output of listing
    the files of a Borg archive (or return JSON output). If list_arguments.find_paths are given,
    list the files by searching across multiple archives. If neither find_paths nor an archive
    name is given, instead list the archives in the given repository.
    '''
    borgmatic.logger.add_custom_log_levels()

    if not list_arguments.archive and not list_arguments.find_paths:
        if feature.available(feature.Feature.RLIST, local_borg_version):
            logger.warning(
                'Omitting the --archive flag on the list action is deprecated when using Borg 2.x+. Use the rlist action instead.'
            )

        rlist_arguments = argparse.Namespace(
            repository=repository,
            short=list_arguments.short,
            format=list_arguments.format,
            json=list_arguments.json,
            prefix=list_arguments.prefix,
            match_archives=list_arguments.match_archives,
            sort_by=list_arguments.sort_by,
            first=list_arguments.first,
            last=list_arguments.last,
        )
        return rlist.list_repository(
            repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path
        )

    if list_arguments.archive:
        for name in ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST:
            if getattr(list_arguments, name, None):
                logger.warning(
                    f"The --{name.replace('_', '-')} flag on the list action is ignored when using the --archive flag."
                )

        if list_arguments.json:
            raise ValueError(
                'The --json flag on the list action is not supported when using the --archive/--find flags.'
            )

    borg_environment = environment.make_environment(storage_config)

    # If there are any paths to find (and there's not a single archive already selected), start
    # by getting a list of archives to search.
    if list_arguments.find_paths and not list_arguments.archive:
        rlist_arguments = argparse.Namespace(
            repository=repository,
            short=True,
            format=None,
            json=None,
            prefix=list_arguments.prefix,
            match_archives=list_arguments.match_archives,
            sort_by=list_arguments.sort_by,
            first=list_arguments.first,
            last=list_arguments.last,
        )

        # Ask Borg to list archives. Capture its output for use below.
        archive_lines = tuple(
            execute_command_and_capture_output(
                rlist.make_rlist_command(
                    repository,
                    storage_config,
                    local_borg_version,
                    rlist_arguments,
                    local_path,
                    remote_path,
                ),
                extra_environment=borg_environment,
            )
            .strip('\n')
            .split('\n')
        )
    else:
        archive_lines = (list_arguments.archive,)

    # For each archive listed by Borg, run list on the contents of that archive.
    for archive in archive_lines:
        logger.answer(f'{repository}: Listing archive {archive}')

        archive_arguments = copy.copy(list_arguments)
        archive_arguments.archive = archive

        # This list call is to show the files in a single archive, not list multiple archives.
        # So blank out any archive filtering flags. They'll break anyway in Borg 2.
        for name in ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST:
            setattr(archive_arguments, name, None)

        main_command = make_list_command(
            repository,
            storage_config,
            local_borg_version,
            archive_arguments,
            local_path,
            remote_path,
        ) + make_find_paths(list_arguments.find_paths)

        execute_command(
            main_command,
            output_log_level=logging.ANSWER,
            borg_local_path=local_path,
            extra_environment=borg_environment,
        )

borgmatic/borg/mount.py (new file, 71 lines)
@@ -0,0 +1,71 @@
import logging
|
||||||
|
|
||||||
|
from borgmatic.borg import environment, feature, flags
|
||||||
|
from borgmatic.execute import DO_NOT_CAPTURE, execute_command
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def mount_archive(
|
||||||
|
repository,
|
||||||
|
archive,
|
||||||
|
mount_point,
|
||||||
|
paths,
|
||||||
|
foreground,
|
||||||
|
options,
|
||||||
|
storage_config,
|
||||||
|
local_borg_version,
|
||||||
|
local_path='borg',
|
||||||
|
remote_path=None,
|
||||||
|
):
|
||||||
|
'''
|
||||||
|
Given a local or remote repository path, an optional archive name, a filesystem mount point,
|
||||||
|
zero or more paths to mount from the archive, extra Borg mount options, a storage configuration
|
||||||
|
dict, the local Borg version, and optional local and remote Borg paths, mount the archive onto
|
||||||
|
the mount point.
|
||||||
|
'''
|
||||||
|
umask = storage_config.get('umask', None)
|
||||||
|
lock_wait = storage_config.get('lock_wait', None)
|
||||||
|
|
||||||
|
full_command = (
|
||||||
|
(local_path, 'mount')
|
||||||
|
+ (('--remote-path', remote_path) if remote_path else ())
|
||||||
|
+ (('--umask', str(umask)) if umask else ())
|
||||||
|
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
|
||||||
|
+ (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
|
||||||
|
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
|
||||||
|
+ (('--foreground',) if foreground else ())
|
||||||
|
+ (('-o', options) if options else ())
|
||||||
|
+ (
|
||||||
|
(
|
||||||
|
flags.make_repository_flags(repository, local_borg_version)
|
||||||
|
+ (
|
||||||
|
('--match-archives', archive)
|
||||||
|
if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
|
||||||
|
else ('--glob-archives', archive)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version)
|
||||||
|
else (
|
||||||
|
flags.make_repository_archive_flags(repository, archive, local_borg_version)
|
||||||
|
if archive
|
||||||
|
else flags.make_repository_flags(repository, local_borg_version)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
+ (mount_point,)
|
||||||
|
+ (tuple(paths) if paths else ())
|
||||||
|
)
|
||||||
|
|
||||||
|
borg_environment = environment.make_environment(storage_config)
|
||||||
|
|
||||||
|
# Don't capture the output when foreground mode is used so that ctrl-C can work properly.
|
||||||
|
if foreground:
|
||||||
|
execute_command(
|
||||||
|
full_command,
|
||||||
|
output_file=DO_NOT_CAPTURE,
|
||||||
|
borg_local_path=local_path,
|
||||||
|
extra_environment=borg_environment,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment)
|
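As a usage sketch, mounting an archive in the foreground might look like the following; the paths, archive name, and Borg version are placeholders, not values from this change:

# Hypothetical usage of mount_archive(); every value here is a placeholder.
from borgmatic.borg import mount

mount.mount_archive(
    repository='/var/backups/repo.borg',  # placeholder repository path
    archive='host-2022-01-01T00:00:00',   # placeholder archive name; None mounts the whole repository
    mount_point='/mnt/borg',              # placeholder mount point (must already exist)
    paths=None,                           # mount the entire archive
    foreground=True,                      # block until ctrl-C, then unmount
    options=None,                         # no extra Borg mount options
    storage_config={'lock_wait': 5},      # minimal storage config
    local_borg_version='1.2.0',           # placeholder Borg version
)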
borgmatic/borg/prune.py (new file, 90 lines)
@@ -0,0 +1,90 @@
import logging

import borgmatic.logger
from borgmatic.borg import environment, feature, flags
from borgmatic.execute import execute_command

logger = logging.getLogger(__name__)


def make_prune_flags(retention_config, local_borg_version):
    '''
    Given a retention config dict mapping from option name to value, transform it into an iterable
    of command-line name-value flag pairs.

    For example, given a retention config of:

        {'keep_weekly': 4, 'keep_monthly': 6}

    This will be returned as an iterable of:

        (
            ('--keep-weekly', '4'),
            ('--keep-monthly', '6'),
        )
    '''
    config = retention_config.copy()
    prefix = config.pop('prefix', '{hostname}-')

    if prefix:
        if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):
            config['match_archives'] = f'sh:{prefix}*'
        else:
            config['glob_archives'] = f'{prefix}*'

    return (
        ('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items()
    )


def prune_archives(
    dry_run,
    repository,
    storage_config,
    retention_config,
    local_borg_version,
    local_path='borg',
    remote_path=None,
    stats=False,
    list_archives=False,
):
    '''
    Given a dry-run flag, a local or remote repository path, a storage config dict, and a
    retention config dict, prune Borg archives according to the retention policy specified in that
    configuration.
    '''
    borgmatic.logger.add_custom_log_levels()
    umask = storage_config.get('umask', None)
    lock_wait = storage_config.get('lock_wait', None)
    extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '')

    full_command = (
        (local_path, 'prune')
        + tuple(
            element
            for pair in make_prune_flags(retention_config, local_borg_version)
            for element in pair
        )
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--stats',) if stats and not dry_run else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--list',) if list_archives else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + (('--dry-run',) if dry_run else ())
        + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
        + flags.make_repository_flags(repository, local_borg_version)
    )

    if stats or list_archives:
        output_log_level = logging.ANSWER
    else:
        output_log_level = logging.INFO

    execute_command(
        full_command,
        output_log_level=output_log_level,
        borg_local_path=local_path,
        extra_environment=environment.make_environment(storage_config),
    )
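To make the flag transformation concrete, here is a small sketch; the version string is a placeholder and is assumed to predate the MATCH_ARCHIVES feature, so the prefix becomes a --glob-archives flag:

# Illustrative only: flatten a retention config into Borg prune flags.
from borgmatic.borg.prune import make_prune_flags

retention_config = {'keep_daily': 7, 'keep_weekly': 4}
flag_pairs = make_prune_flags(retention_config, '1.2.0')  # placeholder version

print(tuple(flag_pairs))
# With the default '{hostname}-' prefix, this prints something like:
# (('--keep-daily', '7'), ('--keep-weekly', '4'), ('--glob-archives', '{hostname}-*'))

prune_archives then interleaves these pairs into the full command line via the nested generator expression shown above.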
borgmatic/borg/rcreate.py (new file, 81 lines)
@@ -0,0 +1,81 @@
import argparse
import logging
import subprocess

from borgmatic.borg import environment, feature, flags, rinfo
from borgmatic.execute import DO_NOT_CAPTURE, execute_command

logger = logging.getLogger(__name__)


RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2


def create_repository(
    dry_run,
    repository,
    storage_config,
    local_borg_version,
    encryption_mode,
    source_repository=None,
    copy_crypt_key=False,
    append_only=None,
    storage_quota=None,
    make_parent_dirs=False,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a dry-run flag, a local or remote repository path, a storage configuration dict, the
    local Borg version, a Borg encryption mode, the path to another repo whose key material should
    be reused, whether the repository should be append-only, and the storage quota to use, create
    the repository. If the repository already exists, then log and skip creation.
    '''
    try:
        rinfo.display_repository_info(
            repository,
            storage_config,
            local_borg_version,
            argparse.Namespace(json=True),
            local_path,
            remote_path,
        )
        logger.info(f'{repository}: Repository already exists. Skipping creation.')
        return
    except subprocess.CalledProcessError as error:
        if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
            raise

    extra_borg_options = storage_config.get('extra_borg_options', {}).get('rcreate', '')

    rcreate_command = (
        (local_path,)
        + (
            ('rcreate',)
            if feature.available(feature.Feature.RCREATE, local_borg_version)
            else ('init',)
        )
        + (('--encryption', encryption_mode) if encryption_mode else ())
        + (('--other-repo', source_repository) if source_repository else ())
        + (('--copy-crypt-key',) if copy_crypt_key else ())
        + (('--append-only',) if append_only else ())
        + (('--storage-quota', storage_quota) if storage_quota else ())
        + (('--make-parent-dirs',) if make_parent_dirs else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
        + (('--remote-path', remote_path) if remote_path else ())
        + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
        + flags.make_repository_flags(repository, local_borg_version)
    )

    if dry_run:
        # Use the module logger rather than the root logging module here.
        logger.info(f'{repository}: Skipping repository creation (dry run)')
        return

    # Do not capture output here, so as to support interactive prompts.
    execute_command(
        rcreate_command,
        output_file=DO_NOT_CAPTURE,
        borg_local_path=local_path,
        extra_environment=environment.make_environment(storage_config),
    )
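A usage sketch follows; the repository path, encryption mode, and version string are placeholders. Note that the rcreate/init split is handled internally via the feature check:

# Hypothetical call creating an encrypted repository; values are placeholders.
from borgmatic.borg import rcreate

rcreate.create_repository(
    dry_run=False,
    repository='/var/backups/new-repo.borg',  # placeholder path
    storage_config={},
    local_borg_version='2.0.0b1',             # placeholder; "init" is used for Borg 1.x
    encryption_mode='repokey-blake2',
)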
borgmatic/borg/rinfo.py (new file, 61 lines)
@@ -0,0 +1,61 @@
import logging

import borgmatic.logger
from borgmatic.borg import environment, feature, flags
from borgmatic.execute import execute_command, execute_command_and_capture_output

logger = logging.getLogger(__name__)


def display_repository_info(
    repository,
    storage_config,
    local_borg_version,
    rinfo_arguments,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a local or remote repository path, a storage config dict, the local Borg version, and the
    arguments to the rinfo action, display summary information for the Borg repository or return
    JSON summary information.
    '''
    borgmatic.logger.add_custom_log_levels()
    lock_wait = storage_config.get('lock_wait', None)

    full_command = (
        (local_path,)
        + (
            ('rinfo',)
            if feature.available(feature.Feature.RINFO, local_borg_version)
            else ('info',)
        )
        + (
            ('--info',)
            if logger.getEffectiveLevel() == logging.INFO and not rinfo_arguments.json
            else ()
        )
        + (
            ('--debug', '--show-rc')
            if logger.isEnabledFor(logging.DEBUG) and not rinfo_arguments.json
            else ()
        )
        + flags.make_flags('remote-path', remote_path)
        + flags.make_flags('lock-wait', lock_wait)
        + (('--json',) if rinfo_arguments.json else ())
        + flags.make_repository_flags(repository, local_borg_version)
    )

    extra_environment = environment.make_environment(storage_config)

    if rinfo_arguments.json:
        return execute_command_and_capture_output(
            full_command, extra_environment=extra_environment,
        )
    else:
        execute_command(
            full_command,
            output_log_level=logging.ANSWER,
            borg_local_path=local_path,
            extra_environment=extra_environment,
        )
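Since the JSON branch returns Borg's raw output, a caller can parse it directly; a sketch, with the repository path and version as placeholders:

# Hypothetical: fetch repository info as parsed JSON. Values are placeholders.
import argparse
import json

from borgmatic.borg import rinfo

output = rinfo.display_repository_info(
    repository='/var/backups/repo.borg',  # placeholder path
    storage_config={},
    local_borg_version='1.2.0',           # placeholder version
    rinfo_arguments=argparse.Namespace(json=True),
)
repository_info = json.loads(output)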
borgmatic/borg/rlist.py (new file, 127 lines)
@@ -0,0 +1,127 @@
import logging

import borgmatic.logger
from borgmatic.borg import environment, feature, flags
from borgmatic.execute import execute_command, execute_command_and_capture_output

logger = logging.getLogger(__name__)


def resolve_archive_name(
    repository, archive, storage_config, local_borg_version, local_path='borg', remote_path=None
):
    '''
    Given a local or remote repository path, an archive name, a storage config dict, a local Borg
    path, and a remote Borg path, simply return the archive name. But if the archive name is
    "latest", then instead introspect the repository for the latest archive and return its name.

    Raise ValueError if "latest" is given but there are no archives in the repository.
    '''
    if archive != 'latest':
        return archive

    lock_wait = storage_config.get('lock_wait', None)

    full_command = (
        (
            local_path,
            'rlist' if feature.available(feature.Feature.RLIST, local_borg_version) else 'list',
        )
        + flags.make_flags('remote-path', remote_path)
        + flags.make_flags('lock-wait', lock_wait)
        + flags.make_flags('last', 1)
        + ('--short',)
        + flags.make_repository_flags(repository, local_borg_version)
    )

    output = execute_command_and_capture_output(
        full_command, extra_environment=environment.make_environment(storage_config),
    )
    try:
        latest_archive = output.strip().splitlines()[-1]
    except IndexError:
        raise ValueError('No archives found in the repository')

    logger.debug(f'{repository}: Latest archive is {latest_archive}')

    return latest_archive


MAKE_FLAGS_EXCLUDES = ('repository', 'prefix')


def make_rlist_command(
    repository,
    storage_config,
    local_borg_version,
    rlist_arguments,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a local or remote repository path, a storage config dict, the local Borg version, the
    arguments to the rlist action, and local and remote Borg paths, return a command as a tuple to
    list archives within a repository.
    '''
    lock_wait = storage_config.get('lock_wait', None)

    return (
        (
            local_path,
            'rlist' if feature.available(feature.Feature.RLIST, local_borg_version) else 'list',
        )
        + (
            ('--info',)
            if logger.getEffectiveLevel() == logging.INFO and not rlist_arguments.json
            else ()
        )
        + (
            ('--debug', '--show-rc')
            if logger.isEnabledFor(logging.DEBUG) and not rlist_arguments.json
            else ()
        )
        + flags.make_flags('remote-path', remote_path)
        + flags.make_flags('lock-wait', lock_wait)
        + (
            (
                flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*')
                if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version)
                else flags.make_flags('glob-archives', f'{rlist_arguments.prefix}*')
            )
            if rlist_arguments.prefix
            else ()
        )
        + flags.make_flags_from_arguments(rlist_arguments, excludes=MAKE_FLAGS_EXCLUDES)
        + flags.make_repository_flags(repository, local_borg_version)
    )


def list_repository(
    repository,
    storage_config,
    local_borg_version,
    rlist_arguments,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a local or remote repository path, a storage config dict, the local Borg version, the
    arguments to the list action, and local and remote Borg paths, display the output of listing
    Borg archives in the given repository (or return JSON output).
    '''
    borgmatic.logger.add_custom_log_levels()
    borg_environment = environment.make_environment(storage_config)

    main_command = make_rlist_command(
        repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path
    )

    if rlist_arguments.json:
        return execute_command_and_capture_output(main_command, extra_environment=borg_environment)
    else:
        execute_command(
            main_command,
            output_log_level=logging.ANSWER,
            borg_local_path=local_path,
            extra_environment=borg_environment,
        )
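resolve_archive_name is what lets other actions accept "--archive latest"; a sketch of that lookup, with placeholder values:

# Hypothetical: turn the magic name "latest" into a concrete archive name.
from borgmatic.borg import rlist

archive_name = rlist.resolve_archive_name(
    repository='/var/backups/repo.borg',  # placeholder path
    archive='latest',
    storage_config={},
    local_borg_version='1.2.0',           # placeholder version
)
# Under the hood this runs roughly: borg list --last 1 --short <repository>
# and takes the last line of output; ValueError is raised for an empty repository.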
borgmatic/borg/state.py (new file, 1 line)
@@ -0,0 +1 @@
DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'
borgmatic/borg/transfer.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import logging

import borgmatic.logger
from borgmatic.borg import environment, flags
from borgmatic.execute import DO_NOT_CAPTURE, execute_command

logger = logging.getLogger(__name__)


def transfer_archives(
    dry_run,
    repository,
    storage_config,
    local_borg_version,
    transfer_arguments,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg
    version, and the arguments to the transfer action, transfer archives to the given repository.
    '''
    borgmatic.logger.add_custom_log_levels()

    full_command = (
        (local_path, 'transfer')
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + flags.make_flags('remote-path', remote_path)
        + flags.make_flags('lock-wait', storage_config.get('lock_wait', None))
        + (('--progress',) if transfer_arguments.progress else ())
        + (
            flags.make_flags(
                'match-archives', transfer_arguments.match_archives or transfer_arguments.archive
            )
        )
        + flags.make_flags_from_arguments(
            transfer_arguments,
            excludes=('repository', 'source_repository', 'archive', 'match_archives'),
        )
        + flags.make_repository_flags(repository, local_borg_version)
        + flags.make_flags('other-repo', transfer_arguments.source_repository)
        + flags.make_flags('dry-run', dry_run)
    )

    return execute_command(
        full_command,
        output_log_level=logging.ANSWER,
        output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None,
        borg_local_path=local_path,
        extra_environment=environment.make_environment(storage_config),
    )
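A usage sketch for upgrading a repository's archives; every path and version is a placeholder, and the namespace below spells out only the transfer flags visible in this change:

# Hypothetical: transfer all archives from an old Borg 1.2 repository into a
# new Borg 2.x one, upgrading the data format. All values are placeholders.
import argparse

from borgmatic.borg import transfer

transfer.transfer_archives(
    dry_run=True,                              # preview what would be transferred
    repository='/var/backups/new-repo.borg',   # placeholder destination
    storage_config={},
    local_borg_version='2.0.0b1',              # placeholder version
    transfer_arguments=argparse.Namespace(
        source_repository='/var/backups/old-repo.borg',  # placeholder source
        archive=None,
        match_archives=None,
        upgrader='From12To20',                 # Borg 1.2 -> 2.0 data conversion
        progress=False,
        sort_by=None,
        first=None,
        last=None,
    ),
)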
borgmatic/borg/umount.py (new file, 20 lines)
@@ -0,0 +1,20 @@
import logging

from borgmatic.execute import execute_command

logger = logging.getLogger(__name__)


def unmount_archive(mount_point, local_path='borg'):
    '''
    Given a mounted filesystem mount point, and an optional local Borg path, unmount the
    filesystem from the mount point.
    '''
    full_command = (
        (local_path, 'umount')
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + (mount_point,)
    )

    execute_command(full_command)
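The counterpart to the mount sketch above; the mount point is a placeholder:

# Hypothetical usage; path is a placeholder.
from borgmatic.borg import umount

umount.unmount_archive(mount_point='/mnt/borg')  # runs: borg umount /mnt/borg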
borgmatic/borg/version.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import logging

from borgmatic.borg import environment
from borgmatic.execute import execute_command_and_capture_output

logger = logging.getLogger(__name__)


def local_borg_version(storage_config, local_path='borg'):
    '''
    Given a storage configuration dict and a local Borg binary path, return a version string for
    it.

    Raise OSError or CalledProcessError if there is a problem running Borg.
    Raise ValueError if the version cannot be parsed.
    '''
    full_command = (
        (local_path, '--version')
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
    )
    output = execute_command_and_capture_output(
        full_command, extra_environment=environment.make_environment(storage_config),
    )

    try:
        return output.split(' ')[1].strip()
    except IndexError:
        raise ValueError('Could not parse Borg version string')
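The parsing here relies on "borg --version" printing output in the form "borg 1.2.3"; a sketch:

# Hypothetical: detect the version of the local Borg binary.
from borgmatic.borg import version

borg_version_string = version.local_borg_version(storage_config={})
# For output such as "borg 1.2.3", output.split(' ')[1].strip() yields "1.2.3".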
borgmatic/commands/arguments.py (new file, 876 lines)
@@ -0,0 +1,876 @@
|
||||||
|
import collections
|
||||||
|
from argparse import Action, ArgumentParser
|
||||||
|
|
||||||
|
from borgmatic.config import collect
|
||||||
|
|
||||||
|
SUBPARSER_ALIASES = {
|
||||||
|
'rcreate': ['init', '-I'],
|
||||||
|
'prune': ['-p'],
|
||||||
|
'compact': [],
|
||||||
|
'create': ['-C'],
|
||||||
|
'check': ['-k'],
|
||||||
|
'extract': ['-x'],
|
||||||
|
'export-tar': [],
|
||||||
|
'mount': ['-m'],
|
||||||
|
'umount': ['-u'],
|
||||||
|
'restore': ['-r'],
|
||||||
|
'rlist': [],
|
||||||
|
'list': ['-l'],
|
||||||
|
'rinfo': [],
|
||||||
|
'info': ['-i'],
|
||||||
|
'transfer': [],
|
||||||
|
'break-lock': [],
|
||||||
|
'borg': [],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def parse_subparser_arguments(unparsed_arguments, subparsers):
|
||||||
|
'''
|
||||||
|
Given a sequence of arguments and a dict from subparser name to argparse.ArgumentParser
|
||||||
|
instance, give each requested action's subparser a shot at parsing all arguments. This allows
|
||||||
|
common arguments like "--repository" to be shared across multiple subparsers.
|
||||||
|
|
||||||
|
Return the result as a tuple of (a dict mapping from subparser name to a parsed namespace of
|
||||||
|
arguments, a list of remaining arguments not claimed by any subparser).
|
||||||
|
'''
|
||||||
|
arguments = collections.OrderedDict()
|
||||||
|
remaining_arguments = list(unparsed_arguments)
|
||||||
|
alias_to_subparser_name = {
|
||||||
|
alias: subparser_name
|
||||||
|
for subparser_name, aliases in SUBPARSER_ALIASES.items()
|
||||||
|
for alias in aliases
|
||||||
|
}
|
||||||
|
|
||||||
|
# If the "borg" action is used, skip all other subparsers. This avoids confusion like
|
||||||
|
# "borg list" triggering borgmatic's own list action.
|
||||||
|
if 'borg' in unparsed_arguments:
|
||||||
|
subparsers = {'borg': subparsers['borg']}
|
||||||
|
|
||||||
|
for argument in remaining_arguments:
|
||||||
|
canonical_name = alias_to_subparser_name.get(argument, argument)
|
||||||
|
subparser = subparsers.get(canonical_name)
|
||||||
|
|
||||||
|
if not subparser:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# If a parsed value happens to be the same as the name of a subparser, remove it from the
|
||||||
|
# remaining arguments. This prevents, for instance, "check --only extract" from triggering
|
||||||
|
# the "extract" subparser.
|
||||||
|
parsed, unused_remaining = subparser.parse_known_args(unparsed_arguments)
|
||||||
|
for value in vars(parsed).values():
|
||||||
|
if isinstance(value, str):
|
||||||
|
if value in subparsers:
|
||||||
|
remaining_arguments.remove(value)
|
||||||
|
elif isinstance(value, list):
|
||||||
|
for item in value:
|
||||||
|
if item in subparsers:
|
||||||
|
remaining_arguments.remove(item)
|
||||||
|
|
||||||
|
arguments[canonical_name] = parsed
|
||||||
|
|
||||||
|
# If no actions are explicitly requested, assume defaults.
|
||||||
|
if not arguments and '--help' not in unparsed_arguments and '-h' not in unparsed_arguments:
|
||||||
|
for subparser_name in ('create', 'prune', 'compact', 'check'):
|
||||||
|
subparser = subparsers[subparser_name]
|
||||||
|
parsed, unused_remaining = subparser.parse_known_args(unparsed_arguments)
|
||||||
|
arguments[subparser_name] = parsed
|
||||||
|
|
||||||
|
remaining_arguments = list(unparsed_arguments)
|
||||||
|
|
||||||
|
# Now ask each subparser, one by one, to greedily consume arguments.
|
||||||
|
for subparser_name, subparser in subparsers.items():
|
||||||
|
if subparser_name not in arguments.keys():
|
||||||
|
continue
|
||||||
|
|
||||||
|
subparser = subparsers[subparser_name]
|
||||||
|
unused_parsed, remaining_arguments = subparser.parse_known_args(remaining_arguments)
|
||||||
|
|
||||||
|
# Special case: If "borg" is present in the arguments, consume all arguments after (+1) the
|
||||||
|
# "borg" action.
|
||||||
|
if 'borg' in arguments:
|
||||||
|
borg_options_index = remaining_arguments.index('borg') + 1
|
||||||
|
arguments['borg'].options = remaining_arguments[borg_options_index:]
|
||||||
|
remaining_arguments = remaining_arguments[:borg_options_index]
|
||||||
|
|
||||||
|
# Remove the subparser names themselves.
|
||||||
|
for subparser_name, subparser in subparsers.items():
|
||||||
|
if subparser_name in remaining_arguments:
|
||||||
|
remaining_arguments.remove(subparser_name)
|
||||||
|
|
||||||
|
return (arguments, remaining_arguments)
|
||||||
|
|
||||||
|
|
||||||
|
class Extend_action(Action):
|
||||||
|
'''
|
||||||
|
An argparse action to support Python 3.8's "extend" action in older versions of Python.
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __call__(self, parser, namespace, values, option_string=None):
|
||||||
|
items = getattr(namespace, self.dest, None)
|
||||||
|
|
||||||
|
if items:
|
||||||
|
items.extend(values)
|
||||||
|
else:
|
||||||
|
setattr(namespace, self.dest, list(values))
|
||||||
|
|
||||||
|
|
||||||
|
def make_parsers():
|
||||||
|
'''
|
||||||
|
Build a top-level parser and its subparsers and return them as a tuple.
|
||||||
|
'''
|
||||||
|
config_paths = collect.get_default_config_paths(expand_home=True)
|
||||||
|
unexpanded_config_paths = collect.get_default_config_paths(expand_home=False)
|
||||||
|
|
||||||
|
global_parser = ArgumentParser(add_help=False)
|
||||||
|
global_parser.register('action', 'extend', Extend_action)
|
||||||
|
global_group = global_parser.add_argument_group('global arguments')
|
||||||
|
|
||||||
|
global_group.add_argument(
|
||||||
|
'-c',
|
||||||
|
'--config',
|
||||||
|
nargs='*',
|
||||||
|
dest='config_paths',
|
||||||
|
default=config_paths,
|
||||||
|
help='Configuration filenames or directories, defaults to: {}'.format(
|
||||||
|
' '.join(unexpanded_config_paths)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--excludes',
|
||||||
|
dest='excludes_filename',
|
||||||
|
help='Deprecated in favor of exclude_patterns within configuration',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'-n',
|
||||||
|
'--dry-run',
|
||||||
|
dest='dry_run',
|
||||||
|
action='store_true',
|
||||||
|
help='Go through the motions, but do not actually write to any repositories',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'-nc', '--no-color', dest='no_color', action='store_true', help='Disable colored output'
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'-v',
|
||||||
|
'--verbosity',
|
||||||
|
type=int,
|
||||||
|
choices=range(-1, 3),
|
||||||
|
default=0,
|
||||||
|
help='Display verbose progress to the console (from only errors to very verbose: -1, 0, 1, or 2)',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--syslog-verbosity',
|
||||||
|
type=int,
|
||||||
|
choices=range(-1, 3),
|
||||||
|
default=0,
|
||||||
|
help='Log verbose progress to syslog (from only errors to very verbose: -1, 0, 1, or 2). Ignored when console is interactive or --log-file is given',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--log-file-verbosity',
|
||||||
|
type=int,
|
||||||
|
choices=range(-1, 3),
|
||||||
|
default=0,
|
||||||
|
help='Log verbose progress to log file (from only errors to very verbose: -1, 0, 1, or 2). Only used when --log-file is given',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--monitoring-verbosity',
|
||||||
|
type=int,
|
||||||
|
choices=range(-1, 3),
|
||||||
|
default=0,
|
||||||
|
help='Log verbose progress to monitoring integrations that support logging (from only errors to very verbose: -1, 0, 1, or 2)',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--log-file',
|
||||||
|
type=str,
|
||||||
|
default=None,
|
||||||
|
help='Write log messages to this file instead of syslog',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--override',
|
||||||
|
metavar='SECTION.OPTION=VALUE',
|
||||||
|
nargs='+',
|
||||||
|
dest='overrides',
|
||||||
|
action='extend',
|
||||||
|
help='One or more configuration file options to override with specified values',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--no-environment-interpolation',
|
||||||
|
dest='resolve_env',
|
||||||
|
action='store_false',
|
||||||
|
help='Do not resolve environment variables in configuration file',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--bash-completion',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Show bash completion script and exit',
|
||||||
|
)
|
||||||
|
global_group.add_argument(
|
||||||
|
'--version',
|
||||||
|
dest='version',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Display installed version number of borgmatic and exit',
|
||||||
|
)
|
||||||
|
|
||||||
|
top_level_parser = ArgumentParser(
|
||||||
|
description='''
|
||||||
|
Simple, configuration-driven backup software for servers and workstations. If none of
|
||||||
|
the action options are given, then borgmatic defaults to: create, prune, compact, and
|
||||||
|
check.
|
||||||
|
''',
|
||||||
|
parents=[global_parser],
|
||||||
|
)
|
||||||
|
|
||||||
|
subparsers = top_level_parser.add_subparsers(
|
||||||
|
title='actions',
|
||||||
|
metavar='',
|
||||||
|
help='Specify zero or more actions. Defaults to creat, prune, compact, and check. Use --help with action for details:',
|
||||||
|
)
|
||||||
|
rcreate_parser = subparsers.add_parser(
|
||||||
|
'rcreate',
|
||||||
|
aliases=SUBPARSER_ALIASES['rcreate'],
|
||||||
|
help='Create a new, empty Borg repository',
|
||||||
|
description='Create a new, empty Borg repository',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
rcreate_group = rcreate_parser.add_argument_group('rcreate arguments')
|
||||||
|
rcreate_group.add_argument(
|
||||||
|
'-e',
|
||||||
|
'--encryption',
|
||||||
|
dest='encryption_mode',
|
||||||
|
help='Borg repository encryption mode',
|
||||||
|
required=True,
|
||||||
|
)
|
||||||
|
rcreate_group.add_argument(
|
||||||
|
'--source-repository',
|
||||||
|
'--other-repo',
|
||||||
|
metavar='KEY_REPOSITORY',
|
||||||
|
help='Path to an existing Borg repository whose key material should be reused (Borg 2.x+ only)',
|
||||||
|
)
|
||||||
|
rcreate_group.add_argument(
|
||||||
|
'--repository',
|
||||||
|
help='Path of the new repository to create (must be already specified in a borgmatic configuration file), defaults to the configured repository if there is only one',
|
||||||
|
)
|
||||||
|
rcreate_group.add_argument(
|
||||||
|
'--copy-crypt-key',
|
||||||
|
action='store_true',
|
||||||
|
help='Copy the crypt key used for authenticated encryption from the source repository, defaults to a new random key (Borg 2.x+ only)',
|
||||||
|
)
|
||||||
|
rcreate_group.add_argument(
|
||||||
|
'--append-only', action='store_true', help='Create an append-only repository',
|
||||||
|
)
|
||||||
|
rcreate_group.add_argument(
|
||||||
|
'--storage-quota', help='Create a repository with a fixed storage quota',
|
||||||
|
)
|
||||||
|
rcreate_group.add_argument(
|
||||||
|
'--make-parent-dirs',
|
||||||
|
action='store_true',
|
||||||
|
help='Create any missing parent directories of the repository directory',
|
||||||
|
)
|
||||||
|
rcreate_group.add_argument(
|
||||||
|
'-h', '--help', action='help', help='Show this help message and exit'
|
||||||
|
)
|
||||||
|
|
||||||
|
transfer_parser = subparsers.add_parser(
|
||||||
|
'transfer',
|
||||||
|
aliases=SUBPARSER_ALIASES['transfer'],
|
||||||
|
help='Transfer archives from one repository to another, optionally upgrading the transferred data (Borg 2.0+ only)',
|
||||||
|
description='Transfer archives from one repository to another, optionally upgrading the transferred data (Borg 2.0+ only)',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
transfer_group = transfer_parser.add_argument_group('transfer arguments')
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'--repository',
|
||||||
|
help='Path of existing destination repository to transfer archives to, defaults to the configured repository if there is only one',
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'--source-repository',
|
||||||
|
help='Path of existing source repository to transfer archives from',
|
||||||
|
required=True,
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'--archive',
|
||||||
|
help='Name of single archive to transfer (or "latest"), defaults to transferring all archives',
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'--upgrader',
|
||||||
|
help='Upgrader type used to convert the transfered data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion',
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'--progress',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Display progress as each archive is transferred',
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'-a',
|
||||||
|
'--match-archives',
|
||||||
|
'--glob-archives',
|
||||||
|
metavar='PATTERN',
|
||||||
|
help='Only transfer archives with names matching this pattern',
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'--first',
|
||||||
|
metavar='N',
|
||||||
|
help='Only transfer first N archives after other filters are applied',
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'--last', metavar='N', help='Only transfer last N archives after other filters are applied'
|
||||||
|
)
|
||||||
|
transfer_group.add_argument(
|
||||||
|
'-h', '--help', action='help', help='Show this help message and exit'
|
||||||
|
)
|
||||||
|
|
||||||
|
prune_parser = subparsers.add_parser(
|
||||||
|
'prune',
|
||||||
|
aliases=SUBPARSER_ALIASES['prune'],
|
||||||
|
help='Prune archives according to the retention policy (with Borg 1.2+, run compact afterwards to actually free space)',
|
||||||
|
description='Prune archives according to the retention policy (with Borg 1.2+, run compact afterwards to actually free space)',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
prune_group = prune_parser.add_argument_group('prune arguments')
|
||||||
|
prune_group.add_argument(
|
||||||
|
'--stats',
|
||||||
|
dest='stats',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Display statistics of archive',
|
||||||
|
)
|
||||||
|
prune_group.add_argument(
|
||||||
|
'--list', dest='list_archives', action='store_true', help='List archives kept/pruned'
|
||||||
|
)
|
||||||
|
prune_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
|
||||||
|
|
||||||
|
compact_parser = subparsers.add_parser(
|
||||||
|
'compact',
|
||||||
|
aliases=SUBPARSER_ALIASES['compact'],
|
||||||
|
help='Compact segments to free space (Borg 1.2+, borgmatic 1.5.23+ only)',
|
||||||
|
description='Compact segments to free space (Borg 1.2+, borgmatic 1.5.23+ only)',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
compact_group = compact_parser.add_argument_group('compact arguments')
|
||||||
|
compact_group.add_argument(
|
||||||
|
'--progress',
|
||||||
|
dest='progress',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Display progress as each segment is compacted',
|
||||||
|
)
|
||||||
|
compact_group.add_argument(
|
||||||
|
'--cleanup-commits',
|
||||||
|
dest='cleanup_commits',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Cleanup commit-only 17-byte segment files left behind by Borg 1.1 (flag in Borg 1.2 only)',
|
||||||
|
)
|
||||||
|
compact_group.add_argument(
|
||||||
|
'--threshold',
|
||||||
|
type=int,
|
||||||
|
dest='threshold',
|
||||||
|
help='Minimum saved space percentage threshold for compacting a segment, defaults to 10',
|
||||||
|
)
|
||||||
|
compact_group.add_argument(
|
||||||
|
'-h', '--help', action='help', help='Show this help message and exit'
|
||||||
|
)
|
||||||
|
|
||||||
|
create_parser = subparsers.add_parser(
|
||||||
|
'create',
|
||||||
|
aliases=SUBPARSER_ALIASES['create'],
|
||||||
|
help='Create an archive (actually perform a backup)',
|
||||||
|
description='Create an archive (actually perform a backup)',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
create_group = create_parser.add_argument_group('create arguments')
|
||||||
|
create_group.add_argument(
|
||||||
|
'--progress',
|
||||||
|
dest='progress',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Display progress for each file as it is backed up',
|
||||||
|
)
|
||||||
|
create_group.add_argument(
|
||||||
|
'--stats',
|
||||||
|
dest='stats',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Display statistics of archive',
|
||||||
|
)
|
||||||
|
create_group.add_argument(
|
||||||
|
'--list', '--files', dest='list_files', action='store_true', help='Show per-file details'
|
||||||
|
)
|
||||||
|
create_group.add_argument(
|
||||||
|
'--json', dest='json', default=False, action='store_true', help='Output results as JSON'
|
||||||
|
)
|
||||||
|
create_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
|
||||||
|
|
||||||
|
check_parser = subparsers.add_parser(
|
||||||
|
'check',
|
||||||
|
aliases=SUBPARSER_ALIASES['check'],
|
||||||
|
help='Check archives for consistency',
|
||||||
|
description='Check archives for consistency',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
check_group = check_parser.add_argument_group('check arguments')
|
||||||
|
check_group.add_argument(
|
||||||
|
'--progress',
|
||||||
|
dest='progress',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Display progress for each file as it is checked',
|
||||||
|
)
|
||||||
|
check_group.add_argument(
|
||||||
|
'--repair',
|
||||||
|
dest='repair',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Attempt to repair any inconsistencies found (for interactive use)',
|
||||||
|
)
|
||||||
|
check_group.add_argument(
|
||||||
|
'--only',
|
||||||
|
metavar='CHECK',
|
||||||
|
choices=('repository', 'archives', 'data', 'extract'),
|
||||||
|
dest='only',
|
||||||
|
action='append',
|
||||||
|
help='Run a particular consistency check (repository, archives, data, or extract) instead of configured checks (subject to configured frequency, can specify flag multiple times)',
|
||||||
|
)
|
||||||
|
check_group.add_argument(
|
||||||
|
'--force',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Ignore configured check frequencies and run checks unconditionally',
|
||||||
|
)
|
||||||
|
check_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
|
||||||
|
|
||||||
|
extract_parser = subparsers.add_parser(
|
||||||
|
'extract',
|
||||||
|
aliases=SUBPARSER_ALIASES['extract'],
|
||||||
|
help='Extract files from a named archive to the current directory',
|
||||||
|
description='Extract a named archive to the current directory',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
extract_group = extract_parser.add_argument_group('extract arguments')
|
||||||
|
extract_group.add_argument(
|
||||||
|
'--repository',
|
||||||
|
help='Path of repository to extract, defaults to the configured repository if there is only one',
|
||||||
|
)
|
||||||
|
extract_group.add_argument(
|
||||||
|
'--archive', help='Name of archive to extract (or "latest")', required=True
|
||||||
|
)
|
||||||
|
extract_group.add_argument(
|
||||||
|
'--path',
|
||||||
|
'--restore-path',
|
||||||
|
metavar='PATH',
|
||||||
|
nargs='+',
|
||||||
|
dest='paths',
|
||||||
|
help='Paths to extract from archive, defaults to the entire archive',
|
||||||
|
)
|
||||||
|
extract_group.add_argument(
|
||||||
|
'--destination',
|
||||||
|
metavar='PATH',
|
||||||
|
dest='destination',
|
||||||
|
help='Directory to extract files into, defaults to the current directory',
|
||||||
|
)
|
||||||
|
extract_group.add_argument(
|
||||||
|
'--strip-components',
|
||||||
|
type=lambda number: number if number == 'all' else int(number),
|
||||||
|
metavar='NUMBER',
|
||||||
|
help='Number of leading path components to remove from each extracted path or "all" to strip all leading path components. Skip paths with fewer elements',
|
||||||
|
)
|
||||||
|
extract_group.add_argument(
|
||||||
|
'--progress',
|
||||||
|
dest='progress',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Display progress for each file as it is extracted',
|
||||||
|
)
|
||||||
|
extract_group.add_argument(
|
||||||
|
'-h', '--help', action='help', help='Show this help message and exit'
|
||||||
|
)
|
||||||
|
|
||||||
|
export_tar_parser = subparsers.add_parser(
|
||||||
|
'export-tar',
|
||||||
|
aliases=SUBPARSER_ALIASES['export-tar'],
|
||||||
|
help='Export an archive to a tar-formatted file or stream',
|
||||||
|
description='Export an archive to a tar-formatted file or stream',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
export_tar_group = export_tar_parser.add_argument_group('export-tar arguments')
|
||||||
|
export_tar_group.add_argument(
|
||||||
|
'--repository',
|
||||||
|
help='Path of repository to export from, defaults to the configured repository if there is only one',
|
||||||
|
)
|
||||||
|
export_tar_group.add_argument(
|
||||||
|
'--archive', help='Name of archive to export (or "latest")', required=True
|
||||||
|
)
|
||||||
|
export_tar_group.add_argument(
|
||||||
|
'--path',
|
||||||
|
metavar='PATH',
|
||||||
|
nargs='+',
|
||||||
|
dest='paths',
|
||||||
|
help='Paths to export from archive, defaults to the entire archive',
|
||||||
|
)
|
||||||
|
export_tar_group.add_argument(
|
||||||
|
'--destination',
|
||||||
|
metavar='PATH',
|
||||||
|
dest='destination',
|
||||||
|
help='Path to destination export tar file, or "-" for stdout (but be careful about dirtying output with --verbosity or --list)',
|
||||||
|
required=True,
|
||||||
|
)
|
||||||
|
export_tar_group.add_argument(
|
||||||
|
'--tar-filter', help='Name of filter program to pipe data through'
|
||||||
|
)
|
||||||
|
export_tar_group.add_argument(
|
||||||
|
'--list', '--files', dest='list_files', action='store_true', help='Show per-file details'
|
||||||
|
)
|
||||||
|
export_tar_group.add_argument(
|
||||||
|
'--strip-components',
|
||||||
|
type=int,
|
||||||
|
metavar='NUMBER',
|
||||||
|
dest='strip_components',
|
||||||
|
help='Number of leading path components to remove from each exported path. Skip paths with fewer elements',
|
||||||
|
)
|
||||||
|
export_tar_group.add_argument(
|
||||||
|
'-h', '--help', action='help', help='Show this help message and exit'
|
||||||
|
)
|
||||||
|
|
||||||
|
mount_parser = subparsers.add_parser(
|
||||||
|
'mount',
|
||||||
|
aliases=SUBPARSER_ALIASES['mount'],
|
||||||
|
help='Mount files from a named archive as a FUSE filesystem',
|
||||||
|
description='Mount a named archive as a FUSE filesystem',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
mount_group = mount_parser.add_argument_group('mount arguments')
|
||||||
|
mount_group.add_argument(
|
||||||
|
'--repository',
|
||||||
|
help='Path of repository to use, defaults to the configured repository if there is only one',
|
||||||
|
)
|
||||||
|
mount_group.add_argument('--archive', help='Name of archive to mount (or "latest")')
|
||||||
|
mount_group.add_argument(
|
||||||
|
'--mount-point',
|
||||||
|
metavar='PATH',
|
||||||
|
dest='mount_point',
|
||||||
|
help='Path where filesystem is to be mounted',
|
||||||
|
required=True,
|
||||||
|
)
|
||||||
|
mount_group.add_argument(
|
||||||
|
'--path',
|
||||||
|
metavar='PATH',
|
||||||
|
nargs='+',
|
||||||
|
dest='paths',
|
||||||
|
help='Paths to mount from archive, defaults to the entire archive',
|
||||||
|
)
|
||||||
|
mount_group.add_argument(
|
||||||
|
'--foreground',
|
||||||
|
dest='foreground',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help='Stay in foreground until ctrl-C is pressed',
|
||||||
|
)
|
||||||
|
mount_group.add_argument('--options', dest='options', help='Extra Borg mount options')
|
||||||
|
mount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
|
||||||
|
|
||||||
|
umount_parser = subparsers.add_parser(
|
||||||
|
'umount',
|
||||||
|
aliases=SUBPARSER_ALIASES['umount'],
|
||||||
|
help='Unmount a FUSE filesystem that was mounted with "borgmatic mount"',
|
||||||
|
description='Unmount a mounted FUSE filesystem',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
umount_group = umount_parser.add_argument_group('umount arguments')
|
||||||
|
umount_group.add_argument(
|
||||||
|
'--mount-point',
|
||||||
|
metavar='PATH',
|
||||||
|
dest='mount_point',
|
||||||
|
help='Path of filesystem to unmount',
|
||||||
|
required=True,
|
||||||
|
)
|
||||||
|
umount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
|
||||||
|
|
||||||
|
restore_parser = subparsers.add_parser(
|
||||||
|
'restore',
|
||||||
|
aliases=SUBPARSER_ALIASES['restore'],
|
||||||
|
help='Restore database dumps from a named archive',
|
||||||
|
description='Restore database dumps from a named archive. (To extract files instead, use "borgmatic extract".)',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
restore_group = restore_parser.add_argument_group('restore arguments')
|
||||||
|
restore_group.add_argument(
|
||||||
|
'--repository',
|
||||||
|
help='Path of repository to restore from, defaults to the configured repository if there is only one',
|
||||||
|
)
|
||||||
|
restore_group.add_argument(
|
||||||
|
'--archive', help='Name of archive to restore from (or "latest")', required=True
|
||||||
|
)
|
||||||
|
restore_group.add_argument(
|
||||||
|
'--database',
|
||||||
|
metavar='NAME',
|
||||||
|
nargs='+',
|
||||||
|
dest='databases',
|
||||||
|
help="Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic's configuration",
|
||||||
|
)
|
||||||
|
restore_group.add_argument(
|
||||||
|
'-h', '--help', action='help', help='Show this help message and exit'
|
||||||
|
)
|
||||||
|
|
||||||
|
rlist_parser = subparsers.add_parser(
|
||||||
|
'rlist',
|
||||||
|
aliases=SUBPARSER_ALIASES['rlist'],
|
||||||
|
help='List repository',
|
||||||
|
description='List the archives in a repository',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
rlist_group = rlist_parser.add_argument_group('rlist arguments')
|
||||||
|
rlist_group.add_argument(
|
||||||
|
'--repository', help='Path of repository to list, defaults to the configured repositories',
|
||||||
|
)
|
||||||
|
rlist_group.add_argument(
|
||||||
|
'--short', default=False, action='store_true', help='Output only archive names'
|
||||||
|
)
|
||||||
|
rlist_group.add_argument('--format', help='Format for archive listing')
|
||||||
|
rlist_group.add_argument(
|
||||||
|
'--json', default=False, action='store_true', help='Output results as JSON'
|
||||||
|
)
|
||||||
|
rlist_group.add_argument(
|
||||||
|
'-P', '--prefix', help='Only list archive names starting with this prefix'
|
||||||
|
)
|
||||||
|
rlist_group.add_argument(
|
||||||
|
'-a',
|
||||||
|
'--match-archives',
|
||||||
|
'--glob-archives',
|
||||||
|
metavar='PATTERN',
|
||||||
|
help='Only list archive names matching this pattern',
|
||||||
|
)
|
||||||
|
rlist_group.add_argument(
|
||||||
|
'--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
|
||||||
|
)
|
||||||
|
rlist_group.add_argument(
|
||||||
|
'--first', metavar='N', help='List first N archives after other filters are applied'
|
||||||
|
)
|
||||||
|
rlist_group.add_argument(
|
||||||
|
'--last', metavar='N', help='List last N archives after other filters are applied'
|
||||||
|
)
|
||||||
|
rlist_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
|
||||||
|
|
||||||
|
list_parser = subparsers.add_parser(
|
||||||
|
'list',
|
||||||
|
aliases=SUBPARSER_ALIASES['list'],
|
||||||
|
help='List archive',
|
||||||
|
description='List the files in an archive or search for a file across archives',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
list_group = list_parser.add_argument_group('list arguments')
|
||||||
|
list_group.add_argument(
|
||||||
|
'--repository',
|
||||||
|
help='Path of repository containing archive to list, defaults to the configured repositories',
|
||||||
|
)
|
||||||
|
list_group.add_argument('--archive', help='Name of the archive to list (or "latest")')
|
||||||
|
list_group.add_argument(
|
||||||
|
'--path',
|
||||||
|
metavar='PATH',
|
||||||
|
nargs='+',
|
||||||
|
dest='paths',
|
||||||
|
help='Paths or patterns to list from a single selected archive (via "--archive"), defaults to listing the entire archive',
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'--find',
|
||||||
|
metavar='PATH',
|
||||||
|
nargs='+',
|
||||||
|
dest='find_paths',
|
||||||
|
help='Partial paths or patterns to search for and list across multiple archives',
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'--short', default=False, action='store_true', help='Output only path names'
|
||||||
|
)
|
||||||
|
list_group.add_argument('--format', help='Format for file listing')
|
||||||
|
list_group.add_argument(
|
||||||
|
'--json', default=False, action='store_true', help='Output results as JSON'
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'-P', '--prefix', help='Only list archive names starting with this prefix'
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'-a',
|
||||||
|
'--match-archives',
|
||||||
|
'--glob-archives',
|
||||||
|
metavar='PATTERN',
|
||||||
|
help='Only list archive names matching this pattern',
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'--successful',
|
||||||
|
default=True,
|
||||||
|
action='store_true',
|
||||||
|
help='Deprecated; no effect. Newer versions of Borg shows successful (non-checkpoint) archives by default.',
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'--first', metavar='N', help='List first N archives after other filters are applied'
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'--last', metavar='N', help='List last N archives after other filters are applied'
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'-e', '--exclude', metavar='PATTERN', help='Exclude paths matching the pattern'
|
||||||
|
)
|
||||||
|
list_group.add_argument(
|
||||||
|
'--exclude-from', metavar='FILENAME', help='Exclude paths from exclude file, one per line'
|
||||||
|
)
|
||||||
|
list_group.add_argument('--pattern', help='Include or exclude paths matching a pattern')
|
||||||
|
list_group.add_argument(
|
||||||
|
'--patterns-from',
|
||||||
|
metavar='FILENAME',
|
||||||
|
help='Include or exclude paths matching patterns from pattern file, one per line',
|
||||||
|
)
|
||||||
|
list_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
|
||||||
|
|
||||||
|
rinfo_parser = subparsers.add_parser(
|
||||||
|
'rinfo',
|
||||||
|
aliases=SUBPARSER_ALIASES['rinfo'],
|
||||||
|
help='Show repository summary information such as disk space used',
|
||||||
|
description='Show repository summary information such as disk space used',
|
||||||
|
add_help=False,
|
||||||
|
)
|
||||||
|
rinfo_group = rinfo_parser.add_argument_group('rinfo arguments')
|
||||||
|
rinfo_group.add_argument(
|
||||||
|
'--repository',
|
||||||
|
help='Path of repository to show info for, defaults to the configured repository if there is only one',
|
||||||
|
)
|
||||||
|
rinfo_group.add_argument(
|
||||||
|
'--json', dest='json', default=False, action='store_true', help='Output results as JSON'
|
||||||
|
)
|
||||||
|
rinfo_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
|
||||||
|
|
||||||
|
info_parser = subparsers.add_parser(
|
||||||
|
'info',
|
||||||
|
aliases=SUBPARSER_ALIASES['info'],
|
||||||
|
help='Show archive summary information such as disk space used',
|
||||||
|
        description='Show archive summary information such as disk space used',
        add_help=False,
    )
    info_group = info_parser.add_argument_group('info arguments')
    info_group.add_argument(
        '--repository',
        help='Path of repository containing archive to show info for, defaults to the configured repository if there is only one',
    )
    info_group.add_argument('--archive', help='Name of archive to show info for (or "latest")')
    info_group.add_argument(
        '--json', dest='json', default=False, action='store_true', help='Output results as JSON'
    )
    info_group.add_argument(
        '-P', '--prefix', help='Only show info for archive names starting with this prefix'
    )
    info_group.add_argument(
        '-a',
        '--match-archives',
        '--glob-archives',
        metavar='PATTERN',
        help='Only show info for archive names matching this pattern',
    )
    info_group.add_argument(
        '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys'
    )
    info_group.add_argument(
        '--first',
        metavar='N',
        help='Show info for first N archives after other filters are applied',
    )
    info_group.add_argument(
        '--last', metavar='N', help='Show info for last N archives after other filters are applied'
    )
    info_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')

    break_lock_parser = subparsers.add_parser(
        'break-lock',
        aliases=SUBPARSER_ALIASES['break-lock'],
        help='Break the repository and cache locks left behind by Borg aborting',
        description='Break Borg repository and cache locks left behind by Borg aborting',
        add_help=False,
    )
    break_lock_group = break_lock_parser.add_argument_group('break-lock arguments')
    break_lock_group.add_argument(
        '--repository',
        help='Path of repository to break the lock for, defaults to the configured repository if there is only one',
    )
    break_lock_group.add_argument(
        '-h', '--help', action='help', help='Show this help message and exit'
    )

    borg_parser = subparsers.add_parser(
        'borg',
        aliases=SUBPARSER_ALIASES['borg'],
        help='Run an arbitrary Borg command',
        description="Run an arbitrary Borg command based on borgmatic's configuration",
        add_help=False,
    )
    borg_group = borg_parser.add_argument_group('borg arguments')
    borg_group.add_argument(
        '--repository',
        help='Path of repository to pass to Borg, defaults to the configured repositories',
    )
    borg_group.add_argument('--archive', help='Name of archive to pass to Borg (or "latest")')
    borg_group.add_argument(
        '--',
        metavar='OPTION',
        dest='options',
        nargs='+',
        help='Options to pass to Borg, command first ("create", "list", etc). "--" is optional. To specify the repository or the archive, you must use --repository or --archive instead of providing them here.',
    )
    borg_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')

    return top_level_parser, subparsers


def parse_arguments(*unparsed_arguments):
    '''
    Given command-line arguments with which this script was invoked, parse the arguments and return
    them as a dict mapping from subparser name (or "global") to an argparse.Namespace instance.
    '''
    top_level_parser, subparsers = make_parsers()

    arguments, remaining_arguments = parse_subparser_arguments(
        unparsed_arguments, subparsers.choices
    )
    arguments['global'] = top_level_parser.parse_args(remaining_arguments)

    if arguments['global'].excludes_filename:
        raise ValueError(
            'The --excludes flag has been replaced with exclude_patterns in configuration.'
        )

    if 'create' in arguments and arguments['create'].list_files and arguments['create'].progress:
        raise ValueError(
            'With the create action, only one of --list (--files) and --progress flags can be used.'
        )

    if (
        ('list' in arguments and 'rinfo' in arguments and arguments['list'].json)
        or ('list' in arguments and 'info' in arguments and arguments['list'].json)
        or ('rinfo' in arguments and 'info' in arguments and arguments['rinfo'].json)
    ):
        raise ValueError('With the --json flag, multiple actions cannot be used together.')

    if (
        'transfer' in arguments
        and arguments['transfer'].archive
        and arguments['transfer'].match_archives
    ):
        raise ValueError(
            'With the transfer action, only one of --archive and --glob-archives flags can be used.'
        )

    if 'info' in arguments and (
        (arguments['info'].archive and arguments['info'].prefix)
        or (arguments['info'].archive and arguments['info'].match_archives)
        or (arguments['info'].prefix and arguments['info'].match_archives)
    ):
        raise ValueError(
            'With the info action, only one of --archive, --prefix, or --match-archives flags can be used.'
        )

    return arguments
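
# Illustrative usage sketch (not part of the original source): parse_arguments()
# returns a dict keyed by subparser name plus a "global" entry, so callers can
# check which actions were requested and with which flags, e.g.:
#
#     arguments = parse_arguments('--verbosity', '1', 'info', '--json')
#     arguments['global'].verbosity  # => 1
#     'info' in arguments            # => True
#     arguments['info'].json         # => True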


borgmatic/commands/borgmatic.py (new file, 735 lines)
@@ -0,0 +1,735 @@

import collections
import json
import logging
import os
import sys
import time
from queue import Queue
from subprocess import CalledProcessError

import colorama
import pkg_resources

import borgmatic.actions.borg
import borgmatic.actions.break_lock
import borgmatic.actions.check
import borgmatic.actions.compact
import borgmatic.actions.create
import borgmatic.actions.export_tar
import borgmatic.actions.extract
import borgmatic.actions.info
import borgmatic.actions.list
import borgmatic.actions.mount
import borgmatic.actions.prune
import borgmatic.actions.rcreate
import borgmatic.actions.restore
import borgmatic.actions.rinfo
import borgmatic.actions.rlist
import borgmatic.actions.transfer
import borgmatic.commands.completion
from borgmatic.borg import umount as borg_umount
from borgmatic.borg import version as borg_version
from borgmatic.commands.arguments import parse_arguments
from borgmatic.config import checks, collect, convert, validate
from borgmatic.hooks import command, dispatch, monitor
from borgmatic.logger import add_custom_log_levels, configure_logging, should_do_markup
from borgmatic.signals import configure_signals
from borgmatic.verbosity import verbosity_to_log_level

logger = logging.getLogger(__name__)

LEGACY_CONFIG_PATH = '/etc/borgmatic/config'


def run_configuration(config_filename, config, arguments):
    '''
    Given a config filename, the corresponding parsed config dict, and command-line arguments as a
    dict from subparser name to a namespace of parsed arguments, execute the defined create, prune,
    compact, check, and/or other actions.

    Yield a combination of:

      * JSON output strings from successfully executing any actions that produce JSON
      * logging.LogRecord instances containing errors from any actions or backup hooks that fail
    '''
    (location, storage, retention, consistency, hooks) = (
        config.get(section_name, {})
        for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
    )
    global_arguments = arguments['global']

    local_path = location.get('local_path', 'borg')
    remote_path = location.get('remote_path')
    retries = storage.get('retries', 0)
    retry_wait = storage.get('retry_wait', 0)
    encountered_error = None
    error_repository = ''
    using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments)
    monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)

    try:
        local_borg_version = borg_version.local_borg_version(storage, local_path)
    except (OSError, CalledProcessError, ValueError) as error:
        yield from log_error_records(
            '{}: Error getting local Borg version'.format(config_filename), error
        )
        return

    try:
        if using_primary_action:
            dispatch.call_hooks(
                'initialize_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitoring_log_level,
                global_arguments.dry_run,
            )
        if using_primary_action:
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.START,
                monitoring_log_level,
                global_arguments.dry_run,
            )
    except (OSError, CalledProcessError) as error:
        if command.considered_soft_failure(config_filename, error):
            return

        encountered_error = error
        yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)

    if not encountered_error:
        repo_queue = Queue()
        for repo in location['repositories']:
            repo_queue.put((repo, 0))

        while not repo_queue.empty():
            repository_path, retry_num = repo_queue.get()
            timeout = retry_num * retry_wait
            if timeout:
                logger.warning(f'{config_filename}: Sleeping {timeout}s before next retry')
                time.sleep(timeout)
            try:
                yield from run_actions(
                    arguments=arguments,
                    config_filename=config_filename,
                    location=location,
                    storage=storage,
                    retention=retention,
                    consistency=consistency,
                    hooks=hooks,
                    local_path=local_path,
                    remote_path=remote_path,
                    local_borg_version=local_borg_version,
                    repository_path=repository_path,
                )
            except (OSError, CalledProcessError, ValueError) as error:
                if retry_num < retries:
                    repo_queue.put((repository_path, retry_num + 1))
                    tuple(  # Consume the generator so as to trigger logging.
                        log_error_records(
                            '{}: Error running actions for repository'.format(repository_path),
                            error,
                            levelno=logging.WARNING,
                            log_command_error_output=True,
                        )
                    )
                    logger.warning(
                        f'{config_filename}: Retrying... attempt {retry_num + 1}/{retries}'
                    )
                    continue

                if command.considered_soft_failure(config_filename, error):
                    return

                yield from log_error_records(
                    '{}: Error running actions for repository'.format(repository_path), error
                )
                encountered_error = error
                error_repository = repository_path

    try:
        if using_primary_action:
            # Send logs irrespective of error.
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.LOG,
                monitoring_log_level,
                global_arguments.dry_run,
            )
    except (OSError, CalledProcessError) as error:
        if command.considered_soft_failure(config_filename, error):
            return

        encountered_error = error
        yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)

    if not encountered_error:
        try:
            if using_primary_action:
                dispatch.call_hooks(
                    'ping_monitor',
                    hooks,
                    config_filename,
                    monitor.MONITOR_HOOK_NAMES,
                    monitor.State.FINISH,
                    monitoring_log_level,
                    global_arguments.dry_run,
                )
                dispatch.call_hooks(
                    'destroy_monitor',
                    hooks,
                    config_filename,
                    monitor.MONITOR_HOOK_NAMES,
                    monitoring_log_level,
                    global_arguments.dry_run,
                )
        except (OSError, CalledProcessError) as error:
            if command.considered_soft_failure(config_filename, error):
                return

            encountered_error = error
            yield from log_error_records(
                '{}: Error pinging monitor'.format(config_filename), error
            )

    if encountered_error and using_primary_action:
        try:
            command.execute_hook(
                hooks.get('on_error'),
                hooks.get('umask'),
                config_filename,
                'on-error',
                global_arguments.dry_run,
                repository=error_repository,
                error=encountered_error,
                output=getattr(encountered_error, 'output', ''),
            )
            dispatch.call_hooks(
                'ping_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitor.State.FAIL,
                monitoring_log_level,
                global_arguments.dry_run,
            )
            dispatch.call_hooks(
                'destroy_monitor',
                hooks,
                config_filename,
                monitor.MONITOR_HOOK_NAMES,
                monitoring_log_level,
                global_arguments.dry_run,
            )
        except (OSError, CalledProcessError) as error:
            if command.considered_soft_failure(config_filename, error):
                return

            yield from log_error_records(
                '{}: Error running on-error hook'.format(config_filename), error
            )
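
# Illustrative consumption sketch (not part of the original source): because
# run_configuration() is a generator, nothing executes until it's iterated, and
# callers separate JSON output from error log records by type, e.g.:
#
#     results = list(run_configuration('/etc/borgmatic/config.yaml', config, arguments))
#     error_logs = [result for result in results if isinstance(result, logging.LogRecord)]
#     json_output = [result for result in results if not isinstance(result, logging.LogRecord)]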


def run_actions(
    *,
    arguments,
    config_filename,
    location,
    storage,
    retention,
    consistency,
    hooks,
    local_path,
    remote_path,
    local_borg_version,
    repository_path,
):
    '''
    Given parsed command-line arguments as a dict from subparser name to argparse.Namespace, the
    configuration filename, several different configuration dicts, local and remote paths to Borg,
    a local Borg version string, and a repository path, run all actions from the command-line
    arguments on the given repository.

    Yield JSON output strings from executing any actions that produce JSON.

    Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an
    action or a hook. Raise ValueError if the arguments or configuration passed to an action are
    invalid.
    '''
    add_custom_log_levels()
    repository = os.path.expanduser(repository_path)
    global_arguments = arguments['global']
    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
    hook_context = {
        'repository': repository_path,
        # Deprecated: For backwards compatibility with borgmatic < 1.6.0.
        'repositories': ','.join(location['repositories']),
    }

    command.execute_hook(
        hooks.get('before_actions'),
        hooks.get('umask'),
        config_filename,
        'pre-actions',
        global_arguments.dry_run,
        **hook_context,
    )

    for (action_name, action_arguments) in arguments.items():
        if action_name == 'rcreate':
            borgmatic.actions.rcreate.run_rcreate(
                repository,
                storage,
                local_borg_version,
                action_arguments,
                global_arguments,
                local_path,
                remote_path,
            )
        elif action_name == 'transfer':
            borgmatic.actions.transfer.run_transfer(
                repository,
                storage,
                local_borg_version,
                action_arguments,
                global_arguments,
                local_path,
                remote_path,
            )
        elif action_name == 'create':
            yield from borgmatic.actions.create.run_create(
                config_filename,
                repository,
                location,
                storage,
                hooks,
                hook_context,
                local_borg_version,
                action_arguments,
                global_arguments,
                dry_run_label,
                local_path,
                remote_path,
            )
        elif action_name == 'prune':
            borgmatic.actions.prune.run_prune(
                config_filename,
                repository,
                storage,
                retention,
                hooks,
                hook_context,
                local_borg_version,
                action_arguments,
                global_arguments,
                dry_run_label,
                local_path,
                remote_path,
            )
        elif action_name == 'compact':
            borgmatic.actions.compact.run_compact(
                config_filename,
                repository,
                storage,
                retention,
                hooks,
                hook_context,
                local_borg_version,
                action_arguments,
                global_arguments,
                dry_run_label,
                local_path,
                remote_path,
            )
        elif action_name == 'check':
            if checks.repository_enabled_for_checks(repository, consistency):
                borgmatic.actions.check.run_check(
                    config_filename,
                    repository,
                    location,
                    storage,
                    consistency,
                    hooks,
                    hook_context,
                    local_borg_version,
                    action_arguments,
                    global_arguments,
                    local_path,
                    remote_path,
                )
        elif action_name == 'extract':
            borgmatic.actions.extract.run_extract(
                config_filename,
                repository,
                location,
                storage,
                hooks,
                hook_context,
                local_borg_version,
                action_arguments,
                global_arguments,
                local_path,
                remote_path,
            )
        elif action_name == 'export-tar':
            borgmatic.actions.export_tar.run_export_tar(
                repository,
                storage,
                local_borg_version,
                action_arguments,
                global_arguments,
                local_path,
                remote_path,
            )
        elif action_name == 'mount':
            borgmatic.actions.mount.run_mount(
                repository,
                storage,
                local_borg_version,
                arguments['mount'],
                local_path,
                remote_path,
            )
        elif action_name == 'restore':
            borgmatic.actions.restore.run_restore(
                repository,
                location,
                storage,
                hooks,
                local_borg_version,
                action_arguments,
                global_arguments,
                local_path,
                remote_path,
            )
        elif action_name == 'rlist':
            yield from borgmatic.actions.rlist.run_rlist(
                repository, storage, local_borg_version, action_arguments, local_path, remote_path
            )
        elif action_name == 'list':
            yield from borgmatic.actions.list.run_list(
                repository, storage, local_borg_version, action_arguments, local_path, remote_path
            )
        elif action_name == 'rinfo':
            yield from borgmatic.actions.rinfo.run_rinfo(
                repository, storage, local_borg_version, action_arguments, local_path, remote_path
            )
        elif action_name == 'info':
            yield from borgmatic.actions.info.run_info(
                repository, storage, local_borg_version, action_arguments, local_path, remote_path
            )
        elif action_name == 'break-lock':
            borgmatic.actions.break_lock.run_break_lock(
                repository,
                storage,
                local_borg_version,
                arguments['break-lock'],
                local_path,
                remote_path,
            )
        elif action_name == 'borg':
            borgmatic.actions.borg.run_borg(
                repository, storage, local_borg_version, action_arguments, local_path, remote_path
            )

    command.execute_hook(
        hooks.get('after_actions'),
        hooks.get('umask'),
        config_filename,
        'post-actions',
        global_arguments.dry_run,
        **hook_context,
    )


def load_configurations(config_filenames, overrides=None, resolve_env=True):
    '''
    Given a sequence of configuration filenames, load and validate each configuration file. Return
    the results as a tuple of: a dict of configuration filename to corresponding parsed
    configuration, and a sequence of logging.LogRecord instances containing any parse errors.
    '''
    # Dict mapping from config filename to corresponding parsed config dict.
    configs = collections.OrderedDict()
    logs = []

    # Parse and load each configuration file.
    for config_filename in config_filenames:
        try:
            configs[config_filename], parse_logs = validate.parse_configuration(
                config_filename, validate.schema_filename(), overrides, resolve_env
            )
            logs.extend(parse_logs)
        except PermissionError:
            logs.extend(
                [
                    logging.makeLogRecord(
                        dict(
                            levelno=logging.WARNING,
                            levelname='WARNING',
                            msg='{}: Insufficient permissions to read configuration file'.format(
                                config_filename
                            ),
                        )
                    ),
                ]
            )
        except (ValueError, OSError, validate.Validation_error) as error:
            logs.extend(
                [
                    logging.makeLogRecord(
                        dict(
                            levelno=logging.CRITICAL,
                            levelname='CRITICAL',
                            msg='{}: Error parsing configuration file'.format(config_filename),
                        )
                    ),
                    logging.makeLogRecord(
                        dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
                    ),
                ]
            )

    return (configs, logs)
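
# Illustrative usage sketch (not part of the original source): load all configs
# found under a directory and surface any parse errors as log records, mirroring
# what main() does further below.
#
#     config_filenames = tuple(collect.collect_config_filenames(['/etc/borgmatic.d']))
#     configs, parse_logs = load_configurations(config_filenames)
#     for record in parse_logs:
#         logger.handle(record)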


def log_record(suppress_log=False, **kwargs):
    '''
    Create a log record based on the given makeLogRecord() arguments, one of which must be named
    "levelno". Log the record (unless suppress_log is set) and return it.
    '''
    record = logging.makeLogRecord(kwargs)
    if suppress_log:
        return record

    logger.handle(record)
    return record


def log_error_records(
    message, error=None, levelno=logging.CRITICAL, log_command_error_output=False
):
    '''
    Given error message text, an optional exception object, an optional log level, and whether to
    log the error output of a CalledProcessError (if any), log error summary information and also
    yield it as a series of logging.LogRecord instances.

    Note that because the logs are yielded as a generator, they won't get logged unless you
    consume the generator output.
    '''
    level_name = logging._levelToName[levelno]

    if not error:
        yield log_record(levelno=levelno, levelname=level_name, msg=message)
        return

    try:
        raise error
    except CalledProcessError as error:
        yield log_record(levelno=levelno, levelname=level_name, msg=message)
        if error.output:
            # Suppress these logs for now and save full error output for the log summary at the
            # end.
            yield log_record(
                levelno=levelno,
                levelname=level_name,
                msg=error.output,
                suppress_log=not log_command_error_output,
            )
        yield log_record(levelno=levelno, levelname=level_name, msg=error)
    except (ValueError, OSError) as error:
        yield log_record(levelno=levelno, levelname=level_name, msg=message)
        yield log_record(levelno=levelno, levelname=level_name, msg=error)
    except:  # noqa: E722
        # Raising above was only a means of determining the error type. Swallow the exception
        # here because we don't want the exception to propagate out of this function.
        pass
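
# Illustrative sketch (not part of the original source): since log_error_records()
# is a generator, wrap it in tuple() to force the records to get logged when the
# records themselves aren't needed, as run_configuration() does on retry:
#
#     tuple(log_error_records('Error running actions for repository', error,
#                             levelno=logging.WARNING, log_command_error_output=True))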


def get_local_path(configs):
    '''
    Arbitrarily return the local path from the first configuration dict. Default to "borg" if not
    set.
    '''
    return next(iter(configs.values())).get('location', {}).get('local_path', 'borg')


def collect_configuration_run_summary_logs(configs, arguments):
    '''
    Given a dict of configuration filename to corresponding parsed configuration, and parsed
    command-line arguments as a dict from subparser name to a parsed namespace of arguments, run
    each configuration file and yield a series of logging.LogRecord instances containing summary
    information about each run.

    As a side effect of running through these configuration files, output their JSON results, if
    any, to stdout.
    '''
    # Run cross-file validation checks.
    repository = None

    for action_name, action_arguments in arguments.items():
        if hasattr(action_arguments, 'repository'):
            repository = getattr(action_arguments, 'repository')
            break

    try:
        if 'extract' in arguments or 'mount' in arguments:
            validate.guard_single_repository_selected(repository, configs)

        validate.guard_configuration_contains_repository(repository, configs)
    except ValueError as error:
        yield from log_error_records(str(error))
        return

    if not configs:
        yield from log_error_records(
            '{}: No valid configuration files found'.format(
                ' '.join(arguments['global'].config_paths)
            )
        )
        return

    if 'create' in arguments:
        try:
            for config_filename, config in configs.items():
                hooks = config.get('hooks', {})
                command.execute_hook(
                    hooks.get('before_everything'),
                    hooks.get('umask'),
                    config_filename,
                    'pre-everything',
                    arguments['global'].dry_run,
                )
        except (CalledProcessError, ValueError, OSError) as error:
            yield from log_error_records('Error running pre-everything hook', error)
            return

    # Execute the actions corresponding to each configuration file.
    json_results = []
    for config_filename, config in configs.items():
        results = list(run_configuration(config_filename, config, arguments))
        error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))

        if error_logs:
            yield from log_error_records(
                '{}: Error running configuration file'.format(config_filename)
            )
            yield from error_logs
        else:
            yield logging.makeLogRecord(
                dict(
                    levelno=logging.INFO,
                    levelname='INFO',
                    msg='{}: Successfully ran configuration file'.format(config_filename),
                )
            )
            if results:
                json_results.extend(results)

    if 'umount' in arguments:
        logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point))
        try:
            borg_umount.unmount_archive(
                mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs)
            )
        except (CalledProcessError, OSError) as error:
            yield from log_error_records('Error unmounting mount point', error)

    if json_results:
        sys.stdout.write(json.dumps(json_results))

    if 'create' in arguments:
        try:
            for config_filename, config in configs.items():
                hooks = config.get('hooks', {})
                command.execute_hook(
                    hooks.get('after_everything'),
                    hooks.get('umask'),
                    config_filename,
                    'post-everything',
                    arguments['global'].dry_run,
                )
        except (CalledProcessError, ValueError, OSError) as error:
            yield from log_error_records('Error running post-everything hook', error)


def exit_with_help_link():  # pragma: no cover
    '''
    Display a link to get help and exit with an error code.
    '''
    logger.critical('')
    logger.critical('Need some help? https://torsion.org/borgmatic/#issues')
    sys.exit(1)


def main():  # pragma: no cover
    configure_signals()

    try:
        arguments = parse_arguments(*sys.argv[1:])
    except ValueError as error:
        configure_logging(logging.CRITICAL)
        logger.critical(error)
        exit_with_help_link()
    except SystemExit as error:
        if error.code == 0:
            raise error
        configure_logging(logging.CRITICAL)
        logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv)))
        exit_with_help_link()

    global_arguments = arguments['global']
    if global_arguments.version:
        print(pkg_resources.require('borgmatic')[0].version)
        sys.exit(0)
    if global_arguments.bash_completion:
        print(borgmatic.commands.completion.bash_completion())
        sys.exit(0)

    config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths))
    configs, parse_logs = load_configurations(
        config_filenames, global_arguments.overrides, global_arguments.resolve_env
    )

    any_json_flags = any(
        getattr(sub_arguments, 'json', False) for sub_arguments in arguments.values()
    )
    colorama.init(
        autoreset=True,
        strip=not should_do_markup(global_arguments.no_color or any_json_flags, configs),
    )
    try:
        configure_logging(
            verbosity_to_log_level(global_arguments.verbosity),
            verbosity_to_log_level(global_arguments.syslog_verbosity),
            verbosity_to_log_level(global_arguments.log_file_verbosity),
            verbosity_to_log_level(global_arguments.monitoring_verbosity),
            global_arguments.log_file,
        )
    except (FileNotFoundError, PermissionError) as error:
        configure_logging(logging.CRITICAL)
        logger.critical('Error configuring logging: {}'.format(error))
        exit_with_help_link()

    logger.debug('Ensuring legacy configuration is upgraded')
    convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)

    summary_logs = parse_logs + list(collect_configuration_run_summary_logs(configs, arguments))
    summary_logs_max_level = max(log.levelno for log in summary_logs)

    for message in ('', 'summary:'):
        log_record(
            levelno=summary_logs_max_level,
            levelname=logging.getLevelName(summary_logs_max_level),
            msg=message,
        )

    for log in summary_logs:
        logger.handle(log)

    if summary_logs_max_level >= logging.CRITICAL:
        exit_with_help_link()


borgmatic/commands/completion.py (new file, 57 lines)
@@ -0,0 +1,57 @@

from borgmatic.commands import arguments

UPGRADE_MESSAGE = '''
Your bash completions script is from a different version of borgmatic than is
currently installed. Please upgrade your script so your completions match the
command-line flags in your installed borgmatic! Try this to upgrade:

    sudo sh -c "borgmatic --bash-completion > $BASH_SOURCE"
    source $BASH_SOURCE
'''


def parser_flags(parser):
    '''
    Given an argparse.ArgumentParser instance, return its argument flags in a space-separated
    string.
    '''
    return ' '.join(option for action in parser._actions for option in action.option_strings)
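
# Illustrative sketch (not part of the original source): parser_flags() walks the
# parser's registered actions and flattens their option strings, e.g.:
#
#     parser = argparse.ArgumentParser(add_help=False)
#     parser.add_argument('-c', '--config')
#     parser.add_argument('--verbosity')
#     parser_flags(parser)  # => '-c --config --verbosity'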


def bash_completion():
    '''
    Return a bash completion script for the borgmatic command. Produce this by introspecting
    borgmatic's command-line argument parsers.
    '''
    top_level_parser, subparsers = arguments.make_parsers()
    global_flags = parser_flags(top_level_parser)
    actions = ' '.join(subparsers.choices.keys())

    # Avert your eyes.
    return '\n'.join(
        (
            'check_version() {',
            '    local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"',
            '    local installed_script="$(borgmatic --bash-completion 2> /dev/null)"',
            '    if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];'
            '    then cat << EOF\n%s\nEOF' % UPGRADE_MESSAGE,
            '    fi',
            '}',
            'complete_borgmatic() {',
        )
        + tuple(
            '''    if [[ " ${COMP_WORDS[*]} " =~ " %s " ]]; then
        COMPREPLY=($(compgen -W "%s %s %s" -- "${COMP_WORDS[COMP_CWORD]}"))
        return 0
    fi'''
            % (action, parser_flags(subparser), actions, global_flags)
            for action, subparser in subparsers.choices.items()
        )
        + (
            '    COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))'
            % (actions, global_flags),
            '    (check_version &)',
            '}',
            '\ncomplete -o bashdefault -o default -F complete_borgmatic borgmatic',
        )
    )
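
# Illustrative usage sketch (not part of the original source): the generated
# script is meant to be evaluated by bash. main() above prints it via
# `borgmatic --bash-completion`, so a user could, for instance, run:
#
#     source <(borgmatic --bash-completion)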


borgmatic/commands/convert_config.py (new file, 110 lines)
@@ -0,0 +1,110 @@

import os
import sys
import textwrap
from argparse import ArgumentParser

from ruamel import yaml

from borgmatic.config import convert, generate, legacy, validate

DEFAULT_SOURCE_CONFIG_FILENAME = '/etc/borgmatic/config'
DEFAULT_SOURCE_EXCLUDES_FILENAME = '/etc/borgmatic/excludes'
DEFAULT_DESTINATION_CONFIG_FILENAME = '/etc/borgmatic/config.yaml'


def parse_arguments(*arguments):
    '''
    Given command-line arguments with which this script was invoked, parse the arguments and
    return them as an argparse.Namespace instance.
    '''
    parser = ArgumentParser(
        description='''
            Convert legacy INI-style borgmatic configuration and excludes files to a single YAML
            configuration file. Note that this replaces any comments from the source files.
            '''
    )
    parser.add_argument(
        '-s',
        '--source-config',
        dest='source_config_filename',
        default=DEFAULT_SOURCE_CONFIG_FILENAME,
        help='Source INI-style configuration filename. Default: {}'.format(
            DEFAULT_SOURCE_CONFIG_FILENAME
        ),
    )
    parser.add_argument(
        '-e',
        '--source-excludes',
        dest='source_excludes_filename',
        default=DEFAULT_SOURCE_EXCLUDES_FILENAME
        if os.path.exists(DEFAULT_SOURCE_EXCLUDES_FILENAME)
        else None,
        help='Excludes filename',
    )
    parser.add_argument(
        '-d',
        '--destination-config',
        dest='destination_config_filename',
        default=DEFAULT_DESTINATION_CONFIG_FILENAME,
        help='Destination YAML configuration filename. Default: {}'.format(
            DEFAULT_DESTINATION_CONFIG_FILENAME
        ),
    )

    return parser.parse_args(arguments)


TEXT_WRAP_CHARACTERS = 80


def display_result(args):  # pragma: no cover
    result_lines = textwrap.wrap(
        'Your borgmatic configuration has been upgraded. Please review the result in {}.'.format(
            args.destination_config_filename
        ),
        TEXT_WRAP_CHARACTERS,
    )

    delete_lines = textwrap.wrap(
        'Once you are satisfied, you can safely delete {}{}.'.format(
            args.source_config_filename,
            ' and {}'.format(args.source_excludes_filename)
            if args.source_excludes_filename
            else '',
        ),
        TEXT_WRAP_CHARACTERS,
    )

    print('\n'.join(result_lines))
    print()
    print('\n'.join(delete_lines))


def main():  # pragma: no cover
    try:
        args = parse_arguments(*sys.argv[1:])
        schema = yaml.round_trip_load(open(validate.schema_filename()).read())
        source_config = legacy.parse_configuration(
            args.source_config_filename, legacy.CONFIG_FORMAT
        )
        source_config_file_mode = os.stat(args.source_config_filename).st_mode
        source_excludes = (
            open(args.source_excludes_filename).read().splitlines()
            if args.source_excludes_filename
            else []
        )

        destination_config = convert.convert_legacy_parsed_config(
            source_config, source_excludes, schema
        )

        generate.write_configuration(
            args.destination_config_filename,
            generate.render_configuration(destination_config),
            mode=source_config_file_mode,
        )

        display_result(args)
    except (ValueError, OSError) as error:
        print(error, file=sys.stderr)
        sys.exit(1)


borgmatic/commands/generate_config.py (new file, 69 lines)
@@ -0,0 +1,69 @@

import sys
from argparse import ArgumentParser

from borgmatic.config import generate, validate

DEFAULT_DESTINATION_CONFIG_FILENAME = '/etc/borgmatic/config.yaml'


def parse_arguments(*arguments):
    '''
    Given command-line arguments with which this script was invoked, parse the arguments and
    return them as an argparse.Namespace instance.
    '''
    parser = ArgumentParser(description='Generate a sample borgmatic YAML configuration file.')
    parser.add_argument(
        '-s',
        '--source',
        dest='source_filename',
        help='Optional YAML configuration file to merge into the generated configuration, useful for upgrading your configuration',
    )
    parser.add_argument(
        '-d',
        '--destination',
        dest='destination_filename',
        default=DEFAULT_DESTINATION_CONFIG_FILENAME,
        help='Destination YAML configuration file, default: {}'.format(
            DEFAULT_DESTINATION_CONFIG_FILENAME
        ),
    )
    parser.add_argument(
        '--overwrite',
        default=False,
        action='store_true',
        help='Whether to overwrite any existing destination file, defaults to false',
    )

    return parser.parse_args(arguments)


def main():  # pragma: no cover
    try:
        args = parse_arguments(*sys.argv[1:])

        generate.generate_sample_configuration(
            args.source_filename,
            args.destination_filename,
            validate.schema_filename(),
            overwrite=args.overwrite,
        )

        print('Generated a sample configuration file at {}.'.format(args.destination_filename))
        print()
        if args.source_filename:
            print(
                'Merged in the contents of configuration file at {}.'.format(args.source_filename)
            )
            print('To review the changes made, run:')
            print()
            print(
                '    diff --unified {} {}'.format(args.source_filename, args.destination_filename)
            )
            print()
        print('This includes all available configuration options with example values. The few')
        print('required options are indicated. Please edit the file to suit your needs.')
        print()
        print('If you ever need help: https://torsion.org/borgmatic/#issues')
    except (ValueError, OSError) as error:
        print(error, file=sys.stderr)
        sys.exit(1)


borgmatic/commands/validate_config.py (new file, 56 lines)
@@ -0,0 +1,56 @@

import logging
import sys
from argparse import ArgumentParser

from borgmatic.config import collect, validate

logger = logging.getLogger(__name__)


def parse_arguments(*arguments):
    '''
    Given command-line arguments with which this script was invoked, parse the arguments and
    return them as an argparse.Namespace instance.
    '''
    config_paths = collect.get_default_config_paths()

    parser = ArgumentParser(description='Validate borgmatic configuration file(s).')
    parser.add_argument(
        '-c',
        '--config',
        nargs='+',
        dest='config_paths',
        default=config_paths,
        help='Configuration filenames or directories, defaults to: {}'.format(
            ' '.join(config_paths)
        ),
    )

    return parser.parse_args(arguments)


def main():  # pragma: no cover
    args = parse_arguments(*sys.argv[1:])

    logging.basicConfig(level=logging.INFO, format='%(message)s')

    config_filenames = tuple(collect.collect_config_filenames(args.config_paths))
    if len(config_filenames) == 0:
        logger.critical('No files to validate found')
        sys.exit(1)

    found_issues = False
    for config_filename in config_filenames:
        try:
            validate.parse_configuration(config_filename, validate.schema_filename())
        except (ValueError, OSError, validate.Validation_error) as error:
            logging.critical('{}: Error parsing configuration file'.format(config_filename))
            logging.critical(error)
            found_issues = True

    if found_issues:
        sys.exit(1)
    else:
        logger.info(
            'All given configuration files are valid: {}'.format(', '.join(config_filenames))
        )


borgmatic/config/__init__.py (new empty file)

borgmatic/config/checks.py (new file, 9 lines)
@@ -0,0 +1,9 @@

def repository_enabled_for_checks(repository, consistency):
    '''
    Given a repository name and a consistency configuration dict, return whether the repository
    is enabled to have consistency checks run.
    '''
    if not consistency.get('check_repositories'):
        return True

    return repository in consistency['check_repositories']
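
# Illustrative usage sketch (not part of the original source): when
# "check_repositories" is absent, every repository is checked; when present, only
# the listed repositories are.
#
#     repository_enabled_for_checks('repo.borg', {})  # => True
#     repository_enabled_for_checks('repo.borg', {'check_repositories': ['other.borg']})  # => False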


borgmatic/config/collect.py (new file, 54 lines)
@@ -0,0 +1,54 @@

import os


def get_default_config_paths(expand_home=True):
    '''
    Based on the value of the XDG_CONFIG_HOME and HOME environment variables, return a list of
    default configuration paths. This includes both system-wide configuration and configuration in
    the current user's home directory.

    Don't expand the home directory ($HOME) if the expand_home flag is False.
    '''
    user_config_directory = os.getenv('XDG_CONFIG_HOME') or os.path.join('$HOME', '.config')
    if expand_home:
        user_config_directory = os.path.expandvars(user_config_directory)

    return [
        '/etc/borgmatic/config.yaml',
        '/etc/borgmatic.d',
        '%s/borgmatic/config.yaml' % user_config_directory,
        '%s/borgmatic.d' % user_config_directory,
    ]


def collect_config_filenames(config_paths):
    '''
    Given a sequence of config paths, both filenames and directories, resolve that to an iterable
    of files. Accomplish this by listing any given directories, looking for contained config files
    (ending with the ".yaml" or ".yml" extension). This is non-recursive, so any directories
    within the given directories are ignored.

    Return paths even if they don't exist on disk, so the user can find out about missing
    configuration paths. However, skip a default config path if it's missing, so the user doesn't
    have to create a default config path unless they need it.
    '''
    real_default_config_paths = set(map(os.path.realpath, get_default_config_paths()))

    for path in config_paths:
        exists = os.path.exists(path)

        if os.path.realpath(path) in real_default_config_paths and not exists:
            continue

        if not os.path.isdir(path) or not exists:
            yield path
            continue

        if not os.access(path, os.R_OK):
            continue

        for filename in sorted(os.listdir(path)):
            full_filename = os.path.join(path, filename)
            matching_filetype = full_filename.endswith('.yaml') or full_filename.endswith('.yml')
            if matching_filetype and not os.path.isdir(full_filename):
                yield full_filename
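
# Illustrative usage sketch (not part of the original source, assuming the example
# directory contents): mixing a direct filename with a directory of configs yields
# the file as-is plus any .yaml/.yml files found one level deep in the directory.
#
#     list(collect_config_filenames(['/home/me/backup.yaml', '/etc/borgmatic.d']))
#     # => ['/home/me/backup.yaml', '/etc/borgmatic.d/app1.yaml', '/etc/borgmatic.d/app2.yml']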


borgmatic/config/convert.py (new file, 95 lines)
@@ -0,0 +1,95 @@

import os

from ruamel import yaml

from borgmatic.config import generate


def _convert_section(source_section_config, section_schema):
    '''
    Given a legacy Parsed_config instance for a single section, convert it to its corresponding
    yaml.comments.CommentedMap representation in preparation for actual serialization to YAML.

    Where integer types exist in the given section schema, convert their values to integers.
    '''
    destination_section_config = yaml.comments.CommentedMap(
        [
            (
                option_name,
                int(option_value)
                if section_schema['properties'].get(option_name, {}).get('type') == 'integer'
                else option_value,
            )
            for option_name, option_value in source_section_config.items()
        ]
    )

    return destination_section_config


def convert_legacy_parsed_config(source_config, source_excludes, schema):
    '''
    Given a legacy Parsed_config instance loaded from an INI-style config file and a list of
    exclude patterns, convert them to a corresponding yaml.comments.CommentedMap representation in
    preparation for serialization to a single YAML config file.

    Additionally, use the given schema as a source of helpful comments to include within the
    returned CommentedMap.
    '''
    destination_config = yaml.comments.CommentedMap(
        [
            (section_name, _convert_section(section_config, schema['properties'][section_name]))
            for section_name, section_config in source_config._asdict().items()
        ]
    )

    # Split space-separated values into actual lists, make "repository" into a list, and merge in
    # excludes.
    location = destination_config['location']
    location['source_directories'] = source_config.location['source_directories'].split(' ')
    location['repositories'] = [location.pop('repository')]
    location['exclude_patterns'] = source_excludes

    if source_config.consistency.get('checks'):
        destination_config['consistency']['checks'] = source_config.consistency['checks'].split(
            ' '
        )

    # Add comments to each section, and then add comments to the fields in each section.
    generate.add_comments_to_configuration_object(destination_config, schema)

    for section_name, section_config in destination_config.items():
        generate.add_comments_to_configuration_object(
            section_config, schema['properties'][section_name], indent=generate.INDENT
        )

    return destination_config


class Legacy_configuration_not_upgraded(FileNotFoundError):
    def __init__(self):
        super(Legacy_configuration_not_upgraded, self).__init__(
            '''borgmatic changed its configuration file format in version 1.1.0 from INI-style
to YAML. This better supports validation, and has a more natural way to express
lists of values. To upgrade your existing configuration, run:

    sudo upgrade-borgmatic-config

That will generate a new YAML configuration file at /etc/borgmatic/config.yaml
(by default) using the values from both your existing configuration and excludes
files. The new version of borgmatic will consume the YAML configuration file
instead of the old one.'''
        )


def guard_configuration_upgraded(source_config_filename, destination_config_filenames):
    '''
    If a legacy source configuration exists but no upgraded destination configs do, raise
    Legacy_configuration_not_upgraded.

    The idea is that we want to alert the user about upgrading their config if they haven't
    already.
    '''
    destination_config_exists = any(
        os.path.exists(filename) for filename in destination_config_filenames
    )

    if os.path.exists(source_config_filename) and not destination_config_exists:
        raise Legacy_configuration_not_upgraded()


borgmatic/config/environment.py (new file, 42 lines)
@@ -0,0 +1,42 @@

import os
import re

_VARIABLE_PATTERN = re.compile(
    r'(?P<escape>\\)?(?P<variable>\$\{(?P<name>[A-Za-z0-9_]+)((:?-)(?P<default>[^}]+))?\})'
)


def _resolve_string(matcher):
    '''
    Get the value from the environment, given a matcher containing a name and an optional default
    value. If the variable is not defined in the environment and no default value is provided,
    raise ValueError.
    '''
    if matcher.group('escape') is not None:
        # In the case of an escaped environment variable, unescape it.
        return matcher.group('variable')

    # Resolve the environment variable.
    name, default = matcher.group('name'), matcher.group('default')
    out = os.getenv(name, default=default)
    if out is None:
        raise ValueError('Cannot find variable ${name} in environment'.format(name=name))
    return out


def resolve_env_variables(item):
    '''
    Resolve variables like ${FOO} in the given configuration with values from the process
    environment. Supported formats:

      - ${FOO} will return the FOO environment variable
      - ${FOO-bar} or ${FOO:-bar} will return the FOO environment variable if it exists, else "bar"

    If any variable is missing from the environment and no default value is provided, raise
    ValueError.
    '''
    if isinstance(item, str):
        return _VARIABLE_PATTERN.sub(_resolve_string, item)
    if isinstance(item, list):
        for index, subitem in enumerate(item):
            item[index] = resolve_env_variables(subitem)
    if isinstance(item, dict):
        for key, value in item.items():
            item[key] = resolve_env_variables(value)
    return item
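
# Illustrative usage sketch (not part of the original source), assuming
# PASSPHRASE is set in the process environment:
#
#     os.environ['PASSPHRASE'] = 'trustsome1'
#     resolve_env_variables({'storage': {'encryption_passphrase': '${PASSPHRASE}'}})
#     # => {'storage': {'encryption_passphrase': 'trustsome1'}}
#     resolve_env_variables('${MISSING:-fallback}')  # => 'fallback'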


borgmatic/config/generate.py (new file, 296 lines)
@@ -0,0 +1,296 @@

import collections
import io
import os
import re

from ruamel import yaml

from borgmatic.config import load, normalize

INDENT = 4
SEQUENCE_INDENT = 2


def _insert_newline_before_comment(config, field_name):
    '''
    Using some ruamel.yaml black magic, insert a blank line in the config right before the given
    field and its comments.
    '''
    config.ca.items[field_name][1].insert(
        0, yaml.tokens.CommentToken('\n', yaml.error.CommentMark(0), None)
    )


def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
    '''
    Given a loaded configuration schema, generate and return sample config for it. Include
    comments for each section based on the schema "description".
    '''
    schema_type = schema.get('type')
    example = schema.get('example')
    if example is not None:
        return example

    if schema_type == 'array':
        config = yaml.comments.CommentedSeq(
            [_schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
        )
        add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT))
    elif schema_type == 'object':
        config = yaml.comments.CommentedMap(
            [
                (field_name, _schema_to_sample_configuration(sub_schema, level + 1))
                for field_name, sub_schema in schema['properties'].items()
            ]
        )
        indent = (level * INDENT) + (SEQUENCE_INDENT if parent_is_sequence else 0)
        add_comments_to_configuration_object(
            config, schema, indent=indent, skip_first=parent_is_sequence
        )
    else:
        raise ValueError('Schema at level {} is unsupported: {}'.format(level, schema))

    return config


def _comment_out_line(line):
    # If the line is already commented out (or empty), there's nothing further to do!
    stripped_line = line.lstrip()
    if not stripped_line or stripped_line.startswith('#'):
        return line

    # Comment out the names of optional sections, inserting the '#' after any indent for
    # aesthetics.
    matches = re.match(r'(\s*)', line)
    indent_spaces = matches.group(0) if matches else ''
    count_indent_spaces = len(indent_spaces)

    return '# '.join((indent_spaces, line[count_indent_spaces:]))
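
# Illustrative sketch (not part of the original source): the '#' lands after any
# leading indent, so nesting is preserved in the commented-out sample.
#
#     _comment_out_line('    keep_daily: 7')  # => '    # keep_daily: 7'
#     _comment_out_line('    # already commented')  # => unchanged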


def _comment_out_optional_configuration(rendered_config):
    '''
    Post-process a rendered configuration string to comment out optional key/values, as determined
    by a sentinel in the comment before each key.

    The idea is that the pre-commented configuration prevents the user from having to comment out
    a bunch of configuration they don't care about to get to a minimal viable configuration file.

    Ideally ruamel.yaml would support commenting out keys during configuration generation, but
    it's not terribly easy to accomplish that way.
    '''
    lines = []
    optional = False

    for line in rendered_config.split('\n'):
        # Upon encountering an optional configuration option, comment out lines until the next
        # blank line.
        if line.strip().startswith('# {}'.format(COMMENTED_OUT_SENTINEL)):
            optional = True
            continue

        # Hit a blank line, so reset commenting.
        if not line.strip():
            optional = False

        lines.append(_comment_out_line(line) if optional else line)

    return '\n'.join(lines)


def render_configuration(config):
    '''
    Given a config data structure of nested OrderedDicts, render the config as YAML and return it.
    '''
    dumper = yaml.YAML()
    dumper.indent(mapping=INDENT, sequence=INDENT + SEQUENCE_INDENT, offset=INDENT)
    rendered = io.StringIO()
    dumper.dump(config, rendered)

    return rendered.getvalue()
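
# Illustrative usage sketch (not part of the original source): render a small
# config mapping to YAML text using borgmatic's indentation settings above.
#
#     config = yaml.comments.CommentedMap([('location', {'repositories': ['repo.borg']})])
#     yaml_text = render_configuration(config)  # => YAML string with 4-space mapping indents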
|
||||||
|
|
||||||
|
|
||||||
|
def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=False):
|
||||||
|
'''
|
||||||
|
Given a target config filename and rendered config YAML, write it out to file. Create any
|
||||||
|
containing directories as needed. But if the file already exists and overwrite is False,
|
||||||
|
abort before writing anything.
|
||||||
|
'''
|
||||||
|
if not overwrite and os.path.exists(config_filename):
|
||||||
|
raise FileExistsError(
|
||||||
|
'{} already exists. Aborting. Use --overwrite to replace the file.'.format(
|
||||||
|
config_filename
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
os.makedirs(os.path.dirname(config_filename), mode=0o700)
|
||||||
|
except (FileExistsError, FileNotFoundError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
with open(config_filename, 'w') as config_file:
|
||||||
|
config_file.write(rendered_config)
|
||||||
|
|
||||||
|
os.chmod(config_filename, mode)
|
||||||
|
|
||||||
|
|
||||||
|
def add_comments_to_configuration_sequence(config, schema, indent=0):
    '''
    If the given config sequence's items are objects, then mine the schema for the description of
    the object's first item, and slap that atop the sequence. Indent the comment the given number
    of characters.

    Doing this for sequences of maps results in nice comments that look like:

    ```
    things:
          # First key description. Added by this function.
        - key: foo
          # Second key description. Added by add_comments_to_configuration_object().
          other: bar
    ```
    '''
    if schema['items'].get('type') != 'object':
        return

    for field_name in config[0].keys():
        field_schema = schema['items']['properties'].get(field_name, {})
        description = field_schema.get('description')

        # No description to use? Skip it.
        if not field_schema or not description:
            return

        config[0].yaml_set_start_comment(description, indent=indent)

        # We only want the first key's description here, as the rest of the keys get commented by
        # add_comments_to_configuration_object().
        return

REQUIRED_SECTION_NAMES = {'location', 'retention'}
REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'}
COMMENTED_OUT_SENTINEL = 'COMMENT_OUT'

def add_comments_to_configuration_object(config, schema, indent=0, skip_first=False):
    '''
    Using descriptions from a schema as a source, add those descriptions as comments to the given
    config mapping, before each field. Indent the comment the given number of characters.
    '''
    for index, field_name in enumerate(config.keys()):
        if skip_first and index == 0:
            continue

        field_schema = schema['properties'].get(field_name, {})
        description = field_schema.get('description', '').strip()

        # If this is an optional key, add an indicator to the comment flagging it to be commented
        # out from the sample configuration. This sentinel is consumed by downstream processing
        # that does the actual commenting out.
        if field_name not in REQUIRED_SECTION_NAMES and field_name not in REQUIRED_KEYS:
            description = (
                '\n'.join((description, COMMENTED_OUT_SENTINEL))
                if description
                else COMMENTED_OUT_SENTINEL
            )

        # No description to use? Skip it.
        if not field_schema or not description:  # pragma: no cover
            continue

        config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent)

        if index > 0:
            _insert_newline_before_comment(config, field_name)

RUAMEL_YAML_COMMENTS_INDEX = 1

def remove_commented_out_sentinel(config, field_name):
    '''
    Given a configuration CommentedMap and a top-level field name in it, remove any "commented
    out" sentinel found at the end of its YAML comments. This prevents the given field name from
    getting commented out by downstream processing that consumes the sentinel.
    '''
    try:
        last_comment_value = config.ca.items[field_name][RUAMEL_YAML_COMMENTS_INDEX][-1].value
    except KeyError:
        return

    if last_comment_value == '# {}\n'.format(COMMENTED_OUT_SENTINEL):
        config.ca.items[field_name][RUAMEL_YAML_COMMENTS_INDEX].pop()

def merge_source_configuration_into_destination(destination_config, source_config):
    '''
    Deep merge the given source configuration dict into the destination configuration
    CommentedMap, favoring values from the source when there are collisions.

    The purpose of this is to upgrade configuration files from old versions of borgmatic by
    adding new configuration keys and comments.
    '''
    if not source_config:
        return destination_config
    if not destination_config or not isinstance(source_config, collections.abc.Mapping):
        return source_config

    for field_name, source_value in source_config.items():
        # Since this key/value is from the source configuration, leave it uncommented and remove
        # any sentinel that would cause it to get commented out.
        remove_commented_out_sentinel(destination_config, field_name)

        # This is a mapping. Recurse for this key/value.
        if isinstance(source_value, collections.abc.Mapping):
            destination_config[field_name] = merge_source_configuration_into_destination(
                destination_config[field_name], source_value
            )
            continue

        # This is a sequence. Recurse for each item in it.
        if isinstance(source_value, collections.abc.Sequence) and not isinstance(source_value, str):
            destination_value = destination_config[field_name]
            destination_config[field_name] = yaml.comments.CommentedSeq(
                [
                    merge_source_configuration_into_destination(
                        destination_value[index] if index < len(destination_value) else None,
                        source_item,
                    )
                    for index, source_item in enumerate(source_value)
                ]
            )
            continue

        # This is some sort of scalar. Simply set it into the destination.
        destination_config[field_name] = source_config[field_name]

    return destination_config

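A small sketch of the merge semantics above, where the destination is a freshly loaded CommentedMap and the source is a plain dict with made-up values:

```
import ruamel.yaml

destination = ruamel.yaml.YAML().load(
    'location:\n    source_directories:\n        - /home\n    repositories:\n        - old.borg\n'
)
merged = merge_source_configuration_into_destination(
    destination, {'location': {'repositories': ['new.borg']}}
)

# The source's repositories value wins, while untouched keys such as
# source_directories are preserved (along with any attached comments).
```
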
def generate_sample_configuration(
    source_filename, destination_filename, schema_filename, overwrite=False
):
    '''
    Given an optional source configuration filename, a required destination configuration
    filename, the path to a schema filename in a YAML rendition of the JSON Schema format, and
    whether to overwrite a destination file, write out a sample configuration file based on that
    schema. If a source filename is provided, merge the parsed contents of that configuration
    into the generated configuration.
    '''
    schema = yaml.round_trip_load(open(schema_filename))
    source_config = None

    if source_filename:
        source_config = load.load_configuration(source_filename)
        normalize.normalize(source_filename, source_config)

    destination_config = merge_source_configuration_into_destination(
        _schema_to_sample_configuration(schema), source_config
    )

    write_configuration(
        destination_filename,
        _comment_out_optional_configuration(render_configuration(destination_config)),
        overwrite=overwrite,
    )
@@ -1,12 +1,5 @@
 from collections import OrderedDict, namedtuple
-try:
-    # Python 2
-    from ConfigParser import ConfigParser
-except ImportError:
-    # Python 3
-    from configparser import ConfigParser
+from configparser import RawConfigParser


 Section_format = namedtuple('Section_format', ('name', 'options'))
 Config_option = namedtuple('Config_option', ('name', 'value_type', 'required'))
@@ -25,9 +18,19 @@ CONFIG_FORMAT = (
         'location',
         (
             option('source_directories'),
+            option('one_file_system', value_type=bool, required=False),
+            option('remote_path', required=False),
             option('repository'),
         ),
     ),
+    Section_format(
+        'storage',
+        (
+            option('encryption_passphrase', required=False),
+            option('compression', required=False),
+            option('umask', required=False),
+        ),
+    ),
     Section_format(
         'retention',
         (
@@ -41,17 +44,14 @@ CONFIG_FORMAT = (
         ),
     ),
     Section_format(
-        'consistency',
-        (
-            option('checks', required=False),
-        ),
-    )
+        'consistency', (option('checks', required=False), option('check_last', required=False))
+    ),
 )


 def validate_configuration_format(parser, config_format):
     '''
-    Given an open ConfigParser and an expected config file format, validate that the parsed
+    Given an open RawConfigParser and an expected config file format, validate that the parsed
     configuration file has the expected sections, that any required options are present in those
     sections, and that there aren't any unexpected options.

@@ -61,7 +61,8 @@ def validate_configuration_format(parser, config_format):
     '''
     section_names = set(parser.sections())
     required_section_names = tuple(
-        section.name for section in config_format
+        section.name
+        for section in config_format
         if any(option.required for option in section.options)
     )

@@ -75,9 +76,7 @@ def validate_configuration_format(parser, config_format):

     missing_section_names = set(required_section_names) - section_names
     if missing_section_names:
-        raise ValueError(
-            'Missing config sections: {}'.format(', '.join(missing_section_names))
-        )
+        raise ValueError('Missing config sections: {}'.format(', '.join(missing_section_names)))

     for section_format in config_format:
         if section_format.name not in section_names:
@@ -86,47 +85,41 @@ def validate_configuration_format(parser, config_format):
         option_names = parser.options(section_format.name)
         expected_options = section_format.options

-        unexpected_option_names = set(option_names) - set(option.name for option in expected_options)
+        unexpected_option_names = set(option_names) - set(
+            option.name for option in expected_options
+        )

         if unexpected_option_names:
             raise ValueError(
                 'Unexpected options found in config section {}: {}'.format(
-                    section_format.name,
-                    ', '.join(sorted(unexpected_option_names)),
+                    section_format.name, ', '.join(sorted(unexpected_option_names))
                 )
             )

         missing_option_names = tuple(
-            option.name for option in expected_options if option.required
+            option.name
+            for option in expected_options
+            if option.required
             if option.name not in option_names
         )

         if missing_option_names:
             raise ValueError(
                 'Required options missing from config section {}: {}'.format(
-                    section_format.name,
-                    ', '.join(missing_option_names)
+                    section_format.name, ', '.join(missing_option_names)
                 )
             )


-# Describes a parsed configuration, where each attribute is the name of a configuration file section
-# and each value is a dict of that section's parsed options.
-Parsed_config = namedtuple('Config', (section_format.name for section_format in CONFIG_FORMAT))


 def parse_section_options(parser, section_format):
     '''
-    Given an open ConfigParser and an expected section format, return the option values from that
+    Given an open RawConfigParser and an expected section format, return the option values from that
     section as a dict mapping from option name to value. Omit those options that are not present in
     the parsed options.

     Raise ValueError if any option values cannot be coerced to the expected Python data type.
     '''
-    type_getter = {
-        str: parser.get,
-        int: parser.getint,
-    }
+    type_getter = {str: parser.get, int: parser.getint, bool: parser.getboolean}

     return OrderedDict(
         (option.name, type_getter[option.value_type](section_format.name, option.name))
@@ -135,21 +128,25 @@ def parse_section_options(parser, section_format):
     )


-def parse_configuration(config_filename):
+def parse_configuration(config_filename, config_format):
     '''
-    Given a config filename of the expected format, return the parsed configuration as Parsed_config
-    data structure.
+    Given a config filename and an expected config file format, return the parsed configuration
+    as a namedtuple with one attribute for each parsed section.

     Raise IOError if the file cannot be read, or ValueError if the format is not as expected.
     '''
-    parser = ConfigParser()
-    parser.readfp(open(config_filename))
+    parser = RawConfigParser()
+    if not parser.read(config_filename):
+        raise ValueError('Configuration file cannot be opened: {}'.format(config_filename))

-    validate_configuration_format(parser, CONFIG_FORMAT)
+    validate_configuration_format(parser, config_format)

+    # Describes a parsed configuration, where each attribute is the name of a configuration file
+    # section and each value is a dict of that section's parsed options.
+    Parsed_config = namedtuple(
+        'Parsed_config', (section_format.name for section_format in config_format)
+    )

     return Parsed_config(
-        *(
-            parse_section_options(parser, section_format)
-            for section_format in CONFIG_FORMAT
-        )
+        *(parse_section_options(parser, section_format) for section_format in config_format)
     )
borgmatic/config/load.py (new file, 216 lines)
@@ -0,0 +1,216 @@
import functools
import logging
import os

import ruamel.yaml

logger = logging.getLogger(__name__)


def include_configuration(loader, filename_node, include_directory):
    '''
    Given a ruamel.yaml.loader.Loader, a ruamel.yaml.serializer.ScalarNode containing the included
    filename, and an include directory path to search for matching files, load the given YAML
    filename (ignoring the given loader so we can use our own) and return its contents as a data
    structure of nested dicts and lists. If the filename is relative, probe for it within 1. the
    current working directory and 2. the given include directory.

    Raise FileNotFoundError if an included file was not found.
    '''
    include_directories = [os.getcwd(), os.path.abspath(include_directory)]
    include_filename = os.path.expanduser(filename_node.value)

    if not os.path.isabs(include_filename):
        candidate_filenames = [
            os.path.join(directory, include_filename) for directory in include_directories
        ]

        for candidate_filename in candidate_filenames:
            if os.path.exists(candidate_filename):
                include_filename = candidate_filename
                break
        else:
            raise FileNotFoundError(
                f'Could not find include {filename_node.value} at {" or ".join(candidate_filenames)}'
            )

    return load_configuration(include_filename)


class Include_constructor(ruamel.yaml.SafeConstructor):
    '''
    A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for
    including separate YAML configuration files. Example syntax: `retention: !include common.yaml`
    '''

    def __init__(self, preserve_quotes=None, loader=None, include_directory=None):
        super(Include_constructor, self).__init__(preserve_quotes, loader)
        self.add_constructor(
            '!include',
            functools.partial(include_configuration, include_directory=include_directory),
        )

    def flatten_mapping(self, node):
        '''
        Support the special case of deep merging included configuration into an existing mapping
        using the YAML '<<' merge key. Example syntax:

        ```
        retention:
            keep_daily: 1

            <<: !include common.yaml
        ```

        These includes are deep merged into the current configuration file. For instance, in this
        example, any "retention" options in common.yaml will get merged into the "retention"
        section in the example configuration file.
        '''
        representer = ruamel.yaml.representer.SafeRepresenter()

        for index, (key_node, value_node) in enumerate(node.value):
            if key_node.tag == u'tag:yaml.org,2002:merge' and value_node.tag == '!include':
                included_value = representer.represent_data(self.construct_object(value_node))
                node.value[index] = (key_node, included_value)

        super(Include_constructor, self).flatten_mapping(node)

        node.value = deep_merge_nodes(node.value)


def load_configuration(filename):
    '''
    Load the given configuration file and return its contents as a data structure of nested dicts
    and lists.

    Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
    if there are too many recursive includes.
    '''
    # Use an embedded derived class for the include constructor so as to capture the filename
    # value. (functools.partial doesn't work for this use case because yaml.Constructor has to be
    # an actual class.)
    class Include_constructor_with_include_directory(Include_constructor):
        def __init__(self, preserve_quotes=None, loader=None):
            super(Include_constructor_with_include_directory, self).__init__(
                preserve_quotes, loader, include_directory=os.path.dirname(filename)
            )

    yaml = ruamel.yaml.YAML(typ='safe')
    yaml.Constructor = Include_constructor_with_include_directory

    return yaml.load(open(filename))

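A runnable sketch of the relative-include probing described in include_configuration(), using throwaway temporary files:

```
import os
import tempfile

directory = tempfile.mkdtemp()

with open(os.path.join(directory, 'common.yaml'), 'w') as common:
    common.write('keep_daily: 7\n')
with open(os.path.join(directory, 'config.yaml'), 'w') as config:
    config.write('retention: !include common.yaml\n')

# The include is resolved relative to the including file's directory.
print(load_configuration(os.path.join(directory, 'config.yaml')))
# {'retention': {'keep_daily': 7}}
```
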
DELETED_NODE = object()


def deep_merge_nodes(nodes):
    '''
    Given a nested borgmatic configuration data structure as a list of tuples in the form of:

        (
            ruamel.yaml.nodes.ScalarNode as a key,
            ruamel.yaml.nodes.MappingNode or other Node as a value,
        ),

    ... deep merge any node values corresponding to duplicate keys and return the result. If
    there are colliding keys with non-MappingNode values (e.g., integers or strings), the last
    of the values wins.

    For instance, given node values of:

        [
            (
                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
                MappingNode(tag='tag:yaml.org,2002:map', value=[
                    (
                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_hourly'),
                        ScalarNode(tag='tag:yaml.org,2002:int', value='24')
                    ),
                    (
                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
                        ScalarNode(tag='tag:yaml.org,2002:int', value='7')
                    ),
                ]),
            ),
            (
                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
                MappingNode(tag='tag:yaml.org,2002:map', value=[
                    (
                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
                        ScalarNode(tag='tag:yaml.org,2002:int', value='5')
                    ),
                ]),
            ),
        ]

    ... the returned result would be:

        [
            (
                ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
                MappingNode(tag='tag:yaml.org,2002:map', value=[
                    (
                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_hourly'),
                        ScalarNode(tag='tag:yaml.org,2002:int', value='24')
                    ),
                    (
                        ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'),
                        ScalarNode(tag='tag:yaml.org,2002:int', value='5')
                    ),
                ]),
            ),
        ]

    The purpose of deep merging like this is to support, for instance, merging one borgmatic
    configuration file into another for reuse, such that a configuration section ("retention",
    etc.) does not completely replace the corresponding section in a merged file.
    '''
    # Map from original node key/value to the replacement merged node. DELETED_NODE as a
    # replacement node indicates deletion.
    replaced_nodes = {}

    # To find nodes that require merging, compare each node with each other node.
    for a_key, a_value in nodes:
        for b_key, b_value in nodes:
            # If we've already considered one of the nodes for merging, skip it.
            if (a_key, a_value) in replaced_nodes or (b_key, b_value) in replaced_nodes:
                continue

            # If the keys match and the values are different, we need to merge these two A and B
            # nodes.
            if a_key.tag == b_key.tag and a_key.value == b_key.value and a_value != b_value:
                # Since we're merging into the B node, consider the A node a duplicate and remove
                # it.
                replaced_nodes[(a_key, a_value)] = DELETED_NODE

                # If we're dealing with MappingNodes, recurse and merge its values as well.
                if isinstance(b_value, ruamel.yaml.nodes.MappingNode):
                    replaced_nodes[(b_key, b_value)] = (
                        b_key,
                        ruamel.yaml.nodes.MappingNode(
                            tag=b_value.tag,
                            value=deep_merge_nodes(a_value.value + b_value.value),
                            start_mark=b_value.start_mark,
                            end_mark=b_value.end_mark,
                            flow_style=b_value.flow_style,
                            comment=b_value.comment,
                            anchor=b_value.anchor,
                        ),
                    )
                # If we're dealing with SequenceNodes, merge by appending one sequence to the
                # other.
                elif isinstance(b_value, ruamel.yaml.nodes.SequenceNode):
                    replaced_nodes[(b_key, b_value)] = (
                        b_key,
                        ruamel.yaml.nodes.SequenceNode(
                            tag=b_value.tag,
                            value=a_value.value + b_value.value,
                            start_mark=b_value.start_mark,
                            end_mark=b_value.end_mark,
                            flow_style=b_value.flow_style,
                            comment=b_value.comment,
                            anchor=b_value.anchor,
                        ),
                    )

    return [
        replaced_nodes.get(node, node) for node in nodes if replaced_nodes.get(node) != DELETED_NODE
    ]
borgmatic/config/normalize.py (new file, 88 lines)
@@ -0,0 +1,88 @@
import logging


def normalize(config_filename, config):
    '''
    Given a configuration filename and a configuration dict of its loaded contents, apply
    particular hard-coded rules to normalize the configuration to adhere to the current schema.
    Return any log message warnings produced based on the normalization performed.
    '''
    logs = []
    location = config.get('location') or {}
    storage = config.get('storage') or {}
    consistency = config.get('consistency') or {}
    hooks = config.get('hooks') or {}

    # Upgrade exclude_if_present from a string to a list.
    exclude_if_present = location.get('exclude_if_present')
    if isinstance(exclude_if_present, str):
        config['location']['exclude_if_present'] = [exclude_if_present]

    # Upgrade various monitoring hooks from a string to a dict.
    healthchecks = hooks.get('healthchecks')
    if isinstance(healthchecks, str):
        config['hooks']['healthchecks'] = {'ping_url': healthchecks}

    cronitor = hooks.get('cronitor')
    if isinstance(cronitor, str):
        config['hooks']['cronitor'] = {'ping_url': cronitor}

    pagerduty = hooks.get('pagerduty')
    if isinstance(pagerduty, str):
        config['hooks']['pagerduty'] = {'integration_key': pagerduty}

    cronhub = hooks.get('cronhub')
    if isinstance(cronhub, str):
        config['hooks']['cronhub'] = {'ping_url': cronhub}

    # Upgrade consistency checks from a list of strings to a list of dicts.
    checks = consistency.get('checks')
    if isinstance(checks, list) and len(checks) and isinstance(checks[0], str):
        config['consistency']['checks'] = [{'name': check_type} for check_type in checks]

    # Rename various configuration options.
    numeric_owner = location.pop('numeric_owner', None)
    if numeric_owner is not None:
        config['location']['numeric_ids'] = numeric_owner

    bsd_flags = location.pop('bsd_flags', None)
    if bsd_flags is not None:
        config['location']['flags'] = bsd_flags

    remote_rate_limit = storage.pop('remote_rate_limit', None)
    if remote_rate_limit is not None:
        config['storage']['upload_rate_limit'] = remote_rate_limit

    # Upgrade remote repositories to ssh:// syntax, required in Borg 2.
    repositories = location.get('repositories')
    if repositories:
        config['location']['repositories'] = []
        for repository in repositories:
            if '~' in repository:
                logs.append(
                    logging.makeLogRecord(
                        dict(
                            levelno=logging.WARNING,
                            levelname='WARNING',
                            msg=f'{config_filename}: Repository paths containing "~" are deprecated in borgmatic and no longer work in Borg 2.x+.',
                        )
                    )
                )
            if ':' in repository and not repository.startswith('ssh://'):
                rewritten_repository = (
                    f"ssh://{repository.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
                )
                logs.append(
                    logging.makeLogRecord(
                        dict(
                            levelno=logging.WARNING,
                            levelname='WARNING',
                            msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated. Interpreting "{repository}" as "{rewritten_repository}"',
                        )
                    )
                )
                config['location']['repositories'].append(rewritten_repository)
            else:
                config['location']['repositories'].append(repository)

    return logs
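A quick sketch of the repository rewriting above, with a made-up remote path:

```
config = {'location': {'repositories': ['user@host:backups.borg']}}
logs = normalize('config.yaml', config)

# The deprecated syntax is rewritten in place, and a warning log record is
# returned for display:
# config['location']['repositories'] == ['ssh://user@host/./backups.borg']
```
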
borgmatic/config/override.py (new file, 79 lines)
@@ -0,0 +1,79 @@
import io

import ruamel.yaml


def set_values(config, keys, value):
    '''
    Given a hierarchy of configuration dicts, a sequence of parsed key strings, and a string
    value, descend into the hierarchy based on the keys to set the value into the right place.
    '''
    if not keys:
        return

    first_key = keys[0]
    if len(keys) == 1:
        config[first_key] = value
        return

    if first_key not in config:
        config[first_key] = {}

    set_values(config[first_key], keys[1:], value)


def convert_value_type(value):
    '''
    Given a string value, determine its logical type (string, boolean, integer, etc.), and return
    it converted to that type.

    Raise ruamel.yaml.error.YAMLError if there's a parse issue with the YAML.
    '''
    return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value))


def parse_overrides(raw_overrides):
    '''
    Given a sequence of configuration file override strings in the form of "section.option=value",
    parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For
    instance, given the following raw overrides:

        ['section.my_option=value1', 'section.other_option=value2']

    ... return this:

        (
            (('section', 'my_option'), 'value1'),
            (('section', 'other_option'), 'value2'),
        )

    Raise ValueError if an override can't be parsed.
    '''
    if not raw_overrides:
        return ()

    parsed_overrides = []

    for raw_override in raw_overrides:
        try:
            raw_keys, value = raw_override.split('=', 1)
            parsed_overrides.append((tuple(raw_keys.split('.')), convert_value_type(value)))
        except ValueError:
            raise ValueError(
                f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE"
            )
        except ruamel.yaml.error.YAMLError as error:
            raise ValueError(f"Invalid override '{raw_override}': {error.problem}")

    return tuple(parsed_overrides)


def apply_overrides(config, raw_overrides):
    '''
    Given a configuration dict and a sequence of configuration file override strings in the form
    of "section.option=value", parse each override and set it into the configuration dict.
    '''
    overrides = parse_overrides(raw_overrides)

    for (keys, value) in overrides:
        set_values(config, keys, value)
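Putting the three functions together, a sketch with made-up option names:

```
config = {'location': {'repositories': ['original.borg']}}
apply_overrides(config, ['location.repositories=[replaced.borg]', 'storage.compression=zstd'])

# Values are parsed as YAML, so the list override really becomes a list:
# config == {
#     'location': {'repositories': ['replaced.borg']},
#     'storage': {'compression': 'zstd'},
# }
```
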
borgmatic/config/schema.yaml (new file, 1269 lines)
File diff suppressed because it is too large
borgmatic/config/validate.py (new file, 190 lines)
@@ -0,0 +1,190 @@
import os

import jsonschema
import pkg_resources
import ruamel.yaml

from borgmatic.config import environment, load, normalize, override


def schema_filename():
    '''
    Path to the installed YAML configuration schema file, used to validate and parse the
    configuration.
    '''
    return pkg_resources.resource_filename('borgmatic', 'config/schema.yaml')


def format_json_error_path_element(path_element):
    '''
    Given a path element into a JSON data structure, format it for display as a string.
    '''
    if isinstance(path_element, int):
        return str('[{}]'.format(path_element))

    return str('.{}'.format(path_element))


def format_json_error(error):
    '''
    Given an instance of jsonschema.exceptions.ValidationError, format it for display as a string.
    '''
    if not error.path:
        return 'At the top level: {}'.format(error.message)

    formatted_path = ''.join(format_json_error_path_element(element) for element in error.path)
    return "At '{}': {}".format(formatted_path.lstrip('.'), error.message)


class Validation_error(ValueError):
    '''
    A collection of error messages generated when attempting to validate a particular
    configuration file.
    '''

    def __init__(self, config_filename, errors):
        '''
        Given a configuration filename path and a sequence of string error messages, create a
        Validation_error.
        '''
        self.config_filename = config_filename
        self.errors = errors

    def __str__(self):
        '''
        Render a validation error as a user-facing string.
        '''
        return 'An error occurred while parsing a configuration file at {}:\n'.format(
            self.config_filename
        ) + '\n'.join(error for error in self.errors)


def apply_logical_validation(config_filename, parsed_configuration):
    '''
    Given a parsed and schematically valid configuration as a data structure of nested dicts (see
    below), run through any additional logical validation checks. If there are any such validation
    problems, raise a Validation_error.
    '''
    location_repositories = parsed_configuration.get('location', {}).get('repositories')
    check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', [])
    for repository in check_repositories:
        if repository not in location_repositories:
            raise Validation_error(
                config_filename,
                (
                    'Unknown repository in the "consistency" section\'s "check_repositories": {}'.format(
                        repository
                    ),
                ),
            )


def parse_configuration(config_filename, schema_filename, overrides=None, resolve_env=True):
    '''
    Given the path to a config filename in YAML format, the path to a schema filename in a YAML
    rendition of the JSON Schema format, and a sequence of configuration file override strings in
    the form of "section.option=value", return the parsed configuration as a data structure of
    nested dicts and lists corresponding to the schema. Example return value:

        {'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'},
         'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}}

    Also return a sequence of logging.LogRecord instances containing any warnings about the
    configuration.

    Raise FileNotFoundError if the file does not exist, PermissionError if the user does not
    have permissions to read the file, or Validation_error if the config does not match the
    schema.
    '''
    try:
        config = load.load_configuration(config_filename)
        schema = load.load_configuration(schema_filename)
    except (ruamel.yaml.error.YAMLError, RecursionError) as error:
        raise Validation_error(config_filename, (str(error),))

    override.apply_overrides(config, overrides)
    logs = normalize.normalize(config_filename, config)
    if resolve_env:
        environment.resolve_env_variables(config)

    try:
        validator = jsonschema.Draft7Validator(schema)
    except AttributeError:  # pragma: no cover
        validator = jsonschema.Draft4Validator(schema)
    validation_errors = tuple(validator.iter_errors(config))

    if validation_errors:
        raise Validation_error(
            config_filename, tuple(format_json_error(error) for error in validation_errors)
        )

    apply_logical_validation(config_filename, config)

    return config, logs


def normalize_repository_path(repository):
    '''
    Given a repository path, return the absolute path of it (for local repositories).
    '''
    # A colon in the repository indicates it's a remote repository. Bail.
    if ':' in repository:
        return repository

    return os.path.abspath(repository)


def repositories_match(first, second):
    '''
    Given two repository paths (relative and/or absolute), return whether they match.
    '''
    return normalize_repository_path(first) == normalize_repository_path(second)


def guard_configuration_contains_repository(repository, configurations):
    '''
    Given a repository path and a dict mapping from config filename to corresponding parsed config
    dict, ensure that the repository is declared exactly once in all of the configurations. If no
    repository is given, skip this check.

    Raise ValueError if the repository is not found in a configuration, or is declared multiple
    times.
    '''
    if not repository:
        return

    count = len(
        tuple(
            config_repository
            for config in configurations.values()
            for config_repository in config['location']['repositories']
            if repositories_match(repository, config_repository)
        )
    )

    if count == 0:
        raise ValueError('Repository {} not found in configuration files'.format(repository))
    if count > 1:
        raise ValueError('Repository {} found in multiple configuration files'.format(repository))


def guard_single_repository_selected(repository, configurations):
    '''
    Given a repository path and a dict mapping from config filename to corresponding parsed config
    dict, ensure either a single repository exists across all configuration files or a repository
    path was given.
    '''
    if repository:
        return

    count = len(
        tuple(
            config_repository
            for config in configurations.values()
            for config_repository in config['location']['repositories']
        )
    )

    if count != 1:
        raise ValueError(
            "Can't determine which repository to use. Use --repository to disambiguate"
        )
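A hypothetical caller of parse_configuration() above, using the bundled schema (the config path is made up):

```
try:
    config, logs = parse_configuration('/etc/borgmatic/config.yaml', schema_filename())
except Validation_error as error:
    # Each schema violation surfaces as one formatted line in the error.
    print(error)
```
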
borgmatic/execute.py (new file, 305 lines)
@@ -0,0 +1,305 @@
import collections
import logging
import os
import select
import subprocess

logger = logging.getLogger(__name__)


ERROR_OUTPUT_MAX_LINE_COUNT = 25
BORG_ERROR_EXIT_CODE = 2


def exit_code_indicates_error(process, exit_code, borg_local_path=None):
    '''
    Return True if the given exit code from running a command corresponds to an error. If a Borg
    local path is given and matches the process' command, then treat exit code 1 as a warning
    instead of an error.
    '''
    if exit_code is None:
        return False

    command = process.args.split(' ') if isinstance(process.args, str) else process.args

    if borg_local_path and command[0] == borg_local_path:
        return bool(exit_code < 0 or exit_code >= BORG_ERROR_EXIT_CODE)

    return bool(exit_code != 0)


def command_for_process(process):
    '''
    Given a process as an instance of subprocess.Popen, return the command string that was used to
    invoke it.
    '''
    return process.args if isinstance(process.args, str) else ' '.join(process.args)


def output_buffer_for_process(process, exclude_stdouts):
    '''
    Given a process as an instance of subprocess.Popen and a sequence of stdouts to exclude,
    return either the process's stdout or stderr. The idea is that if stdout is excluded for a
    process, we still have stderr to log.
    '''
    return process.stderr if process.stdout in exclude_stdouts else process.stdout


def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
    '''
    Given a sequence of subprocess.Popen() instances for multiple processes, log the output for
    each process with the requested log level. Additionally, raise a CalledProcessError if a
    process exits with an error (or a warning for exit code 1, if that process matches the Borg
    local path).

    If output log level is None, then instead of logging, capture output for each process and
    return it as a dict from the process to its output.

    For simplicity, it's assumed that the output buffer for each process is its stdout. But if
    any stdouts are given to exclude, then for any matching processes, log from their stderr
    instead.

    Note that stdout for a process can be None if output is intentionally not captured. In which
    case it won't be logged.
    '''
    # Map from output buffer to sequence of last lines.
    buffer_last_lines = collections.defaultdict(list)
    process_for_output_buffer = {
        output_buffer_for_process(process, exclude_stdouts): process
        for process in processes
        if process.stdout or process.stderr
    }
    output_buffers = list(process_for_output_buffer.keys())
    captured_outputs = collections.defaultdict(list)
    still_running = True

    # Log output for each process until they all exit.
    while True:
        if output_buffers:
            (ready_buffers, _, _) = select.select(output_buffers, [], [])

            for ready_buffer in ready_buffers:
                ready_process = process_for_output_buffer.get(ready_buffer)

                # The "ready" process has exited, but it might be a pipe destination with other
                # processes (pipe sources) waiting to be read from. So as a measure to prevent
                # hangs, vent all processes when one exits.
                if ready_process and ready_process.poll() is not None:
                    for other_process in processes:
                        if (
                            other_process.poll() is None
                            and other_process.stdout
                            and other_process.stdout not in output_buffers
                        ):
                            # Add the process's output to output_buffers to ensure it'll get read.
                            output_buffers.append(other_process.stdout)

                while True:
                    line = ready_buffer.readline().rstrip().decode()
                    if not line or not ready_process:
                        break

                    # Keep the last few lines of output in case the process errors, and we need
                    # the output for the exception below.
                    last_lines = buffer_last_lines[ready_buffer]
                    last_lines.append(line)
                    if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT:
                        last_lines.pop(0)

                    if output_log_level is None:
                        captured_outputs[ready_process].append(line)
                    else:
                        logger.log(output_log_level, line)

        if not still_running:
            break

        still_running = False

        for process in processes:
            exit_code = process.poll() if output_buffers else process.wait()

            if exit_code is None:
                still_running = True

            # If any process errors, then raise accordingly.
            if exit_code_indicates_error(process, exit_code, borg_local_path):
                # If an error occurs, include its output in the raised exception so that we don't
                # inadvertently hide error output.
                output_buffer = output_buffer_for_process(process, exclude_stdouts)

                last_lines = buffer_last_lines[output_buffer] if output_buffer else []
                if len(last_lines) == ERROR_OUTPUT_MAX_LINE_COUNT:
                    last_lines.insert(0, '...')

                # Something has gone wrong. So vent each process' output buffer to prevent it
                # from hanging. And then kill the process.
                for other_process in processes:
                    if other_process.poll() is None:
                        other_process.stdout.read(0)
                        other_process.kill()

                raise subprocess.CalledProcessError(
                    exit_code, command_for_process(process), '\n'.join(last_lines)
                )

    if captured_outputs:
        return {
            process: '\n'.join(output_lines) for process, output_lines in captured_outputs.items()
        }


def log_command(full_command, input_file=None, output_file=None):
    '''
    Log the given command (a sequence of command/argument strings), along with its input/output
    file paths.
    '''
    logger.debug(
        ' '.join(full_command)
        + (' < {}'.format(getattr(input_file, 'name', '')) if input_file else '')
        + (' > {}'.format(getattr(output_file, 'name', '')) if output_file else '')
    )


# A sentinel passed as an output file to execute_command() to indicate that the command's output
# should be allowed to flow through to stdout without being captured for logging. Useful for
# commands with interactive prompts or those that mess directly with the console.
DO_NOT_CAPTURE = object()


def execute_command(
    full_command,
    output_log_level=logging.INFO,
    output_file=None,
    input_file=None,
    shell=False,
    extra_environment=None,
    working_directory=None,
    borg_local_path=None,
    run_to_completion=True,
):
    '''
    Execute the given command (a sequence of command/argument strings) and log its output at the
    given log level. If an open output file object is given, then write stdout to the file and
    only log stderr. If an open input file object is given, then read stdin from the file. If
    shell is True, execute the command within a shell. If an extra environment dict is given,
    then use it to augment the current environment, and pass the result into the command. If a
    working directory is given, use that as the present working directory when running the
    command. If a Borg local path is given, and the command matches it (regardless of arguments),
    treat exit code 1 as a warning instead of an error. If run to completion is False, then
    return the process for the command without executing it to completion.

    Raise subprocess.CalledProcessError if an error occurs while running the command.
    '''
    log_command(full_command, input_file, output_file)
    environment = {**os.environ, **extra_environment} if extra_environment else None
    do_not_capture = bool(output_file is DO_NOT_CAPTURE)
    command = ' '.join(full_command) if shell else full_command

    process = subprocess.Popen(
        command,
        stdin=input_file,
        stdout=None if do_not_capture else (output_file or subprocess.PIPE),
        stderr=None if do_not_capture else (subprocess.PIPE if output_file else subprocess.STDOUT),
        shell=shell,
        env=environment,
        cwd=working_directory,
    )
    if not run_to_completion:
        return process

    log_outputs(
        (process,), (input_file, output_file), output_log_level, borg_local_path=borg_local_path
    )


def execute_command_and_capture_output(
    full_command, capture_stderr=False, shell=False, extra_environment=None, working_directory=None
):
    '''
    Execute the given command (a sequence of command/argument strings), capturing and returning
    its output (stdout). If capture stderr is True, then capture and return stderr in addition to
    stdout. If shell is True, execute the command within a shell. If an extra environment dict is
    given, then use it to augment the current environment, and pass the result into the command.
    If a working directory is given, use that as the present working directory when running the
    command.

    Raise subprocess.CalledProcessError if an error occurs while running the command.
    '''
    log_command(full_command)
    environment = {**os.environ, **extra_environment} if extra_environment else None
    command = ' '.join(full_command) if shell else full_command

    output = subprocess.check_output(
        command,
        stderr=subprocess.STDOUT if capture_stderr else None,
        shell=shell,
        env=environment,
        cwd=working_directory,
    )

    return output.decode() if output is not None else None


def execute_command_with_processes(
    full_command,
    processes,
    output_log_level=logging.INFO,
    output_file=None,
    input_file=None,
    shell=False,
    extra_environment=None,
    working_directory=None,
    borg_local_path=None,
):
    '''
    Execute the given command (a sequence of command/argument strings) and log its output at the
    given log level. Simultaneously, continue to poll one or more active processes so that they
    run as well. This is useful, for instance, for processes that are streaming output to a named
    pipe that the given command is consuming from.

    If an open output file object is given, then write stdout to the file and only log stderr.
    But if output log level is None, instead suppress logging and return the captured output for
    (only) the given command. If an open input file object is given, then read stdin from the
    file. If shell is True, execute the command within a shell. If an extra environment dict is
    given, then use it to augment the current environment, and pass the result into the command.
    If a working directory is given, use that as the present working directory when running the
    command. If a Borg local path is given, then for any matching command or process (regardless
    of arguments), treat exit code 1 as a warning instead of an error.

    Raise subprocess.CalledProcessError if an error occurs while running the command or in the
    upstream process.
    '''
    log_command(full_command, input_file, output_file)
    environment = {**os.environ, **extra_environment} if extra_environment else None
    do_not_capture = bool(output_file is DO_NOT_CAPTURE)
    command = ' '.join(full_command) if shell else full_command

    try:
        command_process = subprocess.Popen(
            command,
            stdin=input_file,
            stdout=None if do_not_capture else (output_file or subprocess.PIPE),
            stderr=None
            if do_not_capture
            else (subprocess.PIPE if output_file else subprocess.STDOUT),
            shell=shell,
            env=environment,
            cwd=working_directory,
        )
    except (subprocess.CalledProcessError, OSError):
        # Something has gone wrong. So vent each process' output buffer to prevent it from
        # hanging. And then kill the process.
        for process in processes:
            if process.poll() is None:
                process.stdout.read(0)
                process.kill()
        raise

    captured_outputs = log_outputs(
        tuple(processes) + (command_process,),
        (input_file, output_file),
        output_log_level,
        borg_local_path=borg_local_path,
    )

    if output_log_level is None:
        return captured_outputs.get(command_process)
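A minimal sketch of the two entry points above; output is logged line by line rather than returned, unless the capturing variant is used:

```
import logging

logging.basicConfig(level=logging.INFO)

# Logs each line of output at INFO; a non-zero exit raises CalledProcessError.
execute_command(('echo', 'hello'))

# Capturing instead of logging:
output = execute_command_and_capture_output(('echo', 'hello'))
```
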
borgmatic/hooks/__init__.py (new file, empty)

borgmatic/hooks/command.py (new file, 102 lines)
@@ -0,0 +1,102 @@
import logging
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
from borgmatic import execute
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
SOFT_FAIL_EXIT_CODE = 75
|
||||||
|
|
||||||
|
|
||||||
|
def interpolate_context(config_filename, hook_description, command, context):
|
||||||
|
'''
|
||||||
|
Given a config filename, a hook description, a single hook command, and a dict of context
|
||||||
|
names/values, interpolate the values by "{name}" into the command and return the result.
|
||||||
|
'''
|
||||||
|
for name, value in context.items():
|
||||||
|
command = command.replace('{%s}' % name, str(value))
|
||||||
|
|
||||||
|
for unsupported_variable in re.findall(r'{\w+}', command):
|
||||||
|
logger.warning(
|
||||||
|
f"{config_filename}: Variable '{unsupported_variable}' is not supported in {hook_description} hook"
|
||||||
|
)
|
||||||
|
|
||||||
|
return command
|
||||||
|
|
||||||
|
|
||||||
|
def execute_hook(commands, umask, config_filename, description, dry_run, **context):
|
||||||
|
'''
|
||||||
|
Given a list of hook commands to execute, a umask to execute with (or None), a config filename,
|
||||||
|
a hook description, and whether this is a dry run, run the given commands. Or, don't run them
|
||||||
|
if this is a dry run.
|
||||||
|
|
||||||
|
The context contains optional values interpolated by name into the hook commands.
|
||||||
|
|
||||||
|
Raise ValueError if the umask cannot be parsed.
|
||||||
|
Raise subprocesses.CalledProcessError if an error occurs in a hook.
|
||||||
|
'''
|
||||||
|
if not commands:
|
||||||
|
logger.debug('{}: No commands to run for {} hook'.format(config_filename, description))
|
||||||
|
return
|
||||||
|
|
||||||
|
dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''
|
||||||
|
|
||||||
|
context['configuration_filename'] = config_filename
|
||||||
|
commands = [
|
||||||
|
interpolate_context(config_filename, description, command, context) for command in commands
|
||||||
|
]
|
||||||
|
|
||||||
|
if len(commands) == 1:
|
||||||
|
logger.info(
|
||||||
|
'{}: Running command for {} hook{}'.format(config_filename, description, dry_run_label)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.info(
|
||||||
|
'{}: Running {} commands for {} hook{}'.format(
|
||||||
|
config_filename, len(commands), description, dry_run_label
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if umask:
|
||||||
|
parsed_umask = int(str(umask), 8)
|
||||||
|
logger.debug('{}: Set hook umask to {}'.format(config_filename, oct(parsed_umask)))
|
||||||
|
original_umask = os.umask(parsed_umask)
|
||||||
|
else:
|
||||||
|
original_umask = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
for command in commands:
|
||||||
|
if not dry_run:
|
||||||
|
execute.execute_command(
|
||||||
|
[command],
|
||||||
|
output_log_level=logging.ERROR
|
||||||
|
if description == 'on-error'
|
||||||
|
else logging.WARNING,
|
||||||
|
shell=True,
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
if original_umask:
|
||||||
|
os.umask(original_umask)
|
||||||
|
|
||||||
|
|
||||||
|
def considered_soft_failure(config_filename, error):
|
||||||
|
'''
|
||||||
|
Given a configuration filename and an exception object, return whether the exception object
|
||||||
|
represents a subprocess.CalledProcessError with a return code of SOFT_FAIL_EXIT_CODE. If so,
|
||||||
|
that indicates that the error is a "soft failure", and should not result in an error.
|
||||||
|
'''
|
||||||
|
exit_code = getattr(error, 'returncode', None)
|
||||||
|
if exit_code is None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if exit_code == SOFT_FAIL_EXIT_CODE:
|
||||||
|
logger.info(
|
||||||
|
'{}: Command hook exited with soft failure exit code ({}); skipping remaining actions'.format(
|
||||||
|
config_filename, SOFT_FAIL_EXIT_CODE
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
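
A minimal sketch of how these helpers are meant to combine (the config path and hook command are hypothetical):

import subprocess

try:
    execute_hook(
        ['echo starting backup for {configuration_filename}'],  # hypothetical hook command
        '0077',
        '/etc/borgmatic/config.yaml',
        'pre-backup',
        dry_run=False,
    )
except subprocess.CalledProcessError as error:
    # Exit code 75 is treated as a soft failure rather than a hard error.
    if not considered_soft_failure('/etc/borgmatic/config.yaml', error):
        raise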
65
borgmatic/hooks/cronhub.py
Normal file
@@ -0,0 +1,65 @@
import logging

import requests

from borgmatic.hooks import monitor

logger = logging.getLogger(__name__)

MONITOR_STATE_TO_CRONHUB = {
    monitor.State.START: 'start',
    monitor.State.FINISH: 'finish',
    monitor.State.FAIL: 'fail',
}


def initialize_monitor(
    ping_url, config_filename, monitoring_log_level, dry_run
):  # pragma: no cover
    '''
    No initialization is necessary for this monitor.
    '''
    pass


def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
    '''
    Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration
    filename in any log entries. If this is a dry run, then don't actually ping anything.
    '''
    if state not in MONITOR_STATE_TO_CRONHUB:
        logger.debug(
            f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in Cronhub hook'
        )
        return

    dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
    formatted_state = '/{}/'.format(MONITOR_STATE_TO_CRONHUB[state])
    ping_url = (
        hook_config['ping_url']
        .replace('/start/', formatted_state)
        .replace('/ping/', formatted_state)
    )

    logger.info(
        '{}: Pinging Cronhub {}{}'.format(config_filename, state.name.lower(), dry_run_label)
    )
    logger.debug('{}: Using Cronhub ping URL {}'.format(config_filename, ping_url))

    if not dry_run:
        logging.getLogger('urllib3').setLevel(logging.ERROR)
        try:
            response = requests.get(ping_url)
            if not response.ok:
                response.raise_for_status()
        except requests.exceptions.RequestException as error:
            logger.warning(f'{config_filename}: Cronhub error: {error}')


def destroy_monitor(
    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
):  # pragma: no cover
    '''
    No destruction is necessary for this monitor.
    '''
    pass
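
For instance, with a hypothetical configured ping URL, the replace() chain above rewrites the state segment in place:

# 'https://cronhub.io/ping/0c1ee9-example' pinged with State.FINISH becomes
# 'https://cronhub.io/finish/0c1ee9-example'; a URL already containing '/start/'
# is rewritten the same way.
assert 'https://cronhub.io/ping/0c1ee9-example'.replace(
    '/ping/', '/finish/'
) == 'https://cronhub.io/finish/0c1ee9-example'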
60
borgmatic/hooks/cronitor.py
Normal file
@@ -0,0 +1,60 @@
import logging

import requests

from borgmatic.hooks import monitor

logger = logging.getLogger(__name__)

MONITOR_STATE_TO_CRONITOR = {
    monitor.State.START: 'run',
    monitor.State.FINISH: 'complete',
    monitor.State.FAIL: 'fail',
}


def initialize_monitor(
    ping_url, config_filename, monitoring_log_level, dry_run
):  # pragma: no cover
    '''
    No initialization is necessary for this monitor.
    '''
    pass


def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
    '''
    Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration
    filename in any log entries. If this is a dry run, then don't actually ping anything.
    '''
    if state not in MONITOR_STATE_TO_CRONITOR:
        logger.debug(
            f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in Cronitor hook'
        )
        return

    dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
    ping_url = '{}/{}'.format(hook_config['ping_url'], MONITOR_STATE_TO_CRONITOR[state])

    logger.info(
        '{}: Pinging Cronitor {}{}'.format(config_filename, state.name.lower(), dry_run_label)
    )
    logger.debug('{}: Using Cronitor ping URL {}'.format(config_filename, ping_url))

    if not dry_run:
        logging.getLogger('urllib3').setLevel(logging.ERROR)
        try:
            response = requests.get(ping_url)
            if not response.ok:
                response.raise_for_status()
        except requests.exceptions.RequestException as error:
            logger.warning(f'{config_filename}: Cronitor error: {error}')


def destroy_monitor(
    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
):  # pragma: no cover
    '''
    No destruction is necessary for this monitor.
    '''
    pass
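
Cronitor's URL handling is simpler: the state word is appended to the configured ping URL. A dry-run sketch (the monitoring key is hypothetical):

from borgmatic.hooks import cronitor, monitor
import logging

cronitor.ping_monitor(
    {'ping_url': 'https://cronitor.link/d3x0c1'},  # hypothetical monitoring key
    'config.yaml',
    monitor.State.FINISH,  # would GET https://cronitor.link/d3x0c1/complete
    logging.INFO,
    dry_run=True,  # log only; no HTTP request is made
)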
86
borgmatic/hooks/dispatch.py
Normal file
@@ -0,0 +1,86 @@
import logging

from borgmatic.hooks import (
    cronhub,
    cronitor,
    healthchecks,
    mongodb,
    mysql,
    ntfy,
    pagerduty,
    postgresql,
    sqlite,
)

logger = logging.getLogger(__name__)

HOOK_NAME_TO_MODULE = {
    'cronhub': cronhub,
    'cronitor': cronitor,
    'healthchecks': healthchecks,
    'mongodb_databases': mongodb,
    'mysql_databases': mysql,
    'ntfy': ntfy,
    'pagerduty': pagerduty,
    'postgresql_databases': postgresql,
    'sqlite_databases': sqlite,
}


def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
    '''
    Given the hooks configuration dict and a prefix to use in log entries, call the requested
    function of the Python module corresponding to the given hook name. Supply that call with the
    configuration for this hook (if any), the log prefix, and any given args and kwargs. Return any
    return value.

    Raise ValueError if the hook name is unknown.
    Raise AttributeError if the function name is not found in the module.
    Raise anything else that the called function raises.
    '''
    config = hooks.get(hook_name, {})

    try:
        module = HOOK_NAME_TO_MODULE[hook_name]
    except KeyError:
        raise ValueError('Unknown hook name: {}'.format(hook_name))

    logger.debug('{}: Calling {} hook function {}'.format(log_prefix, hook_name, function_name))
    return getattr(module, function_name)(config, log_prefix, *args, **kwargs)


def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
    '''
    Given the hooks configuration dict and a prefix to use in log entries, call the requested
    function of the Python module corresponding to each given hook name. Supply each call with the
    configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
    values into a dict from hook name to return value.

    If the hook name is not present in the hooks configuration, then don't call the function for it
    and omit it from the return values.

    Raise ValueError if the hook name is unknown.
    Raise AttributeError if the function name is not found in the module.
    Raise anything else that a called function raises. An error stops calls to subsequent functions.
    '''
    return {
        hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
        for hook_name in hook_names
        if hooks.get(hook_name)
    }


def call_hooks_even_if_unconfigured(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
    '''
    Given the hooks configuration dict and a prefix to use in log entries, call the requested
    function of the Python module corresponding to each given hook name. Supply each call with the
    configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
    values into a dict from hook name to return value.

    Raise AttributeError if the function name is not found in the module.
    Raise anything else that a called function raises. An error stops calls to subsequent functions.
    '''
    return {
        hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
        for hook_name in hook_names
    }
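
A sketch of fanning a single action out to every configured database hook via these helpers (the hooks dict stands in for a hypothetical parsed configuration):

from borgmatic.hooks import dispatch, dump

hooks = {'postgresql_databases': [{'name': 'users'}]}  # hypothetical configuration
active_dump_processes = dispatch.call_hooks(
    'dump_databases', hooks, 'config.yaml', dump.DATABASE_HOOK_NAMES, {}, False
)
# -> {'postgresql_databases': [<subprocess.Popen>, ...]}; hooks absent from the
# configuration are skipped entirely.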
81
borgmatic/hooks/dump.py
Normal file
@@ -0,0 +1,81 @@
import logging
import os
import shutil

from borgmatic.borg.state import DEFAULT_BORGMATIC_SOURCE_DIRECTORY

logger = logging.getLogger(__name__)

DATABASE_HOOK_NAMES = (
    'postgresql_databases',
    'mysql_databases',
    'mongodb_databases',
    'sqlite_databases',
)


def make_database_dump_path(borgmatic_source_directory, database_hook_name):
    '''
    Given a borgmatic source directory (or None) and a database hook name, construct a database dump
    path.
    '''
    if not borgmatic_source_directory:
        borgmatic_source_directory = DEFAULT_BORGMATIC_SOURCE_DIRECTORY

    return os.path.join(borgmatic_source_directory, database_hook_name)


def make_database_dump_filename(dump_path, name, hostname=None):
    '''
    Based on the given dump directory path, database name, and hostname, return a filename to use
    for the database dump. The hostname defaults to localhost.

    Raise ValueError if the database name is invalid.
    '''
    if os.path.sep in name:
        raise ValueError('Invalid database name {}'.format(name))

    return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name)


def create_parent_directory_for_dump(dump_path):
    '''
    Create a directory to contain the given dump path.
    '''
    os.makedirs(os.path.dirname(dump_path), mode=0o700, exist_ok=True)


def create_named_pipe_for_dump(dump_path):
    '''
    Create a named pipe at the given dump path.
    '''
    create_parent_directory_for_dump(dump_path)
    os.mkfifo(dump_path, mode=0o600)


def remove_database_dumps(dump_path, database_type_name, log_prefix, dry_run):
    '''
    Remove all database dumps in the given dump directory path (including the directory itself). If
    this is a dry run, then don't actually remove anything.
    '''
    dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''

    logger.debug(
        '{}: Removing {} database dumps{}'.format(log_prefix, database_type_name, dry_run_label)
    )

    expanded_path = os.path.expanduser(dump_path)

    if dry_run:
        return

    if os.path.exists(expanded_path):
        shutil.rmtree(expanded_path)


def convert_glob_patterns_to_borg_patterns(patterns):
    '''
    Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
    patterns like "sh:etc/*".
    '''
    return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]
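
For example (paths illustrative, and assuming the default source directory resolves to ~/.borgmatic), the helpers compose like this:

dump_path = make_database_dump_path(None, 'postgresql_databases')
# -> '~/.borgmatic/postgresql_databases'
make_database_dump_filename(dump_path, 'users', hostname='db.example.org')
# -> '/root/.borgmatic/postgresql_databases/db.example.org/users' (after ~ expansion, for root)
convert_glob_patterns_to_borg_patterns(['/root/.borgmatic/*_databases/*/*'])
# -> ['sh:root/.borgmatic/*_databases/*/*']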
147
borgmatic/hooks/healthchecks.py
Normal file
@@ -0,0 +1,147 @@
import logging

import requests

from borgmatic.hooks import monitor

logger = logging.getLogger(__name__)

MONITOR_STATE_TO_HEALTHCHECKS = {
    monitor.State.START: 'start',
    monitor.State.FINISH: None,  # Healthchecks doesn't append to the URL for the finished state.
    monitor.State.FAIL: 'fail',
    monitor.State.LOG: 'log',
}

PAYLOAD_TRUNCATION_INDICATOR = '...\n'
DEFAULT_PING_BODY_LIMIT_BYTES = 100000


class Forgetful_buffering_handler(logging.Handler):
    '''
    A buffering log handler that stores log messages in memory, and throws away messages (oldest
    first) once a particular capacity in bytes is reached. But if the given byte capacity is zero,
    don't throw away any messages.
    '''

    def __init__(self, byte_capacity, log_level):
        super().__init__()

        self.byte_capacity = byte_capacity
        self.byte_count = 0
        self.buffer = []
        self.forgot = False
        self.setLevel(log_level)

    def emit(self, record):
        message = record.getMessage() + '\n'
        self.byte_count += len(message)
        self.buffer.append(message)

        if not self.byte_capacity:
            return

        while self.byte_count > self.byte_capacity and self.buffer:
            self.byte_count -= len(self.buffer[0])
            self.buffer.pop(0)
            self.forgot = True


def format_buffered_logs_for_payload():
    '''
    Get the handler previously added to the root logger, and slurp buffered logs out of it to
    send to Healthchecks.
    '''
    try:
        buffering_handler = next(
            handler
            for handler in logging.getLogger().handlers
            if isinstance(handler, Forgetful_buffering_handler)
        )
    except StopIteration:
        # No handler means no payload.
        return ''

    payload = ''.join(message for message in buffering_handler.buffer)

    if buffering_handler.forgot:
        return PAYLOAD_TRUNCATION_INDICATOR + payload

    return payload


def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
    '''
    Add a handler to the root logger that stores in memory the most recent logs emitted. That way,
    we can send them all to Healthchecks upon a finish or failure state. But skip this if the
    "send_logs" option is false.
    '''
    if hook_config.get('send_logs') is False:
        return

    ping_body_limit = max(
        hook_config.get('ping_body_limit', DEFAULT_PING_BODY_LIMIT_BYTES)
        - len(PAYLOAD_TRUNCATION_INDICATOR),
        0,
    )

    logging.getLogger().addHandler(
        Forgetful_buffering_handler(ping_body_limit, monitoring_log_level)
    )


def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
    '''
    Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given
    configuration filename in any log entries, and log to Healthchecks with the given log level.
    If this is a dry run, then don't actually ping anything.
    '''
    ping_url = (
        hook_config['ping_url']
        if hook_config['ping_url'].startswith('http')
        else 'https://hc-ping.com/{}'.format(hook_config['ping_url'])
    )
    dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''

    if 'states' in hook_config and state.name.lower() not in hook_config['states']:
        logger.info(
            f'{config_filename}: Skipping Healthchecks {state.name.lower()} ping due to configured states'
        )
        return

    healthchecks_state = MONITOR_STATE_TO_HEALTHCHECKS.get(state)
    if healthchecks_state:
        ping_url = '{}/{}'.format(ping_url, healthchecks_state)

    logger.info(
        '{}: Pinging Healthchecks {}{}'.format(config_filename, state.name.lower(), dry_run_label)
    )
    logger.debug('{}: Using Healthchecks ping URL {}'.format(config_filename, ping_url))

    if state in (monitor.State.FINISH, monitor.State.FAIL, monitor.State.LOG):
        payload = format_buffered_logs_for_payload()
    else:
        payload = ''

    if not dry_run:
        logging.getLogger('urllib3').setLevel(logging.ERROR)
        try:
            response = requests.post(
                ping_url, data=payload.encode('utf-8'), verify=hook_config.get('verify_tls', True)
            )
            if not response.ok:
                response.raise_for_status()
        except requests.exceptions.RequestException as error:
            logger.warning(f'{config_filename}: Healthchecks error: {error}')


def destroy_monitor(hook_config, config_filename, monitoring_log_level, dry_run):
    '''
    Remove the monitor handler that was added to the root logger. This prevents the handler from
    getting reused by other instances of this monitor.
    '''
    logger = logging.getLogger()

    for handler in tuple(logger.handlers):
        if isinstance(handler, Forgetful_buffering_handler):
            logger.removeHandler(handler)
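
A standalone sketch of the buffering behavior (the byte capacity is deliberately tiny):

import logging

handler = Forgetful_buffering_handler(byte_capacity=24, log_level=logging.INFO)
logging.getLogger().addHandler(handler)
logging.getLogger().warning('first message')   # buffered: 14 bytes with newline
logging.getLogger().warning('second message')  # pushes the total past 24 bytes
# The oldest message has been dropped, handler.forgot is now True, and
# format_buffered_logs_for_payload() prefixes the payload with '...\n'.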
168
borgmatic/hooks/mongodb.py
Normal file
@@ -0,0 +1,168 @@
import logging

from borgmatic.execute import execute_command, execute_command_with_processes
from borgmatic.hooks import dump

logger = logging.getLogger(__name__)


def make_dump_path(location_config):  # pragma: no cover
    '''
    Make the dump path from the given location configuration and the name of this hook.
    '''
    return dump.make_database_dump_path(
        location_config.get('borgmatic_source_directory'), 'mongodb_databases'
    )


def dump_databases(databases, log_prefix, location_config, dry_run):
    '''
    Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
    dicts, one dict describing each database as per the configuration schema. Use the given log
    prefix in any log entries. Use the given location configuration dict to construct the
    destination path.

    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
    pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''

    logger.info('{}: Dumping MongoDB databases{}'.format(log_prefix, dry_run_label))

    processes = []
    for database in databases:
        name = database['name']
        dump_filename = dump.make_database_dump_filename(
            make_dump_path(location_config), name, database.get('hostname')
        )
        dump_format = database.get('format', 'archive')

        logger.debug(
            '{}: Dumping MongoDB database {} to {}{}'.format(
                log_prefix, name, dump_filename, dry_run_label
            )
        )
        if dry_run:
            continue

        command = build_dump_command(database, dump_filename, dump_format)

        if dump_format == 'directory':
            dump.create_parent_directory_for_dump(dump_filename)
            execute_command(command, shell=True)
        else:
            dump.create_named_pipe_for_dump(dump_filename)
            processes.append(execute_command(command, shell=True, run_to_completion=False))

    return processes


def build_dump_command(database, dump_filename, dump_format):
    '''
    Return the mongodump command from a single database configuration.
    '''
    all_databases = database['name'] == 'all'
    command = ['mongodump']
    if dump_format == 'directory':
        command.extend(('--out', dump_filename))
    if 'hostname' in database:
        command.extend(('--host', database['hostname']))
    if 'port' in database:
        command.extend(('--port', str(database['port'])))
    if 'username' in database:
        command.extend(('--username', database['username']))
    if 'password' in database:
        command.extend(('--password', database['password']))
    if 'authentication_database' in database:
        command.extend(('--authenticationDatabase', database['authentication_database']))
    if not all_databases:
        command.extend(('--db', database['name']))
    if 'options' in database:
        command.extend(database['options'].split(' '))
    if dump_format != 'directory':
        command.extend(('--archive', '>', dump_filename))
    return command


def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
    '''
    Remove all database dump files for this hook regardless of the given databases. Use the log
    prefix in any log entries. Use the given location configuration dict to construct the
    destination path. If this is a dry run, then don't actually remove anything.
    '''
    dump.remove_database_dumps(make_dump_path(location_config), 'MongoDB', log_prefix, dry_run)


def make_database_dump_pattern(
    databases, log_prefix, location_config, name=None
):  # pragma: no cover
    '''
    Given a sequence of configuration dicts, a prefix to log with, a location configuration dict,
    and a database name to match, return the corresponding glob patterns to match the database dump
    in an archive.
    '''
    return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')


def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process):
    '''
    Restore the given MongoDB database from an extract stream. The database is supplied as a
    one-element sequence containing a dict describing the database, as per the configuration schema.
    Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
    output to consume.

    If the extract process is None, then restore the dump from the filesystem rather than from an
    extract stream.
    '''
    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''

    if len(database_config) != 1:
        raise ValueError('The database configuration value is invalid')

    database = database_config[0]
    dump_filename = dump.make_database_dump_filename(
        make_dump_path(location_config), database['name'], database.get('hostname')
    )
    restore_command = build_restore_command(extract_process, database, dump_filename)

    logger.debug(
        '{}: Restoring MongoDB database {}{}'.format(log_prefix, database['name'], dry_run_label)
    )
    if dry_run:
        return

    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
    # if the restore paths don't exist in the archive.
    execute_command_with_processes(
        restore_command,
        [extract_process] if extract_process else [],
        output_log_level=logging.DEBUG,
        input_file=extract_process.stdout if extract_process else None,
    )


def build_restore_command(extract_process, database, dump_filename):
    '''
    Return the mongorestore command from a single database configuration.
    '''
    command = ['mongorestore']
    if extract_process:
        command.append('--archive')
    else:
        command.extend(('--dir', dump_filename))
    if database['name'] != 'all':
        command.extend(('--drop', '--db', database['name']))
    if 'hostname' in database:
        command.extend(('--host', database['hostname']))
    if 'port' in database:
        command.extend(('--port', str(database['port'])))
    if 'username' in database:
        command.extend(('--username', database['username']))
    if 'password' in database:
        command.extend(('--password', database['password']))
    if 'authentication_database' in database:
        command.extend(('--authenticationDatabase', database['authentication_database']))
    if 'restore_options' in database:
        command.extend(database['restore_options'].split(' '))
    return command
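
As an illustration, a hypothetical configuration dict produces a command like this:

build_dump_command(
    {'name': 'users', 'hostname': 'mongo.example.org'},  # hypothetical config
    '/root/.borgmatic/mongodb_databases/mongo.example.org/users',
    'archive',
)
# -> ['mongodump', '--host', 'mongo.example.org', '--db', 'users',
#     '--archive', '>', '/root/.borgmatic/mongodb_databases/mongo.example.org/users']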
10
borgmatic/hooks/monitor.py
Normal file
@@ -0,0 +1,10 @@
from enum import Enum

MONITOR_HOOK_NAMES = ('healthchecks', 'cronitor', 'cronhub', 'pagerduty', 'ntfy')


class State(Enum):
    START = 1
    FINISH = 2
    FAIL = 3
    LOG = 4
226
borgmatic/hooks/mysql.py
Normal file
@@ -0,0 +1,226 @@
import copy
import logging
import os

from borgmatic.execute import (
    execute_command,
    execute_command_and_capture_output,
    execute_command_with_processes,
)
from borgmatic.hooks import dump

logger = logging.getLogger(__name__)


def make_dump_path(location_config):  # pragma: no cover
    '''
    Make the dump path from the given location configuration and the name of this hook.
    '''
    return dump.make_database_dump_path(
        location_config.get('borgmatic_source_directory'), 'mysql_databases'
    )


SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')


def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
    '''
    Given a requested database config, return the corresponding sequence of database names to dump.
    In the case of "all", query for the names of databases on the configured host and return them,
    excluding any system databases that will cause problems during restore.
    '''
    if database['name'] != 'all':
        return (database['name'],)
    if dry_run:
        return ()

    show_command = (
        ('mysql',)
        + (tuple(database['list_options'].split(' ')) if 'list_options' in database else ())
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
        + (('--user', database['username']) if 'username' in database else ())
        + ('--skip-column-names', '--batch')
        + ('--execute', 'show schemas')
    )
    logger.debug(f'{log_prefix}: Querying for "all" MySQL databases to dump')
    show_output = execute_command_and_capture_output(
        show_command, extra_environment=extra_environment
    )

    return tuple(
        show_name
        for show_name in show_output.strip().splitlines()
        if show_name not in SYSTEM_DATABASE_NAMES
    )


def execute_dump_command(
    database, log_prefix, dump_path, database_names, extra_environment, dry_run, dry_run_label
):
    '''
    Kick off a dump for the given MySQL/MariaDB database (provided as a configuration dict) to a
    named pipe constructed from the given dump path and database names. Use the given log prefix in
    any log entries.

    Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
    this is a dry run, then don't actually dump anything and return None.
    '''
    database_name = database['name']
    dump_filename = dump.make_database_dump_filename(
        dump_path, database['name'], database.get('hostname')
    )
    if os.path.exists(dump_filename):
        logger.warning(
            f'{log_prefix}: Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}'
        )
        return None

    dump_command = (
        ('mysqldump',)
        + (tuple(database['options'].split(' ')) if 'options' in database else ())
        + (('--add-drop-database',) if database.get('add_drop_database', True) else ())
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
        + (('--user', database['username']) if 'username' in database else ())
        + ('--databases',)
        + database_names
        # Use shell redirection rather than execute_command(output_file=open(...)) to prevent
        # the open() call on a named pipe from hanging the main borgmatic process.
        + ('>', dump_filename)
    )

    logger.debug(
        f'{log_prefix}: Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
    )
    if dry_run:
        return None

    dump.create_named_pipe_for_dump(dump_filename)

    return execute_command(
        dump_command, shell=True, extra_environment=extra_environment, run_to_completion=False,
    )


def dump_databases(databases, log_prefix, location_config, dry_run):
    '''
    Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
    of dicts, one dict describing each database as per the configuration schema. Use the given log
    prefix in any log entries. Use the given location configuration dict to construct the
    destination path.

    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
    pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
    processes = []

    logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))

    for database in databases:
        dump_path = make_dump_path(location_config)
        extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
        dump_database_names = database_names_to_dump(
            database, extra_environment, log_prefix, dry_run
        )

        if not dump_database_names:
            if dry_run:
                continue

            raise ValueError('Cannot find any MySQL databases to dump.')

        if database['name'] == 'all' and database.get('format'):
            for dump_name in dump_database_names:
                renamed_database = copy.copy(database)
                renamed_database['name'] = dump_name
                processes.append(
                    execute_dump_command(
                        renamed_database,
                        log_prefix,
                        dump_path,
                        (dump_name,),
                        extra_environment,
                        dry_run,
                        dry_run_label,
                    )
                )
        else:
            processes.append(
                execute_dump_command(
                    database,
                    log_prefix,
                    dump_path,
                    dump_database_names,
                    extra_environment,
                    dry_run,
                    dry_run_label,
                )
            )

    return [process for process in processes if process]


def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
    '''
    Remove all database dump files for this hook regardless of the given databases. Use the log
    prefix in any log entries. Use the given location configuration dict to construct the
    destination path. If this is a dry run, then don't actually remove anything.
    '''
    dump.remove_database_dumps(make_dump_path(location_config), 'MySQL', log_prefix, dry_run)


def make_database_dump_pattern(
    databases, log_prefix, location_config, name=None
):  # pragma: no cover
    '''
    Given a sequence of configuration dicts, a prefix to log with, a location configuration dict,
    and a database name to match, return the corresponding glob patterns to match the database dump
    in an archive.
    '''
    return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')


def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process):
    '''
    Restore the given MySQL/MariaDB database from an extract stream. The database is supplied as a
    one-element sequence containing a dict describing the database, as per the configuration schema.
    Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
    output to consume.
    '''
    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''

    if len(database_config) != 1:
        raise ValueError('The database configuration value is invalid')

    database = database_config[0]
    restore_command = (
        ('mysql', '--batch')
        + (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
        + (('--user', database['username']) if 'username' in database else ())
    )
    extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None

    logger.debug(
        '{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
    )
    if dry_run:
        return

    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
    # if the restore paths don't exist in the archive.
    execute_command_with_processes(
        restore_command,
        [extract_process],
        output_log_level=logging.DEBUG,
        input_file=extract_process.stdout,
        extra_environment=extra_environment,
    )
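
For example (hostname and password hypothetical), the "all" case above first asks the server which schemas exist:

database_names_to_dump(
    {'name': 'all', 'hostname': 'db.example.org'},  # hypothetical config
    {'MYSQL_PWD': 'trustsome1'},                    # hypothetical password
    'config.yaml',
    dry_run=False,
)
# Runs: mysql --host db.example.org --protocol tcp --skip-column-names --batch
#             --execute 'show schemas'
# and might return ('app', 'blog'), with system schemas like 'mysql' filtered out.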
83
borgmatic/hooks/ntfy.py
Normal file
@@ -0,0 +1,83 @@
import logging

import requests

logger = logging.getLogger(__name__)


def initialize_monitor(
    ping_url, config_filename, monitoring_log_level, dry_run
):  # pragma: no cover
    '''
    No initialization is necessary for this monitor.
    '''
    pass


def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
    '''
    Ping the configured ntfy topic. Use the given configuration filename in any log entries.
    If this is a dry run, then don't actually ping anything.
    '''
    run_states = hook_config.get('states', ['fail'])

    if state.name.lower() in run_states:
        dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''

        state_config = hook_config.get(
            state.name.lower(),
            {
                'title': f'A Borgmatic {state.name} event happened',
                'message': f'A Borgmatic {state.name} event happened',
                'priority': 'default',
                'tags': 'borgmatic',
            },
        )

        base_url = hook_config.get('server', 'https://ntfy.sh')
        topic = hook_config.get('topic')

        logger.info(f'{config_filename}: Pinging ntfy topic {topic}{dry_run_label}')
        logger.debug(f'{config_filename}: Using ntfy ping URL {base_url}/{topic}')

        headers = {
            'X-Title': state_config.get('title'),
            'X-Message': state_config.get('message'),
            'X-Priority': state_config.get('priority'),
            'X-Tags': state_config.get('tags'),
        }

        username = hook_config.get('username')
        password = hook_config.get('password')

        auth = None
        # This relies on "and" returning its second operand, so the check only passes
        # when both username and password are set.
        if (username and password) is not None:
            auth = requests.auth.HTTPBasicAuth(username, password)
            logger.info(f'{config_filename}: Using basic auth with user {username} for ntfy')
        elif username is not None:
            logger.warning(
                f'{config_filename}: Password missing for ntfy authentication, defaulting to no auth'
            )
        elif password is not None:
            logger.warning(
                f'{config_filename}: Username missing for ntfy authentication, defaulting to no auth'
            )

        if not dry_run:
            logging.getLogger('urllib3').setLevel(logging.ERROR)
            try:
                response = requests.post(f'{base_url}/{topic}', headers=headers, auth=auth)
                if not response.ok:
                    response.raise_for_status()
            except requests.exceptions.RequestException as error:
                logger.warning(f'{config_filename}: ntfy error: {error}')


def destroy_monitor(
    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
):  # pragma: no cover
    '''
    No destruction is necessary for this monitor.
    '''
    pass
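
A sketch of the resulting request for a failure event (server and topic hypothetical):

from borgmatic.hooks import monitor, ntfy
import logging

ntfy.ping_monitor(
    {'topic': 'my-backups', 'server': 'https://ntfy.example.org'},  # hypothetical config
    'config.yaml',
    monitor.State.FAIL,  # 'fail' is the sole default run state
    logging.INFO,
    dry_run=False,
)
# POSTs to https://ntfy.example.org/my-backups with the X-Title, X-Message,
# X-Priority, and X-Tags headers built above.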
85
borgmatic/hooks/pagerduty.py
Normal file
@@ -0,0 +1,85 @@
import datetime
import json
import logging
import platform

import requests

from borgmatic.hooks import monitor

logger = logging.getLogger(__name__)

EVENTS_API_URL = 'https://events.pagerduty.com/v2/enqueue'


def initialize_monitor(
    integration_key, config_filename, monitoring_log_level, dry_run
):  # pragma: no cover
    '''
    No initialization is necessary for this monitor.
    '''
    pass


def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run):
    '''
    If this is an error state, create a PagerDuty event with the configured integration key. Use
    the given configuration filename in any log entries. If this is a dry run, then don't actually
    create an event.
    '''
    if state != monitor.State.FAIL:
        logger.debug(
            '{}: Ignoring unsupported monitoring {} in PagerDuty hook'.format(
                config_filename, state.name.lower()
            )
        )
        return

    dry_run_label = ' (dry run; not actually sending)' if dry_run else ''
    logger.info('{}: Sending failure event to PagerDuty{}'.format(config_filename, dry_run_label))

    if dry_run:
        return

    hostname = platform.node()
    local_timestamp = (
        datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).astimezone().isoformat()
    )
    payload = json.dumps(
        {
            'routing_key': hook_config['integration_key'],
            'event_action': 'trigger',
            'payload': {
                'summary': 'backup failed on {}'.format(hostname),
                'severity': 'error',
                'source': hostname,
                'timestamp': local_timestamp,
                'component': 'borgmatic',
                'group': 'backups',
                'class': 'backup failure',
                'custom_details': {
                    'hostname': hostname,
                    'configuration filename': config_filename,
                    'server time': local_timestamp,
                },
            },
        }
    )
    logger.debug('{}: Using PagerDuty payload: {}'.format(config_filename, payload))

    logging.getLogger('urllib3').setLevel(logging.ERROR)
    try:
        response = requests.post(EVENTS_API_URL, data=payload.encode('utf-8'))
        if not response.ok:
            response.raise_for_status()
    except requests.exceptions.RequestException as error:
        logger.warning(f'{config_filename}: PagerDuty error: {error}')


def destroy_monitor(
    ping_url_or_uuid, config_filename, monitoring_log_level, dry_run
):  # pragma: no cover
    '''
    No destruction is necessary for this monitor.
    '''
    pass
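
Only the failure state produces an event; every other state is ignored. A dry-run sketch (the integration key is hypothetical):

from borgmatic.hooks import monitor, pagerduty
import logging

pagerduty.ping_monitor(
    {'integration_key': 'a0b1c2d3e4f5'},  # hypothetical integration key
    'config.yaml',
    monitor.State.FAIL,
    logging.INFO,
    dry_run=True,  # logs the intent without POSTing to the Events API
)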
246
borgmatic/hooks/postgresql.py
Normal file
@@ -0,0 +1,246 @@
import csv
import logging
import os

from borgmatic.execute import (
    execute_command,
    execute_command_and_capture_output,
    execute_command_with_processes,
)
from borgmatic.hooks import dump

logger = logging.getLogger(__name__)


def make_dump_path(location_config):  # pragma: no cover
    '''
    Make the dump path from the given location configuration and the name of this hook.
    '''
    return dump.make_database_dump_path(
        location_config.get('borgmatic_source_directory'), 'postgresql_databases'
    )


def make_extra_environment(database):
    '''
    Make the extra_environment dict from the given database configuration.
    '''
    extra = dict()
    if 'password' in database:
        extra['PGPASSWORD'] = database['password']
    extra['PGSSLMODE'] = database.get('ssl_mode', 'disable')
    if 'ssl_cert' in database:
        extra['PGSSLCERT'] = database['ssl_cert']
    if 'ssl_key' in database:
        extra['PGSSLKEY'] = database['ssl_key']
    if 'ssl_root_cert' in database:
        extra['PGSSLROOTCERT'] = database['ssl_root_cert']
    if 'ssl_crl' in database:
        extra['PGSSLCRL'] = database['ssl_crl']
    return extra


EXCLUDED_DATABASE_NAMES = ('template0', 'template1')


def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
    '''
    Given a requested database config, return the corresponding sequence of database names to dump.
    In the case of "all" when a database format is given, query for the names of databases on the
    configured host and return them. For "all" without a database format, just return a sequence
    containing "all".
    '''
    requested_name = database['name']

    if requested_name != 'all':
        return (requested_name,)
    if not database.get('format'):
        return ('all',)
    if dry_run:
        return ()

    list_command = (
        ('psql', '--list', '--no-password', '--csv', '--tuples-only')
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--username', database['username']) if 'username' in database else ())
        + (tuple(database['list_options'].split(' ')) if 'list_options' in database else ())
    )
    logger.debug(f'{log_prefix}: Querying for "all" PostgreSQL databases to dump')
    list_output = execute_command_and_capture_output(
        list_command, extra_environment=extra_environment
    )

    return tuple(
        row[0]
        for row in csv.reader(list_output.splitlines(), delimiter=',', quotechar='"')
        if row[0] not in EXCLUDED_DATABASE_NAMES
    )


def dump_databases(databases, log_prefix, location_config, dry_run):
    '''
    Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
    dicts, one dict describing each database as per the configuration schema. Use the given log
    prefix in any log entries. Use the given location configuration dict to construct the
    destination path.

    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
    pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.

    Raise ValueError if the databases to dump cannot be determined.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
    processes = []

    logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label))

    for database in databases:
        extra_environment = make_extra_environment(database)
        dump_path = make_dump_path(location_config)
        dump_database_names = database_names_to_dump(
            database, extra_environment, log_prefix, dry_run
        )

        if not dump_database_names:
            if dry_run:
                continue

            raise ValueError('Cannot find any PostgreSQL databases to dump.')

        for database_name in dump_database_names:
            dump_format = database.get('format', None if database_name == 'all' else 'custom')
            default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
            dump_command = database.get('pg_dump_command') or default_dump_command
            dump_filename = dump.make_database_dump_filename(
                dump_path, database_name, database.get('hostname')
            )
            if os.path.exists(dump_filename):
                logger.warning(
                    f'{log_prefix}: Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
                )
                continue

            command = (
                (dump_command, '--no-password', '--clean', '--if-exists',)
                + (('--host', database['hostname']) if 'hostname' in database else ())
                + (('--port', str(database['port'])) if 'port' in database else ())
                + (('--username', database['username']) if 'username' in database else ())
                + (('--format', dump_format) if dump_format else ())
                + (('--file', dump_filename) if dump_format == 'directory' else ())
                + (tuple(database['options'].split(' ')) if 'options' in database else ())
                + (() if database_name == 'all' else (database_name,))
                # Use shell redirection rather than the --file flag to sidestep synchronization issues
                # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump
                # format in particular, a named destination is required, and redirection doesn't work.
                + (('>', dump_filename) if dump_format != 'directory' else ())
            )

            logger.debug(
                f'{log_prefix}: Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
            )
            if dry_run:
                continue

            if dump_format == 'directory':
                dump.create_parent_directory_for_dump(dump_filename)
                execute_command(
                    command, shell=True, extra_environment=extra_environment,
                )
            else:
                dump.create_named_pipe_for_dump(dump_filename)
                processes.append(
                    execute_command(
                        command,
                        shell=True,
                        extra_environment=extra_environment,
                        run_to_completion=False,
                    )
                )

    return processes


def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
    '''
    Remove all database dump files for this hook regardless of the given databases. Use the log
    prefix in any log entries. Use the given location configuration dict to construct the
    destination path. If this is a dry run, then don't actually remove anything.
    '''
    dump.remove_database_dumps(make_dump_path(location_config), 'PostgreSQL', log_prefix, dry_run)


def make_database_dump_pattern(
    databases, log_prefix, location_config, name=None
):  # pragma: no cover
    '''
    Given a sequence of configuration dicts, a prefix to log with, a location configuration dict,
    and a database name to match, return the corresponding glob patterns to match the database dump
    in an archive.
    '''
    return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')


def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process):
    '''
    Restore the given PostgreSQL database from an extract stream. The database is supplied as a
    one-element sequence containing a dict describing the database, as per the configuration schema.
    Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
    anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
    output to consume.

    If the extract process is None, then restore the dump from the filesystem rather than from an
    extract stream.
    '''
    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''

    if len(database_config) != 1:
        raise ValueError('The database configuration value is invalid')

    database = database_config[0]
    all_databases = bool(database['name'] == 'all')
    dump_filename = dump.make_database_dump_filename(
        make_dump_path(location_config), database['name'], database.get('hostname')
    )
    psql_command = database.get('psql_command') or 'psql'
    analyze_command = (
        (psql_command, '--no-password', '--quiet')
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--username', database['username']) if 'username' in database else ())
        + (('--dbname', database['name']) if not all_databases else ())
        + (tuple(database['analyze_options'].split(' ')) if 'analyze_options' in database else ())
        + ('--command', 'ANALYZE')
    )
    pg_restore_command = database.get('pg_restore_command') or 'pg_restore'
    restore_command = (
        (psql_command if all_databases else pg_restore_command, '--no-password')
        + (
            ('--if-exists', '--exit-on-error', '--clean', '--dbname', database['name'])
            if not all_databases
            else ()
        )
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--username', database['username']) if 'username' in database else ())
        + (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
        + (() if extract_process else (dump_filename,))
    )
    extra_environment = make_extra_environment(database)

    logger.debug(
        '{}: Restoring PostgreSQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
    )
    if dry_run:
        return

    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
    # if the restore paths don't exist in the archive.
    execute_command_with_processes(
        restore_command,
        [extract_process] if extract_process else [],
        output_log_level=logging.DEBUG,
        input_file=extract_process.stdout if extract_process else None,
        extra_environment=extra_environment,
    )
    execute_command(analyze_command, extra_environment=extra_environment)
125
borgmatic/hooks/sqlite.py
Normal file
@ -0,0 +1,125 @@
import logging
import os

from borgmatic.execute import execute_command, execute_command_with_processes
from borgmatic.hooks import dump

logger = logging.getLogger(__name__)


def make_dump_path(location_config):  # pragma: no cover
    '''
    Make the dump path from the given location configuration and the name of this hook.
    '''
    return dump.make_database_dump_path(
        location_config.get('borgmatic_source_directory'), 'sqlite_databases'
    )


def dump_databases(databases, log_prefix, location_config, dry_run):
    '''
    Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of
    configuration dicts, as per the configuration schema. Use the given log prefix in any log
    entries. Use the given location configuration dict to construct the destination path. If this
    is a dry run, then don't actually dump anything.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
    processes = []

    logger.info('{}: Dumping SQLite databases{}'.format(log_prefix, dry_run_label))

    for database in databases:
        database_path = database['path']

        if database['name'] == 'all':
            logger.warning('The "all" database name has no meaning for SQLite3 databases')
        if not os.path.exists(database_path):
            logger.warning(
                f'{log_prefix}: No SQLite database at {database_path}; an empty database will be created and dumped'
            )

        dump_path = make_dump_path(location_config)
        dump_filename = dump.make_database_dump_filename(dump_path, database['name'])
        if os.path.exists(dump_filename):
            logger.warning(
                f'{log_prefix}: Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
            )
            continue

        # This whole tuple gets run with shell=True below, so the '>' element performs
        # ordinary shell redirection of sqlite3's dump output into the dump file.
        command = (
            'sqlite3',
            database_path,
            '.dump',
            '>',
            dump_filename,
        )
        logger.debug(
            f'{log_prefix}: Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
        )
        if dry_run:
            continue

        dump.create_parent_directory_for_dump(dump_filename)
        processes.append(execute_command(command, shell=True, run_to_completion=False))

    return processes
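Because execute_command() is invoked with shell=True, the command tuple is joined and run as a single shell line. A minimal sketch (not borgmatic source) of the effective command for a hypothetical config entry; the dump path shown is an assumption about make_database_dump_filename's output:

# Illustrative assumption of a config entry and the resulting shell line:
database = {'name': 'mydb', 'path': '/var/lib/app/mydb.sqlite'}
# Effective command (dump path is an assumed example, not a guaranteed location):
#   sqlite3 /var/lib/app/mydb.sqlite .dump > ~/.borgmatic/sqlite_databases/localhost/mydb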
def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
    '''
    Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a
    sequence of configuration dicts, as per the configuration schema. Use the given log prefix in
    any log entries. Use the given location configuration dict to construct the destination path.
    If this is a dry run, then don't actually remove anything.
    '''
    dump.remove_database_dumps(make_dump_path(location_config), 'SQLite', log_prefix, dry_run)


def make_database_dump_pattern(
    databases, log_prefix, location_config, name=None
):  # pragma: no cover
    '''
    Make a pattern that matches the given SQLite3 databases. The databases are supplied as a
    sequence of configuration dicts, as per the configuration schema.
    '''
    return dump.make_database_dump_filename(make_dump_path(location_config), name)


def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process):
    '''
    Restore the given SQLite3 database from an extract stream. The database is supplied as a
    one-element sequence containing a dict describing the database, as per the configuration
    schema. Use the given log prefix in any log entries. If this is a dry run, then don't
    actually restore anything. Trigger the given active extract process (an instance of
    subprocess.Popen) to produce output to consume.
    '''
    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''

    if len(database_config) != 1:
        raise ValueError('The database configuration value is invalid')

    database_path = database_config[0]['path']

    logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}')
    if dry_run:
        return

    try:
        os.remove(database_path)
        logger.warning(f'{log_prefix}: Removed existing SQLite database at {database_path}')
    except FileNotFoundError:  # pragma: no cover
        pass

    restore_command = (
        'sqlite3',
        database_path,
    )

    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
    # if the restore paths don't exist in the archive.
    execute_command_with_processes(
        restore_command,
        [extract_process],
        output_log_level=logging.DEBUG,
        input_file=extract_process.stdout,
    )
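A minimal sketch (not borgmatic source) of the equivalent plumbing: restoring boils down to piping a Borg extract stream into the sqlite3 CLI. The repository, archive, and paths below are illustrative assumptions:

import subprocess

# Hypothetical repository, archive, and paths for illustration only.
extract = subprocess.Popen(
    ('borg', 'extract', '--stdout', 'repo::archive',
     'root/.borgmatic/sqlite_databases/localhost/mydb'),
    stdout=subprocess.PIPE,
)
# Feed the extracted dump SQL straight into the sqlite3 CLI, recreating the database.
subprocess.run(('sqlite3', '/var/lib/app/mydb.sqlite'), stdin=extract.stdout, check=True)
extract.wait()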
217  borgmatic/logger.py  Normal file
@@ -0,0 +1,217 @@
import logging
import logging.handlers
import os
import sys

import colorama


def to_bool(arg):
    '''
    Return a boolean value based on `arg`.
    '''
    if arg is None or isinstance(arg, bool):
        return arg

    if isinstance(arg, str):
        arg = arg.lower()

    if arg in ('yes', 'on', '1', 'true', 1):
        return True

    return False


def interactive_console():
    '''
    Return whether the current console is "interactive", meaning capable of user input and not
    just something like a cron job.
    '''
    return sys.stderr.isatty() and os.environ.get('TERM') != 'dumb'


def should_do_markup(no_color, configs):
    '''
    Given the value of the command-line no-color argument, and a dict of configuration filename to
    corresponding parsed configuration, determine whether we should enable colorama markup.
    '''
    if no_color:
        return False

    if any(config.get('output', {}).get('color') is False for config in configs.values()):
        return False

    py_colors = os.environ.get('PY_COLORS', None)

    if py_colors is not None:
        return to_bool(py_colors)

    return interactive_console()
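A minimal sketch (not borgmatic source) of the precedence implemented above, from highest to lowest: the --no-color flag, any config file's output color option, the PY_COLORS environment variable, and finally console interactivity:

assert to_bool('YES') is True          # case-insensitive string handling
assert to_bool('0') is False
assert should_do_markup(no_color=True, configs={}) is False
assert should_do_markup(False, {'config.yaml': {'output': {'color': False}}}) is False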
class Multi_stream_handler(logging.Handler):
    '''
    A logging handler that dispatches each log record to one of multiple stream handlers depending
    on the record's log level.
    '''

    def __init__(self, log_level_to_stream_handler):
        super(Multi_stream_handler, self).__init__()
        self.log_level_to_handler = log_level_to_stream_handler
        self.handlers = set(self.log_level_to_handler.values())

    def flush(self):  # pragma: no cover
        super(Multi_stream_handler, self).flush()

        for handler in self.handlers:
            handler.flush()

    def emit(self, record):
        '''
        Dispatch the log record to the appropriate stream handler for the record's log level.
        '''
        self.log_level_to_handler[record.levelno].emit(record)

    def setFormatter(self, formatter):  # pragma: no cover
        super(Multi_stream_handler, self).setFormatter(formatter)

        for handler in self.handlers:
            handler.setFormatter(formatter)

    def setLevel(self, level):  # pragma: no cover
        super(Multi_stream_handler, self).setLevel(level)

        for handler in self.handlers:
            handler.setLevel(level)
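A minimal sketch (not borgmatic source) of using this handler to route errors to stderr and everything else to stdout, which is how configure_logging() below employs it. Note that emit() looks up the record's exact level number, so any level missing from the dict would raise a KeyError:

import logging
import sys

handler = Multi_stream_handler(
    {
        logging.ERROR: logging.StreamHandler(sys.stderr),
        logging.INFO: logging.StreamHandler(sys.stdout),
    }
)
logging.basicConfig(level=logging.INFO, handlers=[handler])
logging.getLogger(__name__).info('goes to stdout')
logging.getLogger(__name__).error('goes to stderr')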
class Console_color_formatter(logging.Formatter):
    def format(self, record):
        add_custom_log_levels()

        color = {
            logging.CRITICAL: colorama.Fore.RED,
            logging.ERROR: colorama.Fore.RED,
            logging.WARN: colorama.Fore.YELLOW,
            logging.ANSWER: colorama.Fore.MAGENTA,
            logging.INFO: colorama.Fore.GREEN,
            logging.DEBUG: colorama.Fore.CYAN,
        }.get(record.levelno)

        return color_text(color, record.msg)


def color_text(color, message):
    '''
    Give colored text.
    '''
    if not color:
        return message

    return '{}{}{}'.format(color, message, colorama.Style.RESET_ALL)
def add_logging_level(level_name, level_number):
    '''
    Globally add a custom logging level based on the given (all uppercase) level name and number.
    Do this idempotently.

    Inspired by https://stackoverflow.com/questions/2183233/how-to-add-a-custom-loglevel-to-pythons-logging-facility/35804945#35804945
    '''
    method_name = level_name.lower()

    if not hasattr(logging, level_name):
        logging.addLevelName(level_number, level_name)
        setattr(logging, level_name, level_number)

    if not hasattr(logging, method_name):

        def log_for_level(self, message, *args, **kwargs):  # pragma: no cover
            if self.isEnabledFor(level_number):
                self._log(level_number, message, args, **kwargs)

        setattr(logging.getLoggerClass(), method_name, log_for_level)

    if not hasattr(logging.getLoggerClass(), method_name):

        def log_to_root(message, *args, **kwargs):  # pragma: no cover
            logging.log(level_number, message, *args, **kwargs)

        setattr(logging, method_name, log_to_root)


ANSWER = logging.WARN - 5


def add_custom_log_levels():  # pragma: no cover
    '''
    Add a custom log level between WARN and INFO for user-requested answers.
    '''
    add_logging_level('ANSWER', ANSWER)
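A minimal sketch (not borgmatic source) of the custom level in use: once add_custom_log_levels() has run, logging.ANSWER exists and every logger gains an answer() method via the class-level setattr above:

add_custom_log_levels()

assert logging.INFO < logging.ANSWER < logging.WARN  # ANSWER sits at WARN - 5
logging.getLogger(__name__).answer('archive listing the user asked for')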
def configure_logging(
    console_log_level,
    syslog_log_level=None,
    log_file_log_level=None,
    monitoring_log_level=None,
    log_file=None,
):
    '''
    Configure logging to go both to the console and to either syslog or a log file. Use the given
    log levels, respectively.

    Raise FileNotFoundError or PermissionError if the log file could not be opened for writing.
    '''
    if syslog_log_level is None:
        syslog_log_level = console_log_level
    if log_file_log_level is None:
        log_file_log_level = console_log_level
    if monitoring_log_level is None:
        monitoring_log_level = console_log_level

    add_custom_log_levels()

    # Log certain log levels to console stderr and others to stdout. This supports use cases like
    # grepping (non-error) output.
    console_error_handler = logging.StreamHandler(sys.stderr)
    console_standard_handler = logging.StreamHandler(sys.stdout)
    console_handler = Multi_stream_handler(
        {
            logging.CRITICAL: console_error_handler,
            logging.ERROR: console_error_handler,
            logging.WARN: console_error_handler,
            logging.ANSWER: console_standard_handler,
            logging.INFO: console_standard_handler,
            logging.DEBUG: console_standard_handler,
        }
    )
    console_handler.setFormatter(Console_color_formatter())
    console_handler.setLevel(console_log_level)

    syslog_path = None
    if log_file is None:
        if os.path.exists('/dev/log'):
            syslog_path = '/dev/log'
        elif os.path.exists('/var/run/syslog'):
            syslog_path = '/var/run/syslog'
        elif os.path.exists('/var/run/log'):
            syslog_path = '/var/run/log'

    if syslog_path and not interactive_console():
        syslog_handler = logging.handlers.SysLogHandler(address=syslog_path)
        syslog_handler.setFormatter(logging.Formatter('borgmatic: %(levelname)s %(message)s'))
        syslog_handler.setLevel(syslog_log_level)
        handlers = (console_handler, syslog_handler)
    elif log_file:
        file_handler = logging.handlers.WatchedFileHandler(log_file)
        file_handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s'))
        file_handler.setLevel(log_file_log_level)
        handlers = (console_handler, file_handler)
    else:
        handlers = (console_handler,)

    logging.basicConfig(
        level=min(console_log_level, syslog_log_level, log_file_log_level, monitoring_log_level),
        handlers=handlers,
    )
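A minimal sketch (not borgmatic source) of calling configure_logging() for console output at INFO plus a DEBUG-level log file; the file path is an illustrative assumption:

configure_logging(
    console_log_level=logging.INFO,
    log_file_log_level=logging.DEBUG,
    log_file='/tmp/borgmatic.log',  # illustrative assumption, not a borgmatic default
)
# ANSWER/INFO/DEBUG records go to stdout; CRITICAL/ERROR/WARN records go to stderr.
logging.getLogger(__name__).info('shows up on stdout and in the log file')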
34  borgmatic/signals.py  Normal file
@@ -0,0 +1,34 @@
import logging
import os
import signal
import sys

logger = logging.getLogger(__name__)


EXIT_CODE_FROM_SIGNAL = 128


def handle_signal(signal_number, frame):
    '''
    Send the signal to all processes in borgmatic's process group, which includes child processes.
    '''
    # Prevent infinite signal handler recursion. If the parent frame is this very same handler
    # function, we know we're recursing.
    if frame.f_back.f_code.co_name == handle_signal.__name__:
        return

    os.killpg(os.getpgrp(), signal_number)

    if signal_number == signal.SIGTERM:
        logger.critical('Exiting due to TERM signal')
        sys.exit(EXIT_CODE_FROM_SIGNAL + signal.SIGTERM)


def configure_signals():
    '''
    Configure borgmatic's signal handlers to pass relevant signals through to any child processes
    like Borg. Note that SIGINT gets passed through even without these changes.
    '''
    for signal_number in (signal.SIGHUP, signal.SIGTERM, signal.SIGUSR1, signal.SIGUSR2):
        signal.signal(signal_number, handle_signal)
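A minimal sketch (not borgmatic source) of the passthrough behavior: after configure_signals(), a SIGTERM sent to borgmatic is re-sent to the whole process group, so child processes receive it too. The sleep below stands in for Borg:

import subprocess

configure_signals()
child = subprocess.Popen(['sleep', '60'])  # inherits borgmatic's process group
child.wait()  # a SIGTERM sent to this process now also terminates the sleep child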
22  borgmatic/verbosity.py  Normal file
@@ -0,0 +1,22 @@
import logging

import borgmatic.logger

VERBOSITY_ERROR = -1
VERBOSITY_ANSWER = 0
VERBOSITY_SOME = 1
VERBOSITY_LOTS = 2


def verbosity_to_log_level(verbosity):
    '''
    Given a borgmatic verbosity value, return the corresponding Python log level.
    '''
    borgmatic.logger.add_custom_log_levels()

    return {
        VERBOSITY_ERROR: logging.ERROR,
        VERBOSITY_ANSWER: logging.ANSWER,
        VERBOSITY_SOME: logging.INFO,
        VERBOSITY_LOTS: logging.DEBUG,
    }.get(verbosity, logging.WARNING)
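A minimal sketch (not borgmatic source) of wiring a --verbosity flag value through to logging configuration:

assert verbosity_to_log_level(VERBOSITY_SOME) == logging.INFO
assert verbosity_to_log_level(99) == logging.WARNING  # unknown values fall back to WARNING

borgmatic.logger.configure_logging(console_log_level=verbosity_to_log_level(VERBOSITY_SOME))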
33  docs/Dockerfile  Normal file
@@ -0,0 +1,33 @@
FROM alpine:3.17.1 as borgmatic

COPY . /app
RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib
RUN pip install --no-cache /app && generate-borgmatic-config && chmod +r /etc/borgmatic/config.yaml
RUN borgmatic --help > /command-line.txt \
    && for action in rcreate transfer create prune compact check extract export-tar mount umount restore rlist list rinfo info break-lock borg; do \
           echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \
           && borgmatic "$action" --help >> /command-line.txt; done

FROM node:19.5.0-alpine as html

ARG ENVIRONMENT=production

WORKDIR /source

RUN npm install @11ty/eleventy \
    @11ty/eleventy-plugin-syntaxhighlight \
    @11ty/eleventy-plugin-inclusive-language \
    @11ty/eleventy-navigation \
    markdown-it \
    markdown-it-anchor \
    markdown-it-replace-link
COPY --from=borgmatic /etc/borgmatic/config.yaml /source/docs/_includes/borgmatic/config.yaml
COPY --from=borgmatic /command-line.txt /source/docs/_includes/borgmatic/command-line.txt
COPY . /source
RUN NODE_ENV=${ENVIRONMENT} npx eleventy --input=/source/docs --output=/output/docs \
    && mv /output/docs/index.html /output/index.html

FROM nginx:1.22.1-alpine

COPY --from=html /output /usr/share/nginx/html
COPY --from=borgmatic /etc/borgmatic/config.yaml /usr/share/nginx/html/docs/reference/config.yaml
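Presumably this image is built from the repository root so that the initial `COPY . /app` can see the borgmatic source, along the lines of `docker build --file docs/Dockerfile .` (the exact invocation is an assumption, not part of this diff); the final nginx stage then serves the rendered documentation along with the generated reference config.yaml.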
1  docs/README.md  Symbolic link
@@ -0,0 +1 @@
../README.md
19  docs/SECURITY.md  Normal file
@@ -0,0 +1,19 @@
---
title: Security policy
permalink: security-policy/index.html
---

## Supported versions

While we want to hear about security vulnerabilities in all versions of
borgmatic, security fixes will only be made to the most recently released
version. It's not practical for our small volunteer effort to maintain
multiple different release branches and put out separate security patches for
each.

## Reporting a vulnerability

If you find a security vulnerability, please [file a
ticket](https://torsion.org/borgmatic/#issues) or [send email
directly](mailto:witten@torsion.org) as appropriate. You should expect to hear
back within a few days at most, and generally sooner.
1  docs/_data/layout.json  Normal file
@@ -0,0 +1 @@
"layouts/main.njk"
3  docs/_includes/asciinema.css  Normal file
@@ -0,0 +1,3 @@
.asciicast > iframe {
    width: 100% !important;
}
12  docs/_includes/components/external-links.css  Normal file
@@ -0,0 +1,12 @@
/* External links */
a[href^="http://"]:not(.minilink):not(.elv-externalexempt),
a[href^="https://"]:not(.minilink):not(.elv-externalexempt),
a[href^="//"]:not(.minilink):not(.elv-externalexempt) {
    text-decoration-color: inherit;
}
/* External link hovers */
a[href^="http://"]:not(.minilink):not(.elv-externalexempt):hover,
a[href^="https://"]:not(.minilink):not(.elv-externalexempt):hover,
a[href^="//"]:not(.minilink):not(.elv-externalexempt):hover {
    text-decoration-color: #00bcd4;
}