Compare commits
122 Commits
cfff6c6855
37efaeae88
0978c669ad
1366269586
a9a0910817
5bcc7b60c8
84a0552277
d4a02f73b5
3f901c0a52
b5b5c1fafa
86e5085acc
08a5e8717b
6b2f2b2ac4
a07cf9e699
bf40b01077
a5c6a2fe1c
82141fe981
228a83978d
638db3770b
98df5c3af2
b0e906c0e7
e8dccbf1c1
4a997bc234
3197178b3d
5e618154d0
84f611ae4f
5dc8450c8e
689643e5fa
0a3d87eaea
b45b62cd38
8de7094691
8c7e68305e
65a323433c
b5a3589471
f4a736bdfe
eab0ec15ef
c65aa24001
5a24bf2037
324dbc3a79
9fe7db320a
4d19596616
5cec2bf3d9
06e0f98fd8
87f36caf8d
ab7acceff6
1b2b0c3020
289d178581
1e7f6d9f41
d0c90389fb
f9e920dce9
0ed52bbc4a
da8278b566
2af3522902
5e4784991a
ab43ef00ce
47a8a95b29
7c90c04ce0
97305cc3ce
4985b805b4
d09b4c72a9
9807549f88
30c821120e
13884bd448
6bce4c4a0d
25572c98d7
dab0dfcb32
851c454ef0
c7a0cebaf7
76cfeda290
afdf831c59
9ac3087304
7cca83b698
4b5df7117a
57decfa4db
b80f60a731
8f5ea95348
b0cad58d6c
073d6bddf6
810b65589f
295bfb0c57
5f3d4f9b03
5321301708
a939a66fb4
c0721a8cad
ea47704d86
61e4eeff6c
3ab4b45041
4e1f256b48
96bb402837
97949266b3
e69d2385fc
6d9340ebb2
0441e79b41
b1af304125
eb8f7e0329
bf978f2db4
ef66349674
51b885e7db
1781787305
46ebb0cebb
3e0fa57860
59f8722e05
4ba42e8905
3b79482b24
7eb19cb0a7
a4fabb8521
85ea8f4f45
290559116d
72b27b0858
0fdee067c7
0dca5eeafc
02ce3ba190
dc78bf4d6b
4b7fbce291
1817b9a9ea
009055c61a
54884da8fa
1177385e08
a45ba8553c
d7d6e30178
94db527500
2849f54932
NEWS (35 changed lines)

@@ -1,3 +1,38 @@
+1.9.4.dev0
+ * #251 (beta): Add a Btrfs hook for snapshotting and backing up Btrfs subvolumes. See the
+   documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/snapshot-your-filesystems/
+ * #926: Fix library error when running within a PyInstaller bundle.
+ * Reorganize data source and monitoring hooks to make developing new hooks easier.
+
+1.9.3
+ * #261 (beta): Add a ZFS hook for snapshotting and backing up ZFS datasets. See the documentation
+   for more information: https://torsion.org/borgmatic/docs/how-to/snapshot-your-filesystems/
+ * Remove any temporary copies of the manifest file created in support of the "bootstrap" action.
+ * Deprecate the "store_config_files" option at the global scope and move it under the "bootstrap"
+   hook. See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/extract-a-backup/#extract-the-configuration-files-used-to-create-an-archive
+ * Require the runtime directory to be an absolute path.
+ * Add a "--deleted" flag to the "repo-list" action for listing deleted archives that haven't
+   yet been compacted (Borg 2 only).
+ * Promote the "spot" check from a beta feature to stable.
+
+1.9.2
+ * #441: Apply the "umask" option to all relevant actions, not just some of them.
+ * #722: Remove the restriction that the "extract" and "mount" actions must match a single
+   repository. Now they work more like other actions, where each repository is applied in turn.
+ * #932: Fix the missing build backend setting in pyproject.toml to allow Fedora builds.
+ * #934: Update the logic that probes for the borgmatic streaming database dump, bootstrap
+   metadata, and check state directories to support more platforms and use cases. See the
+   documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#runtime-directory
+ * #934: Add the "RuntimeDirectory" and "StateDirectory" options to the sample systemd service
+   file to support the new runtime and state directory logic.
+ * #939: Fix borgmatic ignoring the "BORG_RELOCATED_REPO_ACCESS_IS_OK" and
+   "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK" environment variables.
+ * Add a Pushover monitoring hook. See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pushover-hook
+
 1.9.1
  * #928: Fix the user runtime directory location on macOS (and possibly Cygwin).
  * #930: Fix an error with the sample systemd service when no credentials are configured.
README.md

@@ -61,11 +61,15 @@ borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).
 <a href="https://mariadb.com/"><img src="docs/static/mariadb.png" alt="MariaDB" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://www.mongodb.com/"><img src="docs/static/mongodb.png" alt="MongoDB" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://sqlite.org/"><img src="docs/static/sqlite.png" alt="SQLite" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
+<a href="https://openzfs.org/"><img src="docs/static/openzfs.png" alt="OpenZFS" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
+<a href="https://btrfs.readthedocs.io/"><img src="docs/static/btrfs.png" alt="Btrfs" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
+<a href="https://rclone.org"><img src="docs/static/rclone.png" alt="rclone" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://healthchecks.io/"><img src="docs/static/healthchecks.png" alt="Healthchecks" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://uptime.kuma.pet/"><img src="docs/static/uptimekuma.png" alt="Uptime Kuma" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://cronitor.io/"><img src="docs/static/cronitor.png" alt="Cronitor" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://cronhub.io/"><img src="docs/static/cronhub.png" alt="Cronhub" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://www.pagerduty.com/"><img src="docs/static/pagerduty.png" alt="PagerDuty" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
+<a href="https://www.pushover.net/"><img src="docs/static/pushover.png" alt="Pushover" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://ntfy.sh/"><img src="docs/static/ntfy.png" alt="ntfy" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://grafana.com/oss/loki/"><img src="docs/static/loki.png" alt="Loki" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
 <a href="https://github.com/caronc/apprise/wiki"><img src="docs/static/apprise.png" alt="Apprise" height="60px" style="margin-bottom:20px; margin-right:20px;"></a>
borgmatic/actions/check.py

@@ -8,6 +8,7 @@ import pathlib
 import random
 import shutil

+import borgmatic.actions.create
 import borgmatic.borg.check
 import borgmatic.borg.create
 import borgmatic.borg.environment
@@ -345,7 +346,13 @@ def upgrade_check_times(config, borg_repository_id):


 def collect_spot_check_source_paths(
-    repository, config, local_borg_version, global_arguments, local_path, remote_path
+    repository,
+    config,
+    local_borg_version,
+    global_arguments,
+    local_path,
+    remote_path,
+    borgmatic_runtime_directory,
 ):
     '''
     Given a repository configuration dict, a configuration dict, the local Borg version, global
@@ -357,7 +364,7 @@ def collect_spot_check_source_paths(
             'use_streaming',
             config,
             repository['path'],
-            borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
+            borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
         ).values()
     )

@@ -366,10 +373,12 @@ def collect_spot_check_source_paths(
         dry_run=True,
         repository_path=repository['path'],
         config=config,
-        config_paths=(),
+        source_directories=borgmatic.actions.create.process_source_directories(
+            config,
+        ),
         local_borg_version=local_borg_version,
         global_arguments=global_arguments,
-        borgmatic_runtime_directories=(),
+        borgmatic_runtime_directory=borgmatic_runtime_directory,
         local_path=local_path,
         remote_path=remote_path,
         list_files=True,
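The flags above run "borg create" in dry-run mode with file listing enabled so the spot check can enumerate candidate source paths without writing an archive. As a standalone sketch of the kind of post-processing this enables (the parsing below is an assumption for illustration, apart from the BORG_DIRECTORY_FILE_TYPE constant that appears in the next hunk):

    # Each listed line looks like "<status> <path>"; a "d" status marks a directory.
    BORG_DIRECTORY_FILE_TYPE = 'd'

    def collect_file_paths(list_output):
        # Keep only non-directory paths from Borg's file list.
        return tuple(
            path
            for line in list_output.splitlines()
            for (file_type, _, path) in (line.partition(' '),)
            if file_type != BORG_DIRECTORY_FILE_TYPE
        )

    print(collect_file_paths('d /etc\n- /etc/passwd'))  # ('/etc/passwd',)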
@@ -403,16 +412,22 @@ BORG_DIRECTORY_FILE_TYPE = 'd'


 def collect_spot_check_archive_paths(
-    repository, archive, config, local_borg_version, global_arguments, local_path, remote_path
+    repository,
+    archive,
+    config,
+    local_borg_version,
+    global_arguments,
+    local_path,
+    remote_path,
+    borgmatic_runtime_directory,
 ):
     '''
     Given a repository configuration dict, the name of the latest archive, a configuration dict, the
-    local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, and
-    the remote Borg path, collect the paths from the given archive (but only include files and
-    symlinks and exclude borgmatic runtime directories).
+    local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the
+    remote Borg path, and the borgmatic runtime directory, collect the paths from the given archive
+    (but only include files and symlinks and exclude borgmatic runtime directories).
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
-    borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)

     return tuple(
         path
@@ -445,7 +460,7 @@ def compare_spot_check_hashes(
     global_arguments,
     local_path,
     remote_path,
-    log_label,
+    log_prefix,
     source_paths,
 ):
     '''
@@ -469,7 +484,7 @@ def compare_spot_check_hashes(
         if os.path.exists(os.path.join(working_directory or '', source_path))
     }
     logger.debug(
-        f'{log_label}: Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check'
+        f'{log_prefix}: Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check'
    )

     source_sample_paths_iterator = iter(source_sample_paths)
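For reference, the sample count logged here comes from the configured "data_sample_percentage". A simplified, self-contained sketch of that sampling math (an assumption consistent with the log message, not code taken from this diff):

    import random

    def sample_source_paths(source_paths, data_sample_percentage):
        # Sample the configured percentage, but always at least one path and
        # never more than the whole population.
        sample_count = min(
            len(source_paths),
            max(1, int(len(source_paths) * data_sample_percentage / 100)),
        )
        return set(random.sample(sorted(source_paths), sample_count))

    print(len(sample_source_paths({f'/file{index}' for index in range(1000)}, 1)))  # 10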
@@ -546,18 +561,19 @@ def spot_check(
     global_arguments,
     local_path,
     remote_path,
+    borgmatic_runtime_directory,
 ):
     '''
     Given a repository dict, a loaded configuration dict, the local Borg version, global arguments
-    as an argparse.Namespace instance, the local Borg path, and the remote Borg path, perform a spot
-    check for the latest archive in the given repository.
+    as an argparse.Namespace instance, the local Borg path, the remote Borg path, and the borgmatic
+    runtime directory, perform a spot check for the latest archive in the given repository.

     A spot check compares file counts and also the hashes for a random sampling of source files on
     disk to those stored in the latest archive. If any differences are beyond configured tolerances,
     then the check fails.
     '''
-    log_label = f'{repository.get("label", repository["path"])}'
-    logger.debug(f'{log_label}: Running spot check')
+    log_prefix = f'{repository.get("label", repository["path"])}'
+    logger.debug(f'{log_prefix}: Running spot check')

     try:
         spot_check_config = next(
@@ -578,8 +594,9 @@ def spot_check(
         global_arguments,
         local_path,
         remote_path,
+        borgmatic_runtime_directory,
     )
-    logger.debug(f'{log_label}: {len(source_paths)} total source paths for spot check')
+    logger.debug(f'{log_prefix}: {len(source_paths)} total source paths for spot check')

     archive = borgmatic.borg.repo_list.resolve_archive_name(
         repository['path'],
@@ -590,7 +607,7 @@ def spot_check(
         local_path,
         remote_path,
     )
-    logger.debug(f'{log_label}: Using archive {archive} for spot check')
+    logger.debug(f'{log_prefix}: Using archive {archive} for spot check')

     archive_paths = collect_spot_check_archive_paths(
         repository,
@@ -600,8 +617,9 @@ def spot_check(
         global_arguments,
         local_path,
         remote_path,
+        borgmatic_runtime_directory,
     )
-    logger.debug(f'{log_label}: {len(archive_paths)} total archive paths for spot check')
+    logger.debug(f'{log_prefix}: {len(archive_paths)} total archive paths for spot check')

     # Calculate the percentage delta between the source paths count and the archive paths count, and
     # compare that delta to the configured count tolerance percentage.
@@ -609,10 +627,10 @@ def spot_check(

     if count_delta_percentage > spot_check_config['count_tolerance_percentage']:
         logger.debug(
-            f'{log_label}: Paths in source paths but not latest archive: {", ".join(set(source_paths) - set(archive_paths)) or "none"}'
+            f'{log_prefix}: Paths in source paths but not latest archive: {", ".join(set(source_paths) - set(archive_paths)) or "none"}'
         )
         logger.debug(
-            f'{log_label}: Paths in latest archive but not source paths: {", ".join(set(archive_paths) - set(source_paths)) or "none"}'
+            f'{log_prefix}: Paths in latest archive but not source paths: {", ".join(set(archive_paths) - set(source_paths)) or "none"}'
         )
         raise ValueError(
             f'Spot check failed: {count_delta_percentage:.2f}% file count delta between source paths and latest archive (tolerance is {spot_check_config["count_tolerance_percentage"]}%)'
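Worked example of the failure condition above, with invented numbers (the delta formula is an assumption consistent with the log messages, not code shown in this hunk):

    source_paths_count = 1000  # paths found on disk via the dry-run create
    archive_paths_count = 980  # paths listed in the latest archive
    count_delta_percentage = (
        abs(source_paths_count - archive_paths_count) / source_paths_count * 100
    )
    print(f'{count_delta_percentage:.2f}%')  # 2.00%: passes a count_tolerance_percentage of 10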
@@ -626,25 +644,25 @@ def spot_check(
         global_arguments,
         local_path,
         remote_path,
-        log_label,
+        log_prefix,
         source_paths,
     )

     # Error if the percentage of failing hashes exceeds the configured tolerance percentage.
-    logger.debug(f'{log_label}: {len(failing_paths)} non-matching spot check hashes')
+    logger.debug(f'{log_prefix}: {len(failing_paths)} non-matching spot check hashes')
     data_tolerance_percentage = spot_check_config['data_tolerance_percentage']
     failing_percentage = (len(failing_paths) / len(source_paths)) * 100

     if failing_percentage > data_tolerance_percentage:
         logger.debug(
-            f'{log_label}: Source paths with data not matching the latest archive: {", ".join(failing_paths)}'
+            f'{log_prefix}: Source paths with data not matching the latest archive: {", ".join(failing_paths)}'
         )
         raise ValueError(
             f'Spot check failed: {failing_percentage:.2f}% of source paths with data not matching the latest archive (tolerance is {data_tolerance_percentage}%)'
         )

     logger.info(
-        f'{log_label}: Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta'
+        f'{log_prefix}: Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta'
     )
@@ -678,7 +696,9 @@ def run_check(
         **hook_context,
     )

-    logger.info(f'{repository.get("label", repository["path"])}: Running consistency checks')
+    log_prefix = repository.get('label', repository['path'])
+    logger.info(f'{log_prefix}: Running consistency checks')

     repository_id = borgmatic.borg.check.get_repository_id(
         repository['path'],
         config,
@@ -730,14 +750,18 @@ def run_check(
         write_check_time(make_check_time_path(config, repository_id, 'extract'))

     if 'spot' in checks:
-        spot_check(
-            repository,
-            config,
-            local_borg_version,
-            global_arguments,
-            local_path,
-            remote_path,
-        )
+        with borgmatic.config.paths.Runtime_directory(
+            config, log_prefix
+        ) as borgmatic_runtime_directory:
+            spot_check(
+                repository,
+                config,
+                local_borg_version,
+                global_arguments,
+                local_path,
+                remote_path,
+                borgmatic_runtime_directory,
+            )
         write_check_time(make_check_time_path(config, repository_id, 'spot'))

     borgmatic.hooks.command.execute_hook(
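The "spot" check now runs inside a Runtime_directory context manager rather than computing the runtime directory ad hoc. The class itself is not part of this diff; a minimal sketch of the shape it plausibly has, assuming a configured directory is reused in place while an unconfigured one falls back to a self-cleaning temporary directory:

    import contextlib
    import os
    import tempfile

    @contextlib.contextmanager
    def runtime_directory(config, log_prefix):
        configured = config.get('user_runtime_directory')

        if configured:
            # Use the configured location and leave it in place on exit.
            path = os.path.join(configured, 'borgmatic')
            os.makedirs(path, mode=0o700, exist_ok=True)
            yield path
        else:
            # Fall back to a temporary directory that's removed on exit.
            with tempfile.TemporaryDirectory(prefix='borgmatic-') as temporary:
                yield temporary

    with runtime_directory({}, 'repo.borg') as borgmatic_runtime_directory:
        print(borgmatic_runtime_directory)  # e.g. /tmp/borgmatic-_x3x2f1q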
borgmatic/actions/config/bootstrap.py

@@ -38,37 +38,44 @@ def get_config_paths(archive_name, bootstrap_arguments, global_arguments, local_
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(
         {'borgmatic_source_directory': bootstrap_arguments.borgmatic_source_directory}
     )
-    borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(
-        {'user_runtime_directory': bootstrap_arguments.user_runtime_directory}
-    )
     config = make_bootstrap_config(bootstrap_arguments)

     # Probe for the manifest file in multiple locations, as the default location has moved to the
     # borgmatic runtime directory (which get stored as just "/borgmatic" with Borg 1.4+). But we
     # still want to support reading the manifest from previously created archives as well.
-    for base_directory in ('borgmatic', borgmatic_runtime_directory, borgmatic_source_directory):
-        borgmatic_manifest_path = os.path.join(base_directory, 'bootstrap', 'manifest.json')
+    with borgmatic.config.paths.Runtime_directory(
+        {'user_runtime_directory': bootstrap_arguments.user_runtime_directory},
+        bootstrap_arguments.repository,
+    ) as borgmatic_runtime_directory:
+        for base_directory in (
+            'borgmatic',
+            borgmatic.config.paths.make_runtime_directory_glob(borgmatic_runtime_directory),
+            borgmatic_source_directory,
+        ):
+            borgmatic_manifest_path = 'sh:' + os.path.join(
+                base_directory, 'bootstrap', 'manifest.json'
+            )

-        extract_process = borgmatic.borg.extract.extract_archive(
-            global_arguments.dry_run,
-            bootstrap_arguments.repository,
-            archive_name,
-            [borgmatic_manifest_path],
-            config,
-            local_borg_version,
-            global_arguments,
-            local_path=bootstrap_arguments.local_path,
-            remote_path=bootstrap_arguments.remote_path,
-            extract_to_stdout=True,
-        )
-        manifest_json = extract_process.stdout.read()
+            extract_process = borgmatic.borg.extract.extract_archive(
+                global_arguments.dry_run,
+                bootstrap_arguments.repository,
+                archive_name,
+                [borgmatic_manifest_path],
+                config,
+                local_borg_version,
+                global_arguments,
+                local_path=bootstrap_arguments.local_path,
+                remote_path=bootstrap_arguments.remote_path,
+                extract_to_stdout=True,
+            )
+            manifest_json = extract_process.stdout.read()

-        if manifest_json:
-            break
-    else:
-        raise ValueError(
-            'Cannot read configuration paths from archive due to missing bootstrap manifest'
-        )
+            if manifest_json:
+                break
+        else:
+            raise ValueError(
+                'Cannot read configuration paths from archive due to missing bootstrap manifest'
+            )

     try:
         manifest_data = json.loads(manifest_json)
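This rewrite probes three manifest locations in order: the bare "borgmatic" prefix that Borg 1.4+ archives store, a glob over the new runtime directory, and the legacy borgmatic source directory, with the for/else raising only when every candidate fails. A standalone illustration of the same fallback control flow (reading local files rather than extracting from an archive, with invented paths):

    import json
    import os

    def read_first_manifest(base_directories):
        for base_directory in base_directories:
            manifest_path = os.path.join(base_directory, 'bootstrap', 'manifest.json')

            try:
                with open(manifest_path) as manifest_file:
                    return json.load(manifest_file)
            except (FileNotFoundError, json.JSONDecodeError):
                continue  # Try the next historical manifest location.

        raise ValueError(
            'Cannot read configuration paths from archive due to missing bootstrap manifest'
        )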
borgmatic/actions/create.py

@@ -1,7 +1,8 @@
 import importlib.metadata
-import json
+import glob
+import itertools
 import logging
 import os
 import pathlib

 import borgmatic.actions.json
 import borgmatic.borg.create
@@ -9,35 +10,137 @@ import borgmatic.config.paths
 import borgmatic.config.validate
 import borgmatic.hooks.command
 import borgmatic.hooks.dispatch
-import borgmatic.hooks.dump

 logger = logging.getLogger(__name__)


-def create_borgmatic_manifest(config, config_paths, dry_run):
+def expand_directory(directory, working_directory):
     '''
-    Create a borgmatic manifest file to store the paths to the configuration files used to create
-    the archive.
+    Given a directory path, expand any tilde (representing a user's home directory) and any globs
+    therein. Return a list of one or more resulting paths.
     '''
-    if dry_run:
-        return
+    expanded_directory = os.path.join(working_directory or '', os.path.expanduser(directory))

-    borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)
-    borgmatic_manifest_path = os.path.join(
-        borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
-    )
+    return glob.glob(expanded_directory) or [expanded_directory]
+
+
+def expand_directories(directories, working_directory=None):
+    '''
+    Given a sequence of directory paths and an optional working directory, expand tildes and globs
+    in each one. Return all the resulting directories as a single flattened tuple.
+    '''
+    if directories is None:
+        return ()
+
+    return tuple(
+        itertools.chain.from_iterable(
+            expand_directory(directory, working_directory) for directory in directories
+        )
+    )
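Hypothetical usage of the two helpers above (paths invented): tildes expand to the user's home directory, matching globs fan out to multiple paths, and a non-matching glob falls back to the literal expanded path thanks to the "or [expanded_directory]" clause:

    print(expand_directory('~/projects', working_directory=None))
    # e.g. ['/home/user/projects']

    print(expand_directories(('/var/log/*.d', '~/mail')))
    # e.g. ('/var/log/apt.d', '/var/log/sysstat.d', '/home/user/mail')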

-    if not os.path.exists(borgmatic_manifest_path):
-        os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True)
-
-    with open(borgmatic_manifest_path, 'w') as config_list_file:
-        json.dump(
-            {
-                'borgmatic_version': importlib.metadata.version('borgmatic'),
-                'config_paths': config_paths,
-            },
-            config_list_file,
-        )
+def map_directories_to_devices(directories, working_directory=None):
+    '''
+    Given a sequence of directories and an optional working directory, return a map from directory
+    to an identifier for the device on which that directory resides or None if the path doesn't
+    exist.
+
+    This is handy for determining whether two different directories are on the same filesystem (have
+    the same device identifier).
+    '''
+    return {
+        directory: os.stat(full_directory).st_dev if os.path.exists(full_directory) else None
+        for directory in directories
+        for full_directory in (os.path.join(working_directory or '', directory),)
+    }
+
+
+def deduplicate_directories(directory_devices, additional_directory_devices):
+    '''
+    Given a map from directory to the identifier for the device on which that directory resides,
+    return the directories as a sorted sequence with all duplicate child directories removed. For
+    instance, if paths is ['/foo', '/foo/bar'], return just: ['/foo']
+
+    The one exception to this rule is if two paths are on different filesystems (devices). In that
+    case, they won't get de-duplicated in case they both need to be passed to Borg (e.g. the
+    location.one_file_system option is true).
+
+    The idea is that if Borg is given a parent directory, then it doesn't also need to be given
+    child directories, because it will naturally spider the contents of the parent directory. And
+    there are cases where Borg coming across the same file twice will result in duplicate reads and
+    even hangs, e.g. when a database hook is using a named pipe for streaming database dumps to
+    Borg.
+
+    If any additional directory devices are given, also deduplicate against them, but don't include
+    them in the returned directories.
+    '''
+    deduplicated = set()
+    directories = sorted(directory_devices.keys())
+    additional_directories = sorted(additional_directory_devices.keys())
+    all_devices = {**directory_devices, **additional_directory_devices}
+
+    for directory in directories:
+        deduplicated.add(directory)
+        parents = pathlib.PurePath(directory).parents
+
+        # If another directory in the given list (or the additional list) is a parent of current
+        # directory (even n levels up) and both are on the same filesystem, then the current
+        # directory is a duplicate.
+        for other_directory in directories + additional_directories:
+            for parent in parents:
+                if (
+                    pathlib.PurePath(other_directory) == parent
+                    and all_devices[directory] is not None
+                    and all_devices[other_directory] == all_devices[directory]
+                ):
+                    if directory in deduplicated:
+                        deduplicated.remove(directory)
+                    break
+
+    return sorted(deduplicated)
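Hypothetical usage of the pair above, with invented device identifiers as map_directories_to_devices() would return them:

    directory_devices = {'/home': 42, '/home/user': 42, '/mnt/media': 99}

    # '/home/user' collapses into its same-device parent '/home', while
    # '/mnt/media' survives because it sits on a different filesystem.
    print(deduplicate_directories(directory_devices, additional_directory_devices={}))
    # ['/home', '/mnt/media']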
+
+
+ROOT_PATTERN_PREFIX = 'R '
+
+
+def pattern_root_directories(patterns=None):
+    '''
+    Given a sequence of patterns, parse out and return just the root directories.
+    '''
+    if not patterns:
+        return []
+
+    return [
+        pattern.split(ROOT_PATTERN_PREFIX, maxsplit=1)[1]
+        for pattern in patterns
+        if pattern.startswith(ROOT_PATTERN_PREFIX)
+    ]
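Example with made-up Borg-style patterns: only "R " (root) entries contribute directories, while include and exclude patterns are filtered out by the startswith() guard:

    patterns = ['R /home', '+ /home/*/.config', '- /home/*/.cache']
    print(pattern_root_directories(patterns))  # ['/home']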
+
+
+def process_source_directories(config, source_directories=None):
+    '''
+    Given a sequence of source directories (either in the source_directories argument or, lacking
+    that, from config), expand and deduplicate the source directories, returning the result.
+    '''
+    working_directory = borgmatic.config.paths.get_working_directory(config)
+
+    if source_directories is None:
+        source_directories = tuple(config.get('source_directories', ()))
+
+    return deduplicate_directories(
+        map_directories_to_devices(
+            expand_directories(
+                tuple(source_directories),
+                working_directory=working_directory,
+            )
+        ),
+        additional_directory_devices=map_directories_to_devices(
+            expand_directories(
+                pattern_root_directories(config.get('patterns')),
+                working_directory=working_directory,
+            )
+        ),
+    )
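Putting it together, a hypothetical config run through the new entry point, assuming both /var/www paths share one device:

    config = {'source_directories': ['~/projects', '/var/www', '/var/www/html']}
    print(process_source_directories(config))
    # e.g. ['/home/user/projects', '/var/www']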


 def run_create(
@@ -71,54 +174,68 @@ def run_create(
         global_arguments.dry_run,
         **hook_context,
     )
-    logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}')
-    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
-        'remove_data_source_dumps',
-        config,
-        repository['path'],
-        borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
-        global_arguments.dry_run,
-    )
-    active_dumps = borgmatic.hooks.dispatch.call_hooks(
-        'dump_data_sources',
-        config,
-        repository['path'],
-        borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
-        global_arguments.dry_run,
-    )
-    if config.get('store_config_files', True):
-        create_borgmatic_manifest(
-            config,
-            config_paths,
-            global_arguments.dry_run,
-        )
-    stream_processes = [process for processes in active_dumps.values() for process in processes]
-
-    json_output = borgmatic.borg.create.create_archive(
-        global_arguments.dry_run,
-        repository['path'],
-        config,
-        config_paths,
-        local_borg_version,
-        global_arguments,
-        local_path=local_path,
-        remote_path=remote_path,
-        progress=create_arguments.progress,
-        stats=create_arguments.stats,
-        json=create_arguments.json,
-        list_files=create_arguments.list_files,
-        stream_processes=stream_processes,
-    )
-    if json_output:
-        yield borgmatic.actions.json.parse_json(json_output, repository.get('label'))
+
+    log_prefix = repository.get('label', repository['path'])
+    logger.info(f'{log_prefix}: Creating archive{dry_run_label}')
+
+    with borgmatic.config.paths.Runtime_directory(
+        config, log_prefix
+    ) as borgmatic_runtime_directory:
+        borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
+            'remove_data_source_dumps',
+            config,
+            repository['path'],
+            borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
+            borgmatic_runtime_directory,
+            global_arguments.dry_run,
+        )
+        source_directories = process_source_directories(config)
+        active_dumps = borgmatic.hooks.dispatch.call_hooks(
+            'dump_data_sources',
+            config,
+            repository['path'],
+            borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
+            config_paths,
+            borgmatic_runtime_directory,
+            source_directories,
+            global_arguments.dry_run,
+        )
+
+        # Process source directories again in case any data source hooks updated them. Without this
+        # step, we could end up with duplicate paths that cause Borg to hang when it tries to read
+        # from the same named pipe twice.
+        source_directories = process_source_directories(config, source_directories)
+        stream_processes = [process for processes in active_dumps.values() for process in processes]
+
+        json_output = borgmatic.borg.create.create_archive(
+            global_arguments.dry_run,
+            repository['path'],
+            config,
+            source_directories,
+            local_borg_version,
+            global_arguments,
+            borgmatic_runtime_directory,
+            local_path=local_path,
+            remote_path=remote_path,
+            progress=create_arguments.progress,
+            stats=create_arguments.stats,
+            json=create_arguments.json,
+            list_files=create_arguments.list_files,
+            stream_processes=stream_processes,
+        )
+
+        if json_output:
+            yield borgmatic.actions.json.parse_json(json_output, repository.get('label'))
+
+        borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
+            'remove_data_source_dumps',
+            config,
+            config_filename,
+            borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
+            borgmatic_runtime_directory,
+            global_arguments.dry_run,
+        )

-    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
-        'remove_data_source_dumps',
-        config,
-        config_filename,
-        borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
-        global_arguments.dry_run,
-    )
     borgmatic.hooks.command.execute_hook(
         config.get('after_backup'),
         config.get('umask'),
borgmatic/actions/restore.py

@@ -11,8 +11,8 @@ import borgmatic.borg.mount
 import borgmatic.borg.repo_list
 import borgmatic.config.paths
 import borgmatic.config.validate
+import borgmatic.hooks.data_source.dump
 import borgmatic.hooks.dispatch
-import borgmatic.hooks.dump

 logger = logging.getLogger(__name__)

@@ -44,7 +44,8 @@ def get_configured_data_source(
         hooks_to_search = {
             hook_name: value
             for (hook_name, value) in config.items()
-            if hook_name in borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES
+            if hook_name.split('_databases')[0]
+            in borgmatic.hooks.dispatch.get_submodule_names(borgmatic.hooks.data_source)
         }
     else:
         try:
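The new membership test derives a hook module name from each config key, so "postgresql_databases" maps to the "postgresql" data source submodule while non-hook options fall through. A standalone illustration (the submodule list here is an assumption, not the actual output of get_submodule_names()):

    submodule_names = ['btrfs', 'mariadb', 'mongodb', 'mysql', 'postgresql', 'sqlite', 'zfs']

    for hook_name in ('postgresql_databases', 'zfs', 'keep_daily'):
        print(hook_name, hook_name.split('_databases')[0] in submodule_names)
    # postgresql_databases True / zfs True / keep_daily False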
@@ -108,6 +109,7 @@ def restore_single_data_source(
     hook_name,
     data_source,
     connection_params,
+    borgmatic_runtime_directory,
 ):
     '''
     Given (among other things) an archive name, a data source hook name, the hostname, port,