Update the logic that probes for the borgmatic runtime directory to support more platforms and use cases (#934).
Reviewed-on: #937
commit 76cfeda290

NEWS
@@ -1,5 +1,9 @@
1.9.2.dev0
 * #932: Fix missing build backend setting in pyproject.toml to allow Fedora builds.
 * #934: Update the logic that probes for the borgmatic streaming database dump, bootstrap
   metadata, and check state directories to support more platforms and use cases.
 * #934: Add the "RuntimeDirectory" and "StateDirectory" options to the sample systemd service
   file to support the new runtime and state directory logic.

1.9.1
 * #928: Fix the user runtime directory location on macOS (and possibly Cygwin).
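For orientation, the directory probing described by the #934 entries boils down to the following fallback order. This is a condensed sketch of the borgmatic.config.paths changes further down in this diff, not a literal excerpt; the config and os names are the same ones used there.

    # Runtime directory (streaming database dumps, bootstrap metadata, spot check paths):
    runtime_base = (
        config.get('user_runtime_directory')
        or os.environ.get('XDG_RUNTIME_DIR')     # set by PAM on Linux
        or os.environ.get('RUNTIME_DIRECTORY')   # set by systemd when RuntimeDirectory= is configured
    )
    # If none of these are set, a randomly named "borgmatic-*" temporary directory is created
    # under $TMPDIR, $TEMP, or /tmp instead.

    # State directory (check state files):
    state_base = (
        config.get('user_state_directory')
        or os.environ.get('XDG_STATE_HOME')
        or os.environ.get('STATE_DIRECTORY')     # set by systemd when StateDirectory= is configured
        or '~/.local/state'
    )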
@@ -403,16 +403,22 @@ BORG_DIRECTORY_FILE_TYPE = 'd'

def collect_spot_check_archive_paths(
repository, archive, config, local_borg_version, global_arguments, local_path, remote_path
repository,
archive,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
borgmatic_runtime_directory,
):
'''
Given a repository configuration dict, the name of the latest archive, a configuration dict, the
local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, and
the remote Borg path, collect the paths from the given archive (but only include files and
symlinks and exclude borgmatic runtime directories).
local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the
remote Borg path, and the borgmatic runtime directory, collect the paths from the given archive
(but only include files and symlinks and exclude borgmatic runtime directories).
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)

return tuple(
path
@@ -445,7 +451,7 @@ def compare_spot_check_hashes(
global_arguments,
local_path,
remote_path,
log_label,
log_prefix,
source_paths,
):
'''
@@ -469,7 +475,7 @@ def compare_spot_check_hashes(
if os.path.exists(os.path.join(working_directory or '', source_path))
}
logger.debug(
f'{log_label}: Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check'
f'{log_prefix}: Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check'
)

source_sample_paths_iterator = iter(source_sample_paths)
@@ -546,18 +552,19 @@ def spot_check(
global_arguments,
local_path,
remote_path,
borgmatic_runtime_directory,
):
'''
Given a repository dict, a loaded configuration dict, the local Borg version, global arguments
as an argparse.Namespace instance, the local Borg path, and the remote Borg path, perform a spot
check for the latest archive in the given repository.
as an argparse.Namespace instance, the local Borg path, the remote Borg path, and the borgmatic
runtime directory, perform a spot check for the latest archive in the given repository.

A spot check compares file counts and also the hashes for a random sampling of source files on
disk to those stored in the latest archive. If any differences are beyond configured tolerances,
then the check fails.
'''
log_label = f'{repository.get("label", repository["path"])}'
logger.debug(f'{log_label}: Running spot check')
log_prefix = f'{repository.get("label", repository["path"])}'
logger.debug(f'{log_prefix}: Running spot check')

try:
spot_check_config = next(
@@ -579,7 +586,7 @@ def spot_check(
local_path,
remote_path,
)
logger.debug(f'{log_label}: {len(source_paths)} total source paths for spot check')
logger.debug(f'{log_prefix}: {len(source_paths)} total source paths for spot check')

archive = borgmatic.borg.repo_list.resolve_archive_name(
repository['path'],
@@ -590,7 +597,7 @@ def spot_check(
local_path,
remote_path,
)
logger.debug(f'{log_label}: Using archive {archive} for spot check')
logger.debug(f'{log_prefix}: Using archive {archive} for spot check')

archive_paths = collect_spot_check_archive_paths(
repository,
@@ -600,8 +607,9 @@ def spot_check(
global_arguments,
local_path,
remote_path,
borgmatic_runtime_directory,
)
logger.debug(f'{log_label}: {len(archive_paths)} total archive paths for spot check')
logger.debug(f'{log_prefix}: {len(archive_paths)} total archive paths for spot check')

# Calculate the percentage delta between the source paths count and the archive paths count, and
# compare that delta to the configured count tolerance percentage.
@@ -609,10 +617,10 @@ def spot_check(

if count_delta_percentage > spot_check_config['count_tolerance_percentage']:
logger.debug(
f'{log_label}: Paths in source paths but not latest archive: {", ".join(set(source_paths) - set(archive_paths)) or "none"}'
f'{log_prefix}: Paths in source paths but not latest archive: {", ".join(set(source_paths) - set(archive_paths)) or "none"}'
)
logger.debug(
f'{log_label}: Paths in latest archive but not source paths: {", ".join(set(archive_paths) - set(source_paths)) or "none"}'
f'{log_prefix}: Paths in latest archive but not source paths: {", ".join(set(archive_paths) - set(source_paths)) or "none"}'
)
raise ValueError(
f'Spot check failed: {count_delta_percentage:.2f}% file count delta between source paths and latest archive (tolerance is {spot_check_config["count_tolerance_percentage"]}%)'
@@ -626,25 +634,25 @@ def spot_check(
global_arguments,
local_path,
remote_path,
log_label,
log_prefix,
source_paths,
)

# Error if the percentage of failing hashes exceeds the configured tolerance percentage.
logger.debug(f'{log_label}: {len(failing_paths)} non-matching spot check hashes')
logger.debug(f'{log_prefix}: {len(failing_paths)} non-matching spot check hashes')
data_tolerance_percentage = spot_check_config['data_tolerance_percentage']
failing_percentage = (len(failing_paths) / len(source_paths)) * 100

if failing_percentage > data_tolerance_percentage:
logger.debug(
f'{log_label}: Source paths with data not matching the latest archive: {", ".join(failing_paths)}'
f'{log_prefix}: Source paths with data not matching the latest archive: {", ".join(failing_paths)}'
)
raise ValueError(
f'Spot check failed: {failing_percentage:.2f}% of source paths with data not matching the latest archive (tolerance is {data_tolerance_percentage}%)'
)

logger.info(
f'{log_label}: Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta'
f'{log_prefix}: Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta'
)

@@ -678,7 +686,9 @@ def run_check(
**hook_context,
)

logger.info(f'{repository.get("label", repository["path"])}: Running consistency checks')
log_prefix = repository.get('label', repository['path'])
logger.info(f'{log_prefix}: Running consistency checks')

repository_id = borgmatic.borg.check.get_repository_id(
repository['path'],
config,
@@ -730,14 +740,18 @@ def run_check(
write_check_time(make_check_time_path(config, repository_id, 'extract'))

if 'spot' in checks:
spot_check(
repository,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
)
with borgmatic.config.paths.Runtime_directory(
config, log_prefix
) as borgmatic_runtime_directory:
spot_check(
repository,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
borgmatic_runtime_directory,
)
write_check_time(make_check_time_path(config, repository_id, 'spot'))

borgmatic.hooks.command.execute_hook(
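The pattern above recurs throughout this commit: an action acquires the runtime directory once via the new Runtime_directory context manager and passes it down explicitly, instead of each helper re-deriving it from the configuration. A minimal sketch of that calling convention (do_something_with is a hypothetical stand-in for spot_check and friends):

    with borgmatic.config.paths.Runtime_directory(config, log_prefix) as borgmatic_runtime_directory:
        # The runtime directory (and any backing temporary directory) exists for the duration
        # of this block and is cleaned up automatically on exit.
        do_something_with(config, borgmatic_runtime_directory)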
@@ -38,37 +38,44 @@ def get_config_paths(archive_name, bootstrap_arguments, global_arguments, local_
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(
{'borgmatic_source_directory': bootstrap_arguments.borgmatic_source_directory}
)
borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(
{'user_runtime_directory': bootstrap_arguments.user_runtime_directory}
)
config = make_bootstrap_config(bootstrap_arguments)

# Probe for the manifest file in multiple locations, as the default location has moved to the
# borgmatic runtime directory (which get stored as just "/borgmatic" with Borg 1.4+). But we
# still want to support reading the manifest from previously created archives as well.
for base_directory in ('borgmatic', borgmatic_runtime_directory, borgmatic_source_directory):
borgmatic_manifest_path = os.path.join(base_directory, 'bootstrap', 'manifest.json')
with borgmatic.config.paths.Runtime_directory(
{'user_runtime_directory': bootstrap_arguments.user_runtime_directory},
bootstrap_arguments.repository,
) as borgmatic_runtime_directory:
for base_directory in (
'borgmatic',
borgmatic.config.paths.make_runtime_directory_glob(borgmatic_runtime_directory),
borgmatic_source_directory,
):
borgmatic_manifest_path = 'sh:' + os.path.join(
base_directory, 'bootstrap', 'manifest.json'
)

extract_process = borgmatic.borg.extract.extract_archive(
global_arguments.dry_run,
bootstrap_arguments.repository,
archive_name,
[borgmatic_manifest_path],
config,
local_borg_version,
global_arguments,
local_path=bootstrap_arguments.local_path,
remote_path=bootstrap_arguments.remote_path,
extract_to_stdout=True,
)
manifest_json = extract_process.stdout.read()
extract_process = borgmatic.borg.extract.extract_archive(
global_arguments.dry_run,
bootstrap_arguments.repository,
archive_name,
[borgmatic_manifest_path],
config,
local_borg_version,
global_arguments,
local_path=bootstrap_arguments.local_path,
remote_path=bootstrap_arguments.remote_path,
extract_to_stdout=True,
)
manifest_json = extract_process.stdout.read()

if manifest_json:
break
else:
raise ValueError(
'Cannot read configuration paths from archive due to missing bootstrap manifest'
)
if manifest_json:
break
else:
raise ValueError(
'Cannot read configuration paths from archive due to missing bootstrap manifest'
)

try:
manifest_data = json.loads(manifest_json)
@@ -14,15 +14,15 @@ import borgmatic.hooks.dump
logger = logging.getLogger(__name__)

def create_borgmatic_manifest(config, config_paths, dry_run):
def create_borgmatic_manifest(config, config_paths, borgmatic_runtime_directory, dry_run):
'''
Create a borgmatic manifest file to store the paths to the configuration files used to create
the archive.
Given a configuration dict, a sequence of config file paths, the borgmatic runtime directory,
and whether this is a dry run, create a borgmatic manifest file to store the paths to the
configuration files used to create the archive.
'''
if dry_run:
return

borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)
borgmatic_manifest_path = os.path.join(
borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
)
@@ -71,54 +71,68 @@ def run_create(
global_arguments.dry_run,
**hook_context,
)
logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}')
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
active_dumps = borgmatic.hooks.dispatch.call_hooks(
'dump_data_sources',
config,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
if config.get('store_config_files', True):
create_borgmatic_manifest(

log_prefix = repository.get('label', repository['path'])
logger.info(f'{log_prefix}: Creating archive{dry_run_label}')

with borgmatic.config.paths.Runtime_directory(
config, log_prefix
) as borgmatic_runtime_directory:
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
config_paths,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
borgmatic_runtime_directory,
global_arguments.dry_run,
)
stream_processes = [process for processes in active_dumps.values() for process in processes]
active_dumps = borgmatic.hooks.dispatch.call_hooks(
'dump_data_sources',
config,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
borgmatic_runtime_directory,
global_arguments.dry_run,
)
stream_processes = [process for processes in active_dumps.values() for process in processes]

json_output = borgmatic.borg.create.create_archive(
global_arguments.dry_run,
repository['path'],
config,
config_paths,
local_borg_version,
global_arguments,
local_path=local_path,
remote_path=remote_path,
progress=create_arguments.progress,
stats=create_arguments.stats,
json=create_arguments.json,
list_files=create_arguments.list_files,
stream_processes=stream_processes,
)
if json_output:
yield borgmatic.actions.json.parse_json(json_output, repository.get('label'))
if config.get('store_config_files', True):
create_borgmatic_manifest(
config,
config_paths,
borgmatic_runtime_directory,
global_arguments.dry_run,
)

json_output = borgmatic.borg.create.create_archive(
global_arguments.dry_run,
repository['path'],
config,
config_paths,
local_borg_version,
global_arguments,
borgmatic_runtime_directory,
local_path=local_path,
remote_path=remote_path,
progress=create_arguments.progress,
stats=create_arguments.stats,
json=create_arguments.json,
list_files=create_arguments.list_files,
stream_processes=stream_processes,
)

if json_output:
yield borgmatic.actions.json.parse_json(json_output, repository.get('label'))

borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
config_filename,
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
borgmatic_runtime_directory,
global_arguments.dry_run,
)

borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
config_filename,
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
borgmatic.hooks.command.execute_hook(
config.get('after_backup'),
config.get('umask'),
@@ -108,6 +108,7 @@ def restore_single_data_source(
hook_name,
data_source,
connection_params,
borgmatic_runtime_directory,
):
'''
Given (among other things) an archive name, a data source hook name, the hostname, port,
@@ -123,9 +124,9 @@ def restore_single_data_source(
config,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
borgmatic_runtime_directory,
data_source['name'],
)[hook_name]
borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)

destination_path = (
tempfile.mkdtemp(dir=borgmatic_runtime_directory)
@@ -135,7 +136,7 @@ def restore_single_data_source(

try:
# Kick off a single data source extract. If using a directory format, extract to a temporary
# directory. Otheriwes extract the single dump file to stdout.
# directory. Otherwise extract the single dump file to stdout.
extract_process = borgmatic.borg.extract.extract_archive(
dry_run=global_arguments.dry_run,
repository=repository['path'],
@@ -170,6 +171,7 @@ def restore_single_data_source(
dry_run=global_arguments.dry_run,
extract_process=extract_process,
connection_params=connection_params,
borgmatic_runtime_directory=borgmatic_runtime_directory,
)

@@ -181,17 +183,17 @@ def collect_archive_data_source_names(
global_arguments,
local_path,
remote_path,
borgmatic_runtime_directory,
):
'''
Given a local or remote repository path, a resolved archive name, a configuration dict, the
local Borg version, global_arguments an argparse.Namespace, and local and remote Borg paths,
query the archive for the names of data sources it contains as dumps and return them as a dict
from hook name to a sequence of data source names.
local Borg version, global_arguments an argparse.Namespace, local and remote Borg paths, and the
borgmatic runtime directory, query the archive for the names of data sources it contains as
dumps and return them as a dict from hook name to a sequence of data source names.
'''
borgmatic_source_directory = str(
pathlib.Path(borgmatic.config.paths.get_borgmatic_source_directory(config))
)
borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)

# Probe for the data source dumps in multiple locations, as the default location has moved to
# the borgmatic runtime directory (which get stored as just "/borgmatic" with Borg 1.4+). But we
@@ -207,7 +209,7 @@ def collect_archive_data_source_names(
+ borgmatic.hooks.dump.make_data_source_dump_path(base_directory, '*_databases/*/*')
for base_directory in (
'borgmatic',
borgmatic_runtime_directory.lstrip('/'),
borgmatic.config.paths.make_runtime_directory_glob(borgmatic_runtime_directory),
borgmatic_source_directory.lstrip('/'),
)
],
@@ -342,109 +344,116 @@ def run_restore(
):
return

logger.info(
f'{repository.get("label", repository["path"])}: Restoring data sources from archive {restore_arguments.archive}'
)
log_prefix = repository.get('label', repository['path'])
logger.info(f'{log_prefix}: Restoring data sources from archive {restore_arguments.archive}')

borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
with borgmatic.config.paths.Runtime_directory(
config, log_prefix
) as borgmatic_runtime_directory:
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
borgmatic_runtime_directory,
global_arguments.dry_run,
)

archive_name = borgmatic.borg.repo_list.resolve_archive_name(
repository['path'],
restore_arguments.archive,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
)
archive_data_source_names = collect_archive_data_source_names(
repository['path'],
archive_name,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
)
restore_names = find_data_sources_to_restore(
restore_arguments.data_sources, archive_data_source_names
)
found_names = set()
remaining_restore_names = {}
connection_params = {
'hostname': restore_arguments.hostname,
'port': restore_arguments.port,
'username': restore_arguments.username,
'password': restore_arguments.password,
'restore_path': restore_arguments.restore_path,
}
archive_name = borgmatic.borg.repo_list.resolve_archive_name(
repository['path'],
restore_arguments.archive,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
)
archive_data_source_names = collect_archive_data_source_names(
repository['path'],
archive_name,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
borgmatic_runtime_directory,
)
restore_names = find_data_sources_to_restore(
restore_arguments.data_sources, archive_data_source_names
)
found_names = set()
remaining_restore_names = {}
connection_params = {
'hostname': restore_arguments.hostname,
'port': restore_arguments.port,
'username': restore_arguments.username,
'password': restore_arguments.password,
'restore_path': restore_arguments.restore_path,
}

for hook_name, data_source_names in restore_names.items():
for data_source_name in data_source_names:
found_hook_name, found_data_source = get_configured_data_source(
config, archive_data_source_names, hook_name, data_source_name
)

if not found_data_source:
remaining_restore_names.setdefault(found_hook_name or hook_name, []).append(
data_source_name
for hook_name, data_source_names in restore_names.items():
for data_source_name in data_source_names:
found_hook_name, found_data_source = get_configured_data_source(
config, archive_data_source_names, hook_name, data_source_name
)
continue

found_names.add(data_source_name)
restore_single_data_source(
repository,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
found_hook_name or hook_name,
dict(found_data_source, **{'schemas': restore_arguments.schemas}),
connection_params,
)
if not found_data_source:
remaining_restore_names.setdefault(found_hook_name or hook_name, []).append(
data_source_name
)
continue

# For any data sources that weren't found via exact matches in the configuration, try to
# fallback to "all" entries.
for hook_name, data_source_names in remaining_restore_names.items():
for data_source_name in data_source_names:
found_hook_name, found_data_source = get_configured_data_source(
config, archive_data_source_names, hook_name, data_source_name, 'all'
)
found_names.add(data_source_name)
restore_single_data_source(
repository,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
found_hook_name or hook_name,
dict(found_data_source, **{'schemas': restore_arguments.schemas}),
connection_params,
borgmatic_runtime_directory,
)

if not found_data_source:
continue
# For any data sources that weren't found via exact matches in the configuration, try to
# fallback to "all" entries.
for hook_name, data_source_names in remaining_restore_names.items():
for data_source_name in data_source_names:
found_hook_name, found_data_source = get_configured_data_source(
config, archive_data_source_names, hook_name, data_source_name, 'all'
)

found_names.add(data_source_name)
data_source = copy.copy(found_data_source)
data_source['name'] = data_source_name
if not found_data_source:
continue

restore_single_data_source(
repository,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
found_hook_name or hook_name,
dict(data_source, **{'schemas': restore_arguments.schemas}),
connection_params,
)
found_names.add(data_source_name)
data_source = copy.copy(found_data_source)
data_source['name'] = data_source_name

borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
restore_single_data_source(
repository,
config,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
found_hook_name or hook_name,
dict(data_source, **{'schemas': restore_arguments.schemas}),
connection_params,
borgmatic_runtime_directory,
)

borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
borgmatic_runtime_directory,
global_arguments.dry_run,
)

ensure_data_sources_found(restore_names, remaining_restore_names, found_names)
@@ -504,6 +504,7 @@ def create_archive(
config_paths,
local_borg_version,
global_arguments,
borgmatic_runtime_directory,
local_path='borg',
remote_path=None,
progress=False,
@@ -524,9 +525,7 @@ def create_archive(

working_directory = borgmatic.config.paths.get_working_directory(config)
borgmatic_runtime_directories = expand_directories(
collect_borgmatic_runtime_directories(
borgmatic.config.paths.get_borgmatic_runtime_directory(config)
),
collect_borgmatic_runtime_directories(borgmatic_runtime_directory),
working_directory=working_directory,
)
@@ -1,4 +1,8 @@
import logging
import os
import tempfile

logger = logging.getLogger(__name__)

def expand_user_in_path(path):
@@ -26,26 +30,91 @@ def get_borgmatic_source_directory(config):
return expand_user_in_path(config.get('borgmatic_source_directory') or '~/.borgmatic')

def get_borgmatic_runtime_directory(config):
'''
Given a configuration dict, get the borgmatic runtime directory used for storing temporary
runtime data like streaming database dumps and bootstrap metadata. Defaults to
$XDG_RUNTIME_DIR/./borgmatic or $TMPDIR/./borgmatic or $TEMP/./borgmatic or
/run/user/$UID/./borgmatic.
TEMPORARY_DIRECTORY_PREFIX = 'borgmatic-'

The "/./" is taking advantage of a Borg feature such that the part of the path before the "/./"
does not get stored in the file path within an archive. That way, the path of the runtime
directory can change without leaving database dumps within an archive inaccessible.

class Runtime_directory:
'''
return expand_user_in_path(
os.path.join(
A Python context manager for creating and cleaning up the borgmatic runtime directory used for
storing temporary runtime data like streaming database dumps and bootstrap metadata.

Example use as a context manager:

with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
do_something_with(borgmatic_runtime_directory)

For the scope of that "with" statement, the runtime directory is available. Afterwards, it
automatically gets cleaned up as necessary.
'''

def __init__(self, config, log_prefix):
'''
Given a configuration dict and a log prefix, determine the borgmatic runtime directory,
creating a secure, temporary directory within it if necessary. Defaults to
$XDG_RUNTIME_DIR/./borgmatic or $RUNTIME_DIRECTORY/./borgmatic or
$TMPDIR/borgmatic-[random]/./borgmatic or $TEMP/borgmatic-[random]/./borgmatic or
/tmp/borgmatic-[random]/./borgmatic where "[random]" is a randomly generated string intended
to avoid path collisions.

If XDG_RUNTIME_DIR or RUNTIME_DIRECTORY is set and already ends in "/borgmatic", then don't
tack on a second "/borgmatic" path component.

The "/./" is taking advantage of a Borg feature such that the part of the path before the "/./"
does not get stored in the file path within an archive. That way, the path of the runtime
directory can change without leaving database dumps within an archive inaccessible.
'''
runtime_directory = (
config.get('user_runtime_directory')
or os.environ.get('XDG_RUNTIME_DIR')
or os.environ.get('TMPDIR')
or os.environ.get('TEMP')
or f'/run/user/{os.getuid()}',
'.',
'borgmatic',
or os.environ.get('XDG_RUNTIME_DIR')  # Set by PAM on Linux.
or os.environ.get('RUNTIME_DIRECTORY')  # Set by systemd if configured.
)

if runtime_directory:
self.temporary_directory = None
else:
base_directory = os.environ.get('TMPDIR') or os.environ.get('TEMP') or '/tmp'
os.makedirs(base_directory, mode=0o700, exist_ok=True)
self.temporary_directory = tempfile.TemporaryDirectory(
prefix=TEMPORARY_DIRECTORY_PREFIX,
dir=base_directory,
)
runtime_directory = self.temporary_directory.name

(base_path, final_directory) = os.path.split(runtime_directory.rstrip(os.path.sep))

self.runtime_path = expand_user_in_path(
os.path.join(
base_path if final_directory == 'borgmatic' else runtime_directory, '.', 'borgmatic'
)
)
os.makedirs(self.runtime_path, mode=0o700, exist_ok=True)

logger.debug(f'{log_prefix}: Using runtime directory {os.path.normpath(self.runtime_path)}')

def __enter__(self):
'''
Return the borgmatic runtime path as a string.
'''
return self.runtime_path

def __exit__(self, exception, value, traceback):
'''
Delete any temporary directory that was created as part of initialization.
'''
if self.temporary_directory:
self.temporary_directory.cleanup()

def make_runtime_directory_glob(borgmatic_runtime_directory):
'''
Given a borgmatic runtime directory path, make a glob that would match that path, specifically
replacing any randomly generated temporary subdirectory with "*" since such a directory's name
changes on every borgmatic run.
'''
return os.path.join(
*(
'*' if subdirectory.startswith(TEMPORARY_DIRECTORY_PREFIX) else subdirectory
for subdirectory in os.path.normpath(borgmatic_runtime_directory).split(os.path.sep)
)
)
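A quick illustration of make_runtime_directory_glob with hypothetical paths (the leading separator gets dropped because os.path.join is applied to the split components, which matches how archive-relative paths are probed elsewhere in this commit):

    make_runtime_directory_glob('/tmp/borgmatic-1k2j3h/borgmatic')   # -> 'tmp/*/borgmatic'
    make_runtime_directory_glob('/run/user/1000/borgmatic')          # -> 'run/user/1000/borgmatic'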
@@ -59,10 +128,9 @@ def get_borgmatic_state_directory(config):
return expand_user_in_path(
os.path.join(
config.get('user_state_directory')
or os.environ.get(
'XDG_STATE_HOME',
'~/.local/state',
),
or os.environ.get('XDG_STATE_HOME')
or os.environ.get('STATE_DIRECTORY')  # Set by systemd if configured.
or '~/.local/state',
'borgmatic',
)
)
@@ -14,15 +14,11 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__)

def make_dump_path(config, base_directory=None): # pragma: no cover
def make_dump_path(base_directory): # pragma: no cover
'''
Given a configuration dict and an optional base directory, make the corresponding dump path. If
a base directory isn't provided, use the borgmatic runtime directory.
Given a base directory, make the corresponding dump path.
'''
return dump.make_data_source_dump_path(
base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
'mariadb_databases',
)
return dump.make_data_source_dump_path(base_directory, 'mariadb_databases')

SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
@@ -126,12 +122,12 @@ def use_streaming(databases, config, log_prefix):
return any(databases)

def dump_data_sources(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
'''
Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given
configuration dict to construct the destination path and the given log prefix in any log
entries.
borgmatic runtime directory to construct the destination path and the given log prefix in any
log entries.

Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -142,7 +138,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
logger.info(f'{log_prefix}: Dumping MariaDB databases{dry_run_label}')

for database in databases:
dump_path = make_dump_path(config)
dump_path = make_dump_path(borgmatic_runtime_directory)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
@@ -185,43 +181,55 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
return [process for process in processes if process]

def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the given
configuration dict to construct the destination path and the log prefix in any log entries. If
this is a dry run, then don't actually remove anything.
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic_runtime_directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
'''
dump.remove_data_source_dumps(make_dump_path(config), 'MariaDB', log_prefix, dry_run)
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'MariaDB', log_prefix, dry_run
)

def make_data_source_dump_patterns(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
database name to match, return the corresponding glob patterns to match the database dump in an
archive.
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

return (
dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
dump.make_data_source_dump_filename(
make_dump_path(config, 'borgmatic'), name, hostname='*'
make_dump_path(borgmatic_runtime_directory), name, hostname='*'
),
dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*'),
dump.make_data_source_dump_filename(
make_dump_path(config, borgmatic_source_directory), name, hostname='*'
make_dump_path(borgmatic_source_directory), name, hostname='*'
),
)

def restore_data_source_dump(
hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,
connection_params,
borgmatic_runtime_directory,
):
'''
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given configuration dict is
used to construct the destination path, and the given log prefix is used for any log entries. If
this is a dry run, then don't actually restore anything. Trigger the given active extract
process (an instance of subprocess.Popen) to produce output to consume.
configuration dict, but the given hook configuration is ignored. The given log prefix is used
for any log entries. If this is a dry run, then don't actually restore anything. Trigger the
given active extract process (an instance of subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
hostname = connection_params['hostname'] or data_source.get(
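All four database hooks in this commit get the same signature change: dump_data_sources, remove_data_source_dumps, make_data_source_dump_patterns, and restore_data_source_dump now receive the borgmatic runtime directory instead of deriving a dump path from the configuration. A minimal sketch of a direct call with the new signature (in borgmatic itself these functions are invoked through borgmatic.hooks.dispatch rather than directly):

    processes = dump_data_sources(
        databases,                     # configured database dicts, as per the configuration schema
        config,
        log_prefix,
        borgmatic_runtime_directory,   # passed in by the calling action
        dry_run,
    )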
@@ -8,15 +8,11 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__)

def make_dump_path(config, base_directory=None): # pragma: no cover
def make_dump_path(base_directory): # pragma: no cover
'''
Given a configuration dict and an optional base directory, make the corresponding dump path. If
a base directory isn't provided, use the borgmatic runtime directory.
Given a base directory, make the corresponding dump path.
'''
return dump.make_data_source_dump_path(
base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
'mongodb_databases',
)
return dump.make_data_source_dump_path(base_directory, 'mongodb_databases')

def use_streaming(databases, config, log_prefix):
@@ -27,11 +23,12 @@ def use_streaming(databases, config, log_prefix):
return any(database.get('format') != 'directory' for database in databases)

def dump_data_sources(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
'''
Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the configuration
dict to construct the destination path and the given log prefix in any log entries.
dicts, one dict describing each database as per the configuration schema. Use the borgmatic
runtime directory to construct the destination path (used for the directory format and the given
log prefix in any log entries.

Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -44,7 +41,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
for database in databases:
name = database['name']
dump_filename = dump.make_data_source_dump_filename(
make_dump_path(config), name, database.get('hostname')
make_dump_path(borgmatic_runtime_directory), name, database.get('hostname')
)
dump_format = database.get('format', 'archive')

@@ -94,36 +91,49 @@ def build_dump_command(database, dump_filename, dump_format):
)

def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the log
prefix in any log entries. Use the given configuration dict to construct the destination path.
If this is a dry run, then don't actually remove anything.
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic_runtime_directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
'''
dump.remove_data_source_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run)
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'MongoDB', log_prefix, dry_run
)

def make_data_source_dump_patterns(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of database configurations dicts, a configuration dict, a prefix to log with,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

return (
dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
dump.make_data_source_dump_filename(
make_dump_path(config, 'borgmatic'), name, hostname='*'
make_dump_path(borgmatic_runtime_directory), name, hostname='*'
),
dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*'),
dump.make_data_source_dump_filename(
make_dump_path(config, borgmatic_source_directory), name, hostname='*'
make_dump_path(borgmatic_source_directory), name, hostname='*'
),
)

def restore_data_source_dump(
hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,
connection_params,
borgmatic_runtime_directory,
):
'''
Restore a database from the given extract stream. The database is supplied as a data source
@@ -137,7 +147,9 @@ def restore_data_source_dump(
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
dump_filename = dump.make_data_source_dump_filename(
make_dump_path(config), data_source['name'], data_source.get('hostname')
make_dump_path(borgmatic_runtime_directory),
data_source['name'],
data_source.get('hostname'),
)
restore_command = build_restore_command(
extract_process, data_source, dump_filename, connection_params
@@ -14,15 +14,11 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__)

def make_dump_path(config, base_directory=None): # pragma: no cover
def make_dump_path(base_directory): # pragma: no cover
'''
Given a configuration dict and an optional base directory, make the corresponding dump path. If
a base directory isn't provided, use the borgmatic runtime directory.
Given a base directory, make the corresponding dump path.
'''
return dump.make_data_source_dump_path(
base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
'mysql_databases',
)
return dump.make_data_source_dump_path(base_directory, 'mysql_databases')

SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
@@ -125,11 +121,12 @@ def use_streaming(databases, config, log_prefix):
return any(databases)

def dump_data_sources(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
'''
Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
of dicts, one dict describing each database as per the configuration schema. Use the given
configuration dict to construct the destination path and the given log prefix in any log entries.
borgmatic runtime directory to construct the destination path and the given log prefix in any
log entries.

Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -140,7 +137,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')

for database in databases:
dump_path = make_dump_path(config)
dump_path = make_dump_path(borgmatic_runtime_directory)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
@@ -183,43 +180,55 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
return [process for process in processes if process]

def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the given
configuration dict to construct the destination path and the log prefix in any log entries. If
this is a dry run, then don't actually remove anything.
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic runtime directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
'''
dump.remove_data_source_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run)
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'MySQL', log_prefix, dry_run
)

def make_data_source_dump_patterns(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
database name to match, return the corresponding glob patterns to match the database dump in an
archive.
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

return (
dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
dump.make_data_source_dump_filename(
make_dump_path(config, 'borgmatic'), name, hostname='*'
make_dump_path(borgmatic_runtime_directory), name, hostname='*'
),
dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*'),
dump.make_data_source_dump_filename(
make_dump_path(config, borgmatic_source_directory), name, hostname='*'
make_dump_path(borgmatic_source_directory), name, hostname='*'
),
)

def restore_data_source_dump(
hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
hook_config,
config,
log_prefix,
data_source,
dry_run,
extract_process,
connection_params,
borgmatic_runtime_directory,
):
'''
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given configuration dict is
used to construct the destination path, and the given log prefix is used for any log entries. If
this is a dry run, then don't actually restore anything. Trigger the given active extract
process (an instance of subprocess.Popen) to produce output to consume.
configuration dict, but the given hook configuration is ignored. The given log prefix is used
for any log entries. If this is a dry run, then don't actually restore anything. Trigger the
given active extract process (an instance of subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
hostname = connection_params['hostname'] or data_source.get(
@@ -16,15 +16,11 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__)

def make_dump_path(config, base_directory=None): # pragma: no cover
def make_dump_path(base_directory): # pragma: no cover
'''
Given a configuration dict and an optional base directory, make the corresponding dump path. If
a base directory isn't provided, use the borgmatic runtime directory.
Given a base directory, make the corresponding dump path.
'''
return dump.make_data_source_dump_path(
base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
'postgresql_databases',
)
return dump.make_data_source_dump_path(base_directory, 'postgresql_databases')

def make_extra_environment(database, restore_connection_params=None):
@@ -108,12 +104,12 @@ def use_streaming(databases, config, log_prefix):
return any(database.get('format') != 'directory' for database in databases)

def dump_data_sources(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
'''
Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given
configuration dict to construct the destination path and the given log prefix in any log
entries.
borgmatic runtime directory to construct the destination path and the given log prefix in any
log entries.

Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -127,7 +123,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):

for database in databases:
extra_environment = make_extra_environment(database)
dump_path = make_dump_path(config)
dump_path = make_dump_path(borgmatic_runtime_directory)
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run
)
@@ -210,43 +206,57 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
return processes

def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(
databases, config, log_prefix, borgmatic_runtime_directory, dry_run
): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the given
configuration dict to construct the destination path and the log prefix in any log entries. If
this is a dry run, then don't actually remove anything.
Remove all database dump files for this hook regardless of the given databases. Use the
borgmatic runtime directory to construct the destination path and the log prefix in any log
entries. If this is a dry run, then don't actually remove anything.
'''
dump.remove_data_source_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run)
dump.remove_data_source_dumps(
make_dump_path(borgmatic_runtime_directory), 'PostgreSQL', log_prefix, dry_run
)

def make_data_source_dump_patterns(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_patterns(
databases, config, log_prefix, borgmatic_runtime_directory, name=None
): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
database name to match, return the corresponding glob patterns to match the database dump in an
archive.
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
borgmatic runtime directory, and a database name to match, return the corresponding glob
patterns to match the database dump in an archive.
'''
borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

return (
dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
dump.make_data_source_dump_filename(
make_dump_path(config, 'borgmatic'), name, hostname='*'
make_dump_path(borgmatic_runtime_directory), name, hostname='*'