Update the logic that probes for the borgmatic streaming database dump, bootstrap metadata, and check state directories to support more platforms and use cases (#934).
This commit is contained in:
parent c0721a8cad
commit 295bfb0c57

NEWS (4 additions)
@@ -1,5 +1,9 @@
 1.9.2.dev0
  * #932: Fix missing build backend setting in pyproject.toml to allow Fedora builds.
+ * #934: Update the logic that probes for the borgmatic streaming database dump, bootstrap
+   metadata, and check state directories to support more platforms and use cases.
+ * #934: Add the "RuntimeDirectory" and "StateDirectory" options to the sample systemd service
+   file to support the new runtime and state directory logic.

 1.9.1
  * #928: Fix the user runtime directory location on macOS (and possibly Cygwin).
@@ -403,16 +403,22 @@ BORG_DIRECTORY_FILE_TYPE = 'd'


 def collect_spot_check_archive_paths(
-    repository, archive, config, local_borg_version, global_arguments, local_path, remote_path
+    repository,
+    archive,
+    config,
+    local_borg_version,
+    global_arguments,
+    local_path,
+    remote_path,
+    borgmatic_runtime_directory,
 ):
     '''
     Given a repository configuration dict, the name of the latest archive, a configuration dict, the
-    local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, and
-    the remote Borg path, collect the paths from the given archive (but only include files and
-    symlinks and exclude borgmatic runtime directories).
+    local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the
+    remote Borg path, and the borgmatic runtime directory, collect the paths from the given archive
+    (but only include files and symlinks and exclude borgmatic runtime directories).
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)
-    borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)

     return tuple(
         path
@@ -546,11 +552,12 @@ def spot_check(
     global_arguments,
     local_path,
     remote_path,
+    borgmatic_runtime_directory,
 ):
     '''
     Given a repository dict, a loaded configuration dict, the local Borg version, global arguments
-    as an argparse.Namespace instance, the local Borg path, and the remote Borg path, perform a spot
-    check for the latest archive in the given repository.
+    as an argparse.Namespace instance, the local Borg path, the remote Borg path, and the borgmatic
+    runtime directory, perform a spot check for the latest archive in the given repository.

     A spot check compares file counts and also the hashes for a random sampling of source files on
     disk to those stored in the latest archive. If any differences are beyond configured tolerances,
@@ -600,6 +607,7 @@ def spot_check(
         global_arguments,
         local_path,
         remote_path,
+        borgmatic_runtime_directory,
     )
     logger.debug(f'{log_label}: {len(archive_paths)} total archive paths for spot check')

@@ -730,14 +738,16 @@ def run_check(
         write_check_time(make_check_time_path(config, repository_id, 'extract'))

     if 'spot' in checks:
-        spot_check(
-            repository,
-            config,
-            local_borg_version,
-            global_arguments,
-            local_path,
-            remote_path,
-        )
+        with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
+            spot_check(
+                repository,
+                config,
+                local_borg_version,
+                global_arguments,
+                local_path,
+                remote_path,
+                borgmatic_runtime_directory,
+            )
         write_check_time(make_check_time_path(config, repository_id, 'spot'))

     borgmatic.hooks.command.execute_hook(
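Aside: the spot check described in the docstrings above compares file counts and hashes for a random sampling of source files against the latest archive. Purely as an illustrative sketch of the sampling idea (the real implementation's hashing and comparison details live elsewhere in this module and may differ):

import hashlib
import random

def sample_and_hash(source_paths, sample_percentage):
    # Illustrative sketch only: pick a random sample of source paths and hash their
    # contents, mirroring the spot check's sampled comparison against the archive.
    sample_size = max(1, len(source_paths) * sample_percentage // 100)
    hashes = {}
    for path in random.sample(source_paths, sample_size):
        with open(path, 'rb') as source_file:
            hashes[path] = hashlib.sha256(source_file.read()).hexdigest()
    return hashes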
@@ -38,37 +38,41 @@ def get_config_paths(archive_name, bootstrap_arguments, global_arguments, local_
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(
         {'borgmatic_source_directory': bootstrap_arguments.borgmatic_source_directory}
     )
-    borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(
-        {'user_runtime_directory': bootstrap_arguments.user_runtime_directory}
-    )
     config = make_bootstrap_config(bootstrap_arguments)

     # Probe for the manifest file in multiple locations, as the default location has moved to the
     # borgmatic runtime directory (which get stored as just "/borgmatic" with Borg 1.4+). But we
     # still want to support reading the manifest from previously created archives as well.
-    for base_directory in ('borgmatic', borgmatic_runtime_directory, borgmatic_source_directory):
-        borgmatic_manifest_path = os.path.join(base_directory, 'bootstrap', 'manifest.json')
+    with borgmatic.config.paths.Runtime_directory(
+        {'user_runtime_directory': bootstrap_arguments.user_runtime_directory}
+    ) as borgmatic_runtime_directory:
+        for base_directory in (
+            'borgmatic',
+            borgmatic_runtime_directory,
+            borgmatic_source_directory,
+        ):
+            borgmatic_manifest_path = os.path.join(base_directory, 'bootstrap', 'manifest.json')

-        extract_process = borgmatic.borg.extract.extract_archive(
-            global_arguments.dry_run,
-            bootstrap_arguments.repository,
-            archive_name,
-            [borgmatic_manifest_path],
-            config,
-            local_borg_version,
-            global_arguments,
-            local_path=bootstrap_arguments.local_path,
-            remote_path=bootstrap_arguments.remote_path,
-            extract_to_stdout=True,
-        )
-        manifest_json = extract_process.stdout.read()
+            extract_process = borgmatic.borg.extract.extract_archive(
+                global_arguments.dry_run,
+                bootstrap_arguments.repository,
+                archive_name,
+                [borgmatic_manifest_path],
+                config,
+                local_borg_version,
+                global_arguments,
+                local_path=bootstrap_arguments.local_path,
+                remote_path=bootstrap_arguments.remote_path,
+                extract_to_stdout=True,
+            )
+            manifest_json = extract_process.stdout.read()

-        if manifest_json:
-            break
-    else:
-        raise ValueError(
-            'Cannot read configuration paths from archive due to missing bootstrap manifest'
-        )
+            if manifest_json:
+                break
+        else:
+            raise ValueError(
+                'Cannot read configuration paths from archive due to missing bootstrap manifest'
+            )

     try:
         manifest_data = json.loads(manifest_json)
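Aside: the probing loop above tries the same relative manifest path under three base directories in turn. A simplified sketch of just that ordering (candidate_manifest_paths is a hypothetical helper, not part of the diff; the real code extracts each candidate from the archive via Borg):

import os

def candidate_manifest_paths(borgmatic_runtime_directory, borgmatic_source_directory):
    # Newest layout first: 'borgmatic' is how the runtime directory appears inside
    # Borg 1.4+ archives, then the current runtime directory, then the legacy
    # ~/.borgmatic source directory for older archives.
    for base_directory in ('borgmatic', borgmatic_runtime_directory, borgmatic_source_directory):
        yield os.path.join(base_directory, 'bootstrap', 'manifest.json')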
@@ -14,15 +14,15 @@ import borgmatic.hooks.dump
 logger = logging.getLogger(__name__)


-def create_borgmatic_manifest(config, config_paths, dry_run):
+def create_borgmatic_manifest(config, config_paths, borgmatic_runtime_directory, dry_run):
     '''
-    Create a borgmatic manifest file to store the paths to the configuration files used to create
-    the archive.
+    Given a configuration dict, a sequence of config file paths, the borgmatic runtime directory,
+    and whether this is a dry run, create a borgmatic manifest file to store the paths to the
+    configuration files used to create the archive.
     '''
     if dry_run:
         return

-    borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)
     borgmatic_manifest_path = os.path.join(
         borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
     )
@@ -72,53 +72,63 @@ def run_create(
         **hook_context,
     )
     logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}')
-    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
-        'remove_data_source_dumps',
-        config,
-        repository['path'],
-        borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
-        global_arguments.dry_run,
-    )
-    active_dumps = borgmatic.hooks.dispatch.call_hooks(
-        'dump_data_sources',
-        config,
-        repository['path'],
-        borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
-        global_arguments.dry_run,
-    )
-    if config.get('store_config_files', True):
-        create_borgmatic_manifest(
-            config,
-            config_paths,
-            global_arguments.dry_run,
-        )
-    stream_processes = [process for processes in active_dumps.values() for process in processes]
-
-    json_output = borgmatic.borg.create.create_archive(
-        global_arguments.dry_run,
-        repository['path'],
-        config,
-        config_paths,
-        local_borg_version,
-        global_arguments,
-        local_path=local_path,
-        remote_path=remote_path,
-        progress=create_arguments.progress,
-        stats=create_arguments.stats,
-        json=create_arguments.json,
-        list_files=create_arguments.list_files,
-        stream_processes=stream_processes,
-    )
-    if json_output:
-        yield borgmatic.actions.json.parse_json(json_output, repository.get('label'))
-
-    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
-        'remove_data_source_dumps',
-        config,
-        config_filename,
-        borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
-        global_arguments.dry_run,
-    )
+    with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
+        borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
+            'remove_data_source_dumps',
+            config,
+            repository['path'],
+            borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
+            borgmatic_runtime_directory,
+            global_arguments.dry_run,
+        )
+        active_dumps = borgmatic.hooks.dispatch.call_hooks(
+            'dump_data_sources',
+            config,
+            repository['path'],
+            borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
+            borgmatic_runtime_directory,
+            global_arguments.dry_run,
+        )
+        stream_processes = [process for processes in active_dumps.values() for process in processes]
+
+        if config.get('store_config_files', True):
+            create_borgmatic_manifest(
+                config,
+                config_paths,
+                borgmatic_runtime_directory,
+                global_arguments.dry_run,
+            )
+
+        json_output = borgmatic.borg.create.create_archive(
+            global_arguments.dry_run,
+            repository['path'],
+            config,
+            config_paths,
+            local_borg_version,
+            global_arguments,
+            borgmatic_runtime_directory,
+            local_path=local_path,
+            remote_path=remote_path,
+            progress=create_arguments.progress,
+            stats=create_arguments.stats,
+            json=create_arguments.json,
+            list_files=create_arguments.list_files,
+            stream_processes=stream_processes,
+        )
+
+        if json_output:
+            yield borgmatic.actions.json.parse_json(json_output, repository.get('label'))
+
+        borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
+            'remove_data_source_dumps',
+            config,
+            config_filename,
+            borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
+            borgmatic_runtime_directory,
+            global_arguments.dry_run,
+        )
+
     borgmatic.hooks.command.execute_hook(
         config.get('after_backup'),
         config.get('umask'),
@@ -108,6 +108,7 @@ def restore_single_data_source(
     hook_name,
     data_source,
     connection_params,
+    borgmatic_runtime_directory,
 ):
     '''
     Given (among other things) an archive name, a data source hook name, the hostname, port,
@@ -123,9 +124,9 @@ def restore_single_data_source(
         config,
         repository['path'],
         borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
+        borgmatic_runtime_directory,
         data_source['name'],
     )[hook_name]
-    borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)

     destination_path = (
         tempfile.mkdtemp(dir=borgmatic_runtime_directory)
@@ -135,7 +136,7 @@ def restore_single_data_source(

     try:
         # Kick off a single data source extract. If using a directory format, extract to a temporary
-        # directory. Otheriwes extract the single dump file to stdout.
+        # directory. Otherwise extract the single dump file to stdout.
         extract_process = borgmatic.borg.extract.extract_archive(
             dry_run=global_arguments.dry_run,
             repository=repository['path'],
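Aside: as the hunk above shows, directory-format dumps now get extracted into a scratch directory created inside the passed-in runtime directory rather than one derived from the configuration. A minimal sketch of that pattern, assuming borgmatic_runtime_directory is already in scope:

import tempfile

# Directory-format dumps can't stream through a single named pipe, so they're
# extracted into a temporary directory under the borgmatic runtime directory and
# restored from there.
destination_path = tempfile.mkdtemp(dir=borgmatic_runtime_directory)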
@@ -181,17 +182,17 @@ def collect_archive_data_source_names(
     global_arguments,
     local_path,
     remote_path,
+    borgmatic_runtime_directory,
 ):
     '''
     Given a local or remote repository path, a resolved archive name, a configuration dict, the
-    local Borg version, global_arguments an argparse.Namespace, and local and remote Borg paths,
-    query the archive for the names of data sources it contains as dumps and return them as a dict
-    from hook name to a sequence of data source names.
+    local Borg version, global_arguments an argparse.Namespace, local and remote Borg paths, and the
+    borgmatic runtime directory, query the archive for the names of data sources it contains as
+    dumps and return them as a dict from hook name to a sequence of data source names.
     '''
     borgmatic_source_directory = str(
         pathlib.Path(borgmatic.config.paths.get_borgmatic_source_directory(config))
     )
-    borgmatic_runtime_directory = borgmatic.config.paths.get_borgmatic_runtime_directory(config)

     # Probe for the data source dumps in multiple locations, as the default location has moved to
     # the borgmatic runtime directory (which get stored as just "/borgmatic" with Borg 1.4+). But we
@@ -330,6 +331,7 @@ def run_restore(
     global_arguments,
     local_path,
     remote_path,
+    borgmatic_runtime_directory,
 ):
     '''
     Run the "restore" action for the given repository, but only if the repository matches the
@@ -346,105 +348,110 @@ def run_restore(
         f'{repository.get("label", repository["path"])}: Restoring data sources from archive {restore_arguments.archive}'
     )

-    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
-        'remove_data_source_dumps',
-        config,
-        repository['path'],
-        borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
-        global_arguments.dry_run,
-    )
-
-    archive_name = borgmatic.borg.repo_list.resolve_archive_name(
-        repository['path'],
-        restore_arguments.archive,
-        config,
-        local_borg_version,
-        global_arguments,
-        local_path,
-        remote_path,
-    )
-    archive_data_source_names = collect_archive_data_source_names(
-        repository['path'],
-        archive_name,
-        config,
-        local_borg_version,
-        global_arguments,
-        local_path,
-        remote_path,
-    )
-    restore_names = find_data_sources_to_restore(
-        restore_arguments.data_sources, archive_data_source_names
-    )
-    found_names = set()
-    remaining_restore_names = {}
-    connection_params = {
-        'hostname': restore_arguments.hostname,
-        'port': restore_arguments.port,
-        'username': restore_arguments.username,
-        'password': restore_arguments.password,
-        'restore_path': restore_arguments.restore_path,
-    }
-
-    for hook_name, data_source_names in restore_names.items():
-        for data_source_name in data_source_names:
-            found_hook_name, found_data_source = get_configured_data_source(
-                config, archive_data_source_names, hook_name, data_source_name
-            )
-
-            if not found_data_source:
-                remaining_restore_names.setdefault(found_hook_name or hook_name, []).append(
-                    data_source_name
-                )
-                continue
-
-            found_names.add(data_source_name)
-            restore_single_data_source(
-                repository,
-                config,
-                local_borg_version,
-                global_arguments,
-                local_path,
-                remote_path,
-                archive_name,
-                found_hook_name or hook_name,
-                dict(found_data_source, **{'schemas': restore_arguments.schemas}),
-                connection_params,
-            )
-
-    # For any data sources that weren't found via exact matches in the configuration, try to
-    # fallback to "all" entries.
-    for hook_name, data_source_names in remaining_restore_names.items():
-        for data_source_name in data_source_names:
-            found_hook_name, found_data_source = get_configured_data_source(
-                config, archive_data_source_names, hook_name, data_source_name, 'all'
-            )
-            if not found_data_source:
-                continue
-
-            found_names.add(data_source_name)
-            data_source = copy.copy(found_data_source)
-            data_source['name'] = data_source_name
-
-            restore_single_data_source(
-                repository,
-                config,
-                local_borg_version,
-                global_arguments,
-                local_path,
-                remote_path,
-                archive_name,
-                found_hook_name or hook_name,
-                dict(data_source, **{'schemas': restore_arguments.schemas}),
-                connection_params,
-            )
-
-    borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
-        'remove_data_source_dumps',
-        config,
-        repository['path'],
-        borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
-        global_arguments.dry_run,
-    )
+    with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
+        borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
+            'remove_data_source_dumps',
+            config,
+            repository['path'],
+            borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
+            borgmatic_runtime_directory,
+            global_arguments.dry_run,
+        )
+
+        archive_name = borgmatic.borg.repo_list.resolve_archive_name(
+            repository['path'],
+            restore_arguments.archive,
+            config,
+            local_borg_version,
+            global_arguments,
+            local_path,
+            remote_path,
+        )
+        archive_data_source_names = collect_archive_data_source_names(
+            repository['path'],
+            archive_name,
+            config,
+            local_borg_version,
+            global_arguments,
+            local_path,
+            remote_path,
+        )
+        restore_names = find_data_sources_to_restore(
+            restore_arguments.data_sources, archive_data_source_names
+        )
+        found_names = set()
+        remaining_restore_names = {}
+        connection_params = {
+            'hostname': restore_arguments.hostname,
+            'port': restore_arguments.port,
+            'username': restore_arguments.username,
+            'password': restore_arguments.password,
+            'restore_path': restore_arguments.restore_path,
+        }
+
+        for hook_name, data_source_names in restore_names.items():
+            for data_source_name in data_source_names:
+                found_hook_name, found_data_source = get_configured_data_source(
+                    config, archive_data_source_names, hook_name, data_source_name
+                )
+
+                if not found_data_source:
+                    remaining_restore_names.setdefault(found_hook_name or hook_name, []).append(
+                        data_source_name
+                    )
+                    continue
+
+                found_names.add(data_source_name)
+                restore_single_data_source(
+                    repository,
+                    config,
+                    local_borg_version,
+                    global_arguments,
+                    local_path,
+                    remote_path,
+                    archive_name,
+                    found_hook_name or hook_name,
+                    dict(found_data_source, **{'schemas': restore_arguments.schemas}),
+                    connection_params,
+                    borgmatic_runtime_directory,
+                )
+
+        # For any data sources that weren't found via exact matches in the configuration, try to
+        # fallback to "all" entries.
+        for hook_name, data_source_names in remaining_restore_names.items():
+            for data_source_name in data_source_names:
+                found_hook_name, found_data_source = get_configured_data_source(
+                    config, archive_data_source_names, hook_name, data_source_name, 'all'
+                )
+
+                if not found_data_source:
+                    continue
+
+                found_names.add(data_source_name)
+                data_source = copy.copy(found_data_source)
+                data_source['name'] = data_source_name
+
+                restore_single_data_source(
+                    repository,
+                    config,
+                    local_borg_version,
+                    global_arguments,
+                    local_path,
+                    remote_path,
+                    archive_name,
+                    found_hook_name or hook_name,
+                    dict(data_source, **{'schemas': restore_arguments.schemas}),
+                    connection_params,
+                    borgmatic_runtime_directory,
+                )
+
+        borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
+            'remove_data_source_dumps',
+            config,
+            repository['path'],
+            borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
+            borgmatic_runtime_directory,
+            global_arguments.dry_run,
+        )

     ensure_data_sources_found(restore_names, remaining_restore_names, found_names)
@@ -504,6 +504,7 @@ def create_archive(
     config_paths,
     local_borg_version,
     global_arguments,
+    borgmatic_runtime_directory,
     local_path='borg',
     remote_path=None,
     progress=False,
@@ -524,9 +525,7 @@ def create_archive(

     working_directory = borgmatic.config.paths.get_working_directory(config)
     borgmatic_runtime_directories = expand_directories(
-        collect_borgmatic_runtime_directories(
-            borgmatic.config.paths.get_borgmatic_runtime_directory(config)
-        ),
+        collect_borgmatic_runtime_directories(borgmatic_runtime_directory),
         working_directory=working_directory,
     )

@@ -275,8 +275,8 @@ def run_actions(
     '''
     Given parsed command-line arguments as an argparse.ArgumentParser instance, the configuration
     filename, a configuration dict, a sequence of loaded configuration paths, local and remote paths
-    to Borg, a local Borg version string, and a repository name, run all actions from the
-    command-line arguments on the given repository.
+    to Borg, a local Borg version string, a repository name, and the borgmatic runtime directory,
+    run all actions from the command-line arguments on the given repository.

     Yield JSON output strings from executing any actions that produce JSON.

@@ -1,4 +1,8 @@
 import logging
 import os
+import tempfile

 logger = logging.getLogger(__name__)


 def expand_user_in_path(path):
@@ -26,28 +30,70 @@ def get_borgmatic_source_directory(config):
     return expand_user_in_path(config.get('borgmatic_source_directory') or '~/.borgmatic')


-def get_borgmatic_runtime_directory(config):
+class Runtime_directory:
     '''
-    Given a configuration dict, get the borgmatic runtime directory used for storing temporary
-    runtime data like streaming database dumps and bootstrap metadata. Defaults to
-    $XDG_RUNTIME_DIR/./borgmatic or $TMPDIR/./borgmatic or $TEMP/./borgmatic or
-    /run/user/$UID/./borgmatic.
+    A Python context manager for creating and cleaning up the borgmatic runtime directory used for
+    storing temporary runtime data like streaming database dumps and bootstrap metadata.

-    The "/./" is taking advantage of a Borg feature such that the part of the path before the "/./"
-    does not get stored in the file path within an archive. That way, the path of the runtime
-    directory can change without leaving database dumps within an archive inaccessible.
+    Example use as a context manager:
+
+        with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
+            do_something_with(borgmatic_runtime_directory)
+
+    For the scope of that "with" statement, the runtime directory is available. Afterwards, it
+    automatically gets cleaned up as necessary.
     '''
-    return expand_user_in_path(
-        os.path.join(
-            config.get('user_runtime_directory')
-            or os.environ.get('XDG_RUNTIME_DIR')
-            or os.environ.get('TMPDIR')
-            or os.environ.get('TEMP')
-            or f'/run/user/{os.getuid()}',
-            '.',
-            'borgmatic',
-        )
-    )
+
+    def __init__(self, config):
+        '''
+        Given a configuration dict, determine the borgmatic runtime directory, creating a secure,
+        temporary directory within it if necessary. Defaults to $XDG_RUNTIME_DIR/./borgmatic or
+        $RUNTIME_DIRECTORY/./borgmatic or $TMPDIR/borgmatic-[random]/./borgmatic or
+        $TEMP/borgmatic-[random]/./borgmatic or /tmp/borgmatic-[random]/./borgmatic where "[random]"
+        is a randomly generated string intended to avoid path collisions.
+
+        If XDG_RUNTIME_DIR or RUNTIME_DIRECTORY is set and already ends in "/borgmatic", then don't
+        tack on a second "/borgmatic" path component.
+
+        The "/./" is taking advantage of a Borg feature such that the part of the path before the "/./"
+        does not get stored in the file path within an archive. That way, the path of the runtime
+        directory can change without leaving database dumps within an archive inaccessible.
+        '''
+        runtime_directory = (
+            config.get('user_runtime_directory')
+            or os.environ.get('XDG_RUNTIME_DIR')  # Set by PAM on Linux.
+            or os.environ.get('RUNTIME_DIRECTORY')  # Set by systemd if configured.
+        )
+
+        if runtime_directory:
+            self.temporary_directory = None
+        else:
+            self.temporary_directory = tempfile.TemporaryDirectory(
+                prefix='borgmatic', dir=os.environ.get('TMPDIR') or os.environ.get('TEMP') or '/tmp'
+            )
+            runtime_directory = self.temporary_directory.name
+
+        (base_path, final_directory) = os.path.split(runtime_directory.rstrip(os.path.sep))
+
+        self.runtime_path = expand_user_in_path(
+            os.path.join(
+                base_path if final_directory == 'borgmatic' else runtime_directory, '.', 'borgmatic'
+            )
+        )
+
+    def __enter__(self):
+        '''
+        Return the borgmatic runtime path as a string.
+        '''
+        return self.runtime_path
+
+    def __exit__(self, exception, value, traceback):
+        '''
+        Delete any temporary directory that was created as part of initialization.
+        '''
+        if self.temporary_directory:
+            self.temporary_directory.cleanup()


 def get_borgmatic_state_directory(config):
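Aside: to make the new behavior concrete, here is a hedged sketch of what the computed runtime path looks like in two environments (paths are illustrative):

import os

import borgmatic.config.paths

# With XDG_RUNTIME_DIR=/run/user/1000 (typically set by PAM), the computed path is
# /run/user/1000/./borgmatic. Because Borg drops everything before the "/./" from
# archived paths, files under it get stored as just /borgmatic/... in the archive.
os.environ['XDG_RUNTIME_DIR'] = '/run/user/1000'
with borgmatic.config.paths.Runtime_directory({}) as borgmatic_runtime_directory:
    print(borgmatic_runtime_directory)  # /run/user/1000/./borgmatic

# With no user_runtime_directory, XDG_RUNTIME_DIR, or RUNTIME_DIRECTORY available, a
# borgmatic-prefixed temporary directory gets created (and cleaned up on exit).
for variable in ('XDG_RUNTIME_DIR', 'RUNTIME_DIRECTORY', 'TMPDIR', 'TEMP'):
    os.environ.pop(variable, None)
with borgmatic.config.paths.Runtime_directory({}) as borgmatic_runtime_directory:
    print(borgmatic_runtime_directory)  # e.g. /tmp/borgmaticXXXXXXXX/./borgmatic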
@@ -59,10 +105,9 @@ def get_borgmatic_state_directory(config):
     return expand_user_in_path(
         os.path.join(
             config.get('user_state_directory')
-            or os.environ.get(
-                'XDG_STATE_HOME',
-                '~/.local/state',
-            ),
+            or os.environ.get('XDG_STATE_HOME')
+            or os.environ.get('STATE_DIRECTORY')  # Set by systemd if configured.
+            or '~/.local/state',
             'borgmatic',
         )
     )
@@ -14,15 +14,11 @@ from borgmatic.hooks import dump
 logger = logging.getLogger(__name__)


-def make_dump_path(config, base_directory=None):  # pragma: no cover
+def make_dump_path(base_directory):  # pragma: no cover
     '''
-    Given a configuration dict and an optional base directory, make the corresponding dump path. If
-    a base directory isn't provided, use the borgmatic runtime directory.
+    Given a base directory, make the corresponding dump path.
     '''
-    return dump.make_data_source_dump_path(
-        base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
-        'mariadb_databases',
-    )
+    return dump.make_data_source_dump_path(base_directory, 'mariadb_databases')


 SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
@@ -126,12 +122,12 @@ def use_streaming(databases, config, log_prefix):
     return any(databases)


-def dump_data_sources(databases, config, log_prefix, dry_run):
+def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
     Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of
     dicts, one dict describing each database as per the configuration schema. Use the given
-    configuration dict to construct the destination path and the given log prefix in any log
-    entries.
+    borgmatic runtime directory to construct the destination path and the given log prefix in any
+    log entries.

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -142,7 +138,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
     logger.info(f'{log_prefix}: Dumping MariaDB databases{dry_run_label}')

     for database in databases:
-        dump_path = make_dump_path(config)
+        dump_path = make_dump_path(borgmatic_runtime_directory)
         extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
         dump_database_names = database_names_to_dump(
             database, extra_environment, log_prefix, dry_run
@@ -185,30 +181,36 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
     return [process for process in processes if process]


-def remove_data_source_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
+def remove_data_source_dumps(
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
+):  # pragma: no cover
     '''
-    Remove all database dump files for this hook regardless of the given databases. Use the given
-    configuration dict to construct the destination path and the log prefix in any log entries. If
-    this is a dry run, then don't actually remove anything.
+    Remove all database dump files for this hook regardless of the given databases. Use the
+    borgmatic_runtime_directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
-    dump.remove_data_source_dumps(make_dump_path(config), 'MariaDB', log_prefix, dry_run)
+    dump.remove_data_source_dumps(
+        make_dump_path(borgmatic_runtime_directory), 'MariaDB', log_prefix, dry_run
+    )


-def make_data_source_dump_patterns(databases, config, log_prefix, name=None):  # pragma: no cover
+def make_data_source_dump_patterns(
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
+):  # pragma: no cover
     '''
-    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
-    database name to match, return the corresponding glob patterns to match the database dump in an
-    archive.
+    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
+        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, 'borgmatic'), name, hostname='*'
+            make_dump_path(borgmatic_runtime_directory), name, hostname='*'
         ),
-        dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, borgmatic_source_directory), name, hostname='*'
+            make_dump_path(borgmatic_source_directory), name, hostname='*'
         ),
     )
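Aside: each database hook now builds its restore-time glob patterns from three bases, mirroring the manifest probing earlier in the diff: the literal 'borgmatic' prefix used inside Borg 1.4+ archives, the current runtime directory, and the legacy source directory. A hedged illustration, assuming the usual <dump path>/<hostname>/<database name> dump layout and a hypothetical runtime directory of /run/user/1000/borgmatic:

# Roughly what make_data_source_dump_patterns() produces for a database named "posts";
# the actual filenames come from dump.make_data_source_dump_filename().
patterns = (
    'borgmatic/mariadb_databases/*/posts',                 # newer archives (Borg 1.4+)
    '/run/user/1000/borgmatic/mariadb_databases/*/posts',  # current runtime directory
    '/root/.borgmatic/mariadb_databases/*/posts',          # legacy ~/.borgmatic location
)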
@@ -8,15 +8,11 @@ from borgmatic.hooks import dump
 logger = logging.getLogger(__name__)


-def make_dump_path(config, base_directory=None):  # pragma: no cover
+def make_dump_path(base_directory):  # pragma: no cover
     '''
-    Given a configuration dict and an optional base directory, make the corresponding dump path. If
-    a base directory isn't provided, use the borgmatic runtime directory.
+    Given a base directory, make the corresponding dump path.
     '''
-    return dump.make_data_source_dump_path(
-        base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
-        'mongodb_databases',
-    )
+    return dump.make_data_source_dump_path(base_directory, 'mongodb_databases')


 def use_streaming(databases, config, log_prefix):
@@ -27,11 +23,11 @@ def use_streaming(databases, config, log_prefix):
     return any(database.get('format') != 'directory' for database in databases)


-def dump_data_sources(databases, config, log_prefix, dry_run):
+def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
     Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
-    dicts, one dict describing each database as per the configuration schema. Use the configuration
-    dict to construct the destination path and the given log prefix in any log entries.
+    dicts, one dict describing each database as per the configuration schema. Use the borgmatic
+    runtime directory to construct the destination path and the given log prefix in any log entries.

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -44,7 +40,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
     for database in databases:
         name = database['name']
         dump_filename = dump.make_data_source_dump_filename(
-            make_dump_path(config), name, database.get('hostname')
+            make_dump_path(borgmatic_runtime_directory), name, database.get('hostname')
         )
         dump_format = database.get('format', 'archive')

@@ -94,30 +90,36 @@ def build_dump_command(database, dump_filename, dump_format):
     )


-def remove_data_source_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
+def remove_data_source_dumps(
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
+):  # pragma: no cover
     '''
-    Remove all database dump files for this hook regardless of the given databases. Use the log
-    prefix in any log entries. Use the given configuration dict to construct the destination path.
-    If this is a dry run, then don't actually remove anything.
+    Remove all database dump files for this hook regardless of the given databases. Use the
+    borgmatic_runtime_directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
-    dump.remove_data_source_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run)
+    dump.remove_data_source_dumps(
+        make_dump_path(borgmatic_runtime_directory), 'MongoDB', log_prefix, dry_run
+    )


-def make_data_source_dump_patterns(databases, config, log_prefix, name=None):  # pragma: no cover
+def make_data_source_dump_patterns(
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
+):  # pragma: no cover
     '''
-    Given a sequence of database configurations dicts, a configuration dict, a prefix to log with,
-    and a database name to match, return the corresponding glob patterns to match the database dump
-    in an archive.
+    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
+        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, 'borgmatic'), name, hostname='*'
+            make_dump_path(borgmatic_runtime_directory), name, hostname='*'
        ),
-        dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, borgmatic_source_directory), name, hostname='*'
+            make_dump_path(borgmatic_source_directory), name, hostname='*'
         ),
     )
@@ -14,15 +14,11 @@ from borgmatic.hooks import dump
 logger = logging.getLogger(__name__)


-def make_dump_path(config, base_directory=None):  # pragma: no cover
+def make_dump_path(base_directory):  # pragma: no cover
     '''
-    Given a configuration dict and an optional base directory, make the corresponding dump path. If
-    a base directory isn't provided, use the borgmatic runtime directory.
+    Given a base directory, make the corresponding dump path.
     '''
-    return dump.make_data_source_dump_path(
-        base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
-        'mysql_databases',
-    )
+    return dump.make_data_source_dump_path(base_directory, 'mysql_databases')


 SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
@@ -125,11 +121,12 @@ def use_streaming(databases, config, log_prefix):
     return any(databases)


-def dump_data_sources(databases, config, log_prefix, dry_run):
+def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
     Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
     of dicts, one dict describing each database as per the configuration schema. Use the given
-    configuration dict to construct the destination path and the given log prefix in any log entries.
+    borgmatic runtime directory to construct the destination path and the given log prefix in any
+    log entries.

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -140,7 +137,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
     logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')

     for database in databases:
-        dump_path = make_dump_path(config)
+        dump_path = make_dump_path(borgmatic_runtime_directory)
         extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
         dump_database_names = database_names_to_dump(
             database, extra_environment, log_prefix, dry_run
@@ -183,30 +180,36 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
     return [process for process in processes if process]


-def remove_data_source_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
+def remove_data_source_dumps(
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
+):  # pragma: no cover
     '''
-    Remove all database dump files for this hook regardless of the given databases. Use the given
-    configuration dict to construct the destination path and the log prefix in any log entries. If
-    this is a dry run, then don't actually remove anything.
+    Remove all database dump files for this hook regardless of the given databases. Use the
+    borgmatic runtime directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
-    dump.remove_data_source_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run)
+    dump.remove_data_source_dumps(
+        make_dump_path(borgmatic_runtime_directory), 'MySQL', log_prefix, dry_run
+    )


-def make_data_source_dump_patterns(databases, config, log_prefix, name=None):  # pragma: no cover
+def make_data_source_dump_patterns(
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
+):  # pragma: no cover
     '''
-    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
-    database name to match, return the corresponding glob patterns to match the database dump in an
-    archive.
+    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
+        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, 'borgmatic'), name, hostname='*'
+            make_dump_path(borgmatic_runtime_directory), name, hostname='*'
         ),
-        dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, borgmatic_source_directory), name, hostname='*'
+            make_dump_path(borgmatic_source_directory), name, hostname='*'
         ),
     )
@@ -16,15 +16,11 @@ from borgmatic.hooks import dump
 logger = logging.getLogger(__name__)


-def make_dump_path(config, base_directory=None):  # pragma: no cover
+def make_dump_path(base_directory):  # pragma: no cover
     '''
-    Given a configuration dict and an optional base directory, make the corresponding dump path. If
-    a base directory isn't provided, use the borgmatic runtime directory.
+    Given a base directory, make the corresponding dump path.
     '''
-    return dump.make_data_source_dump_path(
-        base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
-        'postgresql_databases',
-    )
+    return dump.make_data_source_dump_path(base_directory, 'postgresql_databases')


 def make_extra_environment(database, restore_connection_params=None):
@@ -108,12 +104,12 @@ def use_streaming(databases, config, log_prefix):
     return any(database.get('format') != 'directory' for database in databases)


-def dump_data_sources(databases, config, log_prefix, dry_run):
+def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
     Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
     dicts, one dict describing each database as per the configuration schema. Use the given
-    configuration dict to construct the destination path and the given log prefix in any log
-    entries.
+    borgmatic runtime directory to construct the destination path and the given log prefix in any
+    log entries.

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -127,7 +123,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):

     for database in databases:
         extra_environment = make_extra_environment(database)
-        dump_path = make_dump_path(config)
+        dump_path = make_dump_path(borgmatic_runtime_directory)
         dump_database_names = database_names_to_dump(
             database, extra_environment, log_prefix, dry_run
         )
@@ -210,30 +206,36 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
     return processes


-def remove_data_source_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
+def remove_data_source_dumps(
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
+):  # pragma: no cover
     '''
-    Remove all database dump files for this hook regardless of the given databases. Use the given
-    configuration dict to construct the destination path and the log prefix in any log entries. If
-    this is a dry run, then don't actually remove anything.
+    Remove all database dump files for this hook regardless of the given databases. Use the
+    borgmatic runtime directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
-    dump.remove_data_source_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run)
+    dump.remove_data_source_dumps(
+        make_dump_path(borgmatic_runtime_directory), 'PostgreSQL', log_prefix, dry_run
+    )


-def make_data_source_dump_patterns(databases, config, log_prefix, name=None):  # pragma: no cover
+def make_data_source_dump_patterns(
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
+):  # pragma: no cover
     '''
-    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
-    database name to match, return the corresponding glob patterns to match the database dump in an
-    archive.
+    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
+        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, 'borgmatic'), name, hostname='*'
+            make_dump_path(borgmatic_runtime_directory), name, hostname='*'
         ),
-        dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, borgmatic_source_directory), name, hostname='*'
+            make_dump_path(borgmatic_source_directory), name, hostname='*'
         ),
     )
@@ -9,15 +9,11 @@ from borgmatic.hooks import dump
 logger = logging.getLogger(__name__)


-def make_dump_path(config, base_directory=None):  # pragma: no cover
+def make_dump_path(base_directory):  # pragma: no cover
     '''
-    Given a configuration dict and an optional base directory, make the corresponding dump path. If
-    a base directory isn't provided, use the borgmatic runtime directory.
+    Given a base directory, make the corresponding dump path.
     '''
-    return dump.make_data_source_dump_path(
-        base_directory or borgmatic.config.paths.get_borgmatic_runtime_directory(config),
-        'sqlite_databases',
-    )
+    return dump.make_data_source_dump_path(base_directory, 'sqlite_databases')


 def use_streaming(databases, config, log_prefix):
@@ -28,11 +24,11 @@ def use_streaming(databases, config, log_prefix):
     return any(databases)


-def dump_data_sources(databases, config, log_prefix, dry_run):
+def dump_data_sources(databases, config, log_prefix, borgmatic_runtime_directory, dry_run):
     '''
     Dump the given SQLite databases to a named pipe. The databases are supplied as a sequence of
-    configuration dicts, as per the configuration schema. Use the given configuration dict to
-    construct the destination path and the given log prefix in any log entries.
+    configuration dicts, as per the configuration schema. Use the given borgmatic runtime directory
+    to construct the destination path and the given log prefix in any log entries.

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@@ -52,7 +48,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
                 f'{log_prefix}: No SQLite database at {database_path}; an empty database will be created and dumped'
             )

-        dump_path = make_dump_path(config)
+        dump_path = make_dump_path(borgmatic_runtime_directory)
         dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])

         if os.path.exists(dump_filename):
@@ -80,30 +76,36 @@ def dump_data_sources(databases, config, log_prefix, dry_run):
     return processes


-def remove_data_source_dumps(databases, config, log_prefix, dry_run):  # pragma: no cover
+def remove_data_source_dumps(
+    databases, config, log_prefix, borgmatic_runtime_directory, dry_run
+):  # pragma: no cover
     '''
-    Remove the given SQLite database dumps from the filesystem. The databases are supplied as a
-    sequence of configuration dicts, as per the configuration schema. Use the given configuration
-    dict to construct the destination path and the given log prefix in any log entries. If this is a
-    dry run, then don't actually remove anything.
+    Remove all database dump files for this hook regardless of the given databases. Use the
+    borgmatic runtime directory to construct the destination path and the log prefix in any log
+    entries. If this is a dry run, then don't actually remove anything.
     '''
-    dump.remove_data_source_dumps(make_dump_path(config), 'SQLite', log_prefix, dry_run)
+    dump.remove_data_source_dumps(
+        make_dump_path(borgmatic_runtime_directory), 'SQLite', log_prefix, dry_run
+    )


-def make_data_source_dump_patterns(databases, config, log_prefix, name=None):  # pragma: no cover
+def make_data_source_dump_patterns(
+    databases, config, log_prefix, borgmatic_runtime_directory, name=None
+):  # pragma: no cover
     '''
-    Make a pattern that matches the given SQLite databases. The databases are supplied as a sequence
-    of configuration dicts, as per the configuration schema.
+    Given a sequence of configurations dicts, a configuration dict, a prefix to log with, the
+    borgmatic runtime directory, and a database name to match, return the corresponding glob
+    patterns to match the database dump in an archive.
     '''
     borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config)

     return (
+        dump.make_data_source_dump_filename(make_dump_path('borgmatic'), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, 'borgmatic'), name, hostname='*'
+            make_dump_path(borgmatic_runtime_directory), name, hostname='*'
         ),
-        dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*'),
         dump.make_data_source_dump_filename(
-            make_dump_path(config, borgmatic_source_directory), name, hostname='*'
+            make_dump_path(borgmatic_source_directory), name, hostname='*'
         ),
     )
@@ -9,6 +9,8 @@ Documentation=https://torsion.org/borgmatic/

 [Service]
 Type=oneshot
+RuntimeDirectory=borgmatic
+StateDirectory=borgmatic

 # Load single encrypted credential.
 LoadCredentialEncrypted=borgmatic.pw
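Aside: with these two options, systemd creates /run/borgmatic and /var/lib/borgmatic for the service and exports the RUNTIME_DIRECTORY and STATE_DIRECTORY environment variables, which is what the updated path logic above falls back to (see the "# Set by systemd if configured." comments). A trivial illustration of reading them:

import os

# Only set when the unit configures RuntimeDirectory= / StateDirectory=; otherwise None.
runtime_directory = os.environ.get('RUNTIME_DIRECTORY')  # e.g. /run/borgmatic
state_directory = os.environ.get('STATE_DIRECTORY')      # e.g. /var/lib/borgmatic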
@@ -16,9 +16,6 @@ def test_get_config_paths_returns_list_of_config_paths():
     flexmock(module.borgmatic.config.paths).should_receive(
         'get_borgmatic_source_directory'
     ).and_return('/source')
-    flexmock(module.borgmatic.config.paths).should_receive(
-        'get_borgmatic_runtime_directory'
-    ).and_return('/runtime')
     flexmock(module).should_receive('make_bootstrap_config').and_return({})
     bootstrap_arguments = flexmock(
         repository='repo',
@@ -33,6 +30,9 @@ def test_get_config_paths_returns_list_of_config_paths():
         dry_run=False,
     )
     local_borg_version = flexmock()
+    flexmock(module.borgmatic.config.paths).should_receive('Runtime_directory').and_return(
+        flexmock()
+    )
     extract_process = flexmock(
         stdout=flexmock(
             read=lambda: '{"config_paths": ["/borgmatic/config.yaml"]}',
@@ -47,13 +47,58 @@ def test_get_config_paths_returns_list_of_config_paths():
     ) == ['/borgmatic/config.yaml']


+def test_get_config_paths_probes_for_manifest():
+    flexmock(module.borgmatic.config.paths).should_receive(
+        'get_borgmatic_source_directory'
+    ).and_return('/source')
+    flexmock(module).should_receive('make_bootstrap_config').and_return({})
+    bootstrap_arguments = flexmock(
+        repository='repo',
+        archive='archive',
+        ssh_command=None,
+        local_path='borg7',
+        remote_path='borg8',
+        borgmatic_source_directory=None,
+        user_runtime_directory=None,
+    )
+    global_arguments = flexmock(
+        dry_run=False,
+    )
+    local_borg_version = flexmock()
+    borgmatic_runtime_directory = flexmock()
+    flexmock(module.borgmatic.config.paths).should_receive('Runtime_directory').and_return(
+        borgmatic_runtime_directory,
+    )
+    flexmock(module.os.path).should_receive('join').with_args(
+        'borgmatic', 'bootstrap', 'manifest.json'
+    ).and_return(flexmock()).once()
+    flexmock(module.os.path).should_receive('join').with_args(
+        borgmatic_runtime_directory, 'bootstrap', 'manifest.json'
+    ).and_return(flexmock()).once()
+    flexmock(module.os.path).should_receive('join').with_args(
+        '/source', 'bootstrap', 'manifest.json'
+    ).and_return(flexmock()).once()
+    manifest_missing_extract_process = flexmock(
+        stdout=flexmock(read=lambda: None),
+    )
+    manifest_found_extract_process = flexmock(
+        stdout=flexmock(
+            read=lambda: '{"config_paths": ["/borgmatic/config.yaml"]}',
+        ),
+    )
+    flexmock(module.borgmatic.borg.extract).should_receive('extract_archive').and_return(
+        manifest_missing_extract_process
+    ).and_return(manifest_missing_extract_process).and_return(manifest_found_extract_process)
+
+    assert module.get_config_paths(
+        'archive', bootstrap_arguments, global_arguments, local_borg_version
+    ) == ['/borgmatic/config.yaml']
+
+
 def test_get_config_paths_translates_ssh_command_argument_to_config():
     flexmock(module.borgmatic.config.paths).should_receive(
         'get_borgmatic_source_directory'
     ).and_return('/source')
-    flexmock(module.borgmatic.config.paths).should_receive(
-        'get_borgmatic_runtime_directory'
-    ).and_return('/runtime')