import logging
import os
import shutil

from borgmatic.borg.state import DEFAULT_BORGMATIC_SOURCE_DIRECTORY
# Module-level logger named after this module, per the standard logging convention.
logger = logging.getLogger(__name__)
# Names of all data source (database) hooks that borgmatic supports. Used to identify which
# configured hooks are data source hooks (as opposed to other kinds of hooks).
DATA_SOURCE_HOOK_NAMES = (
    'mariadb_databases',
    'mysql_databases',
    'mongodb_databases',
    'postgresql_databases',
    'sqlite_databases',
)
2019-11-08 19:53:27 +00:00
|
|
|
|
def make_data_source_dump_path(borgmatic_source_directory, data_source_hook_name):
    '''
    Given a borgmatic source directory (or None) and a data source hook name, construct a data
    source dump path.
    '''
    # Fall back to the default borgmatic source directory when none is given.
    return os.path.join(
        borgmatic_source_directory or DEFAULT_BORGMATIC_SOURCE_DIRECTORY, data_source_hook_name
    )
def make_data_source_dump_filename(dump_path, name, hostname=None):
    '''
    Based on the given dump directory path, data source name, and hostname, return a filename to use
    for the data source dump. The hostname defaults to localhost.

    Raise ValueError if the data source name or the hostname is invalid.
    '''
    if os.path.sep in name:
        raise ValueError(f'Invalid data source name {name}')

    # The hostname is joined into the path below just like the name is, so also reject path
    # separators there—otherwise a malformed hostname could escape the dump directory.
    if hostname is not None and os.path.sep in hostname:
        raise ValueError(f'Invalid hostname {hostname}')

    return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name)
def create_parent_directory_for_dump(dump_path):
    '''
    Create the directory that will contain the given dump path, if it doesn't already exist.

    The directory is created with owner-only permissions (mode 0o700).
    '''
    parent_directory = os.path.dirname(dump_path)
    os.makedirs(parent_directory, mode=0o700, exist_ok=True)
def create_named_pipe_for_dump(dump_path):
    '''
    Create a named pipe (FIFO) at the given dump path, making its parent directory first if
    needed. The pipe is created with owner-only permissions (mode 0o600).
    '''
    create_parent_directory_for_dump(dump_path)
    os.mkfifo(dump_path, mode=0o600)
|
def remove_data_source_dumps(dump_path, data_source_type_name, log_prefix, dry_run):
    '''
    Remove all data source dumps in the given dump directory path (including the directory
    itself). If this is a dry run, then don't actually remove anything.
    '''
    dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
    logger.debug(f'{log_prefix}: Removing {data_source_type_name} data source dumps{dry_run_label}')

    expanded_path = os.path.expanduser(dump_path)

    # Only delete when this isn't a dry run and there's actually something at the path.
    if not dry_run and os.path.exists(expanded_path):
        shutil.rmtree(expanded_path)
|
|
|
def convert_glob_patterns_to_borg_patterns(patterns):
    '''
    Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
    patterns like "sh:etc/*".
    '''
    # Borg "sh:"-style patterns are relative, so strip any leading path separator(s).
    return ['sh:' + glob_pattern.lstrip(os.path.sep) for glob_pattern in patterns]
|