2023-03-02 18:25:16 +00:00
|
|
|
import logging
|
|
|
|
import os
|
2024-01-07 18:21:49 +00:00
|
|
|
import shlex
|
2023-03-02 18:25:16 +00:00
|
|
|
|
|
|
|
from borgmatic.execute import execute_command, execute_command_with_processes
|
|
|
|
from borgmatic.hooks import dump
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2023-07-09 06:14:30 +00:00
|
|
|
def make_dump_path(config): # pragma: no cover
    '''
    Construct and return the dump path for this hook, derived from the given configuration dict's
    borgmatic source directory.
    '''
    source_directory = config.get('borgmatic_source_directory')

    return dump.make_data_source_dump_path(source_directory, 'sqlite_databases')
|
|
|
|
|
|
|
|
|
2024-04-15 18:02:05 +00:00
|
|
|
def use_streaming(databases, config, log_prefix):
    '''
    Given a sequence of SQLite database configuration dicts, a configuration dict (ignored), and a
    log prefix (ignored), return whether streaming will be using during dumps.
    '''
    # Streaming is used whenever there's at least one truthy database configured.
    for database in databases:
        if database:
            return True

    return False
|
|
|
|
|
|
|
|
|
2023-08-24 20:50:10 +00:00
|
|
|
def dump_data_sources(databases, config, log_prefix, dry_run):
    '''
    Dump the given SQLite databases to a named pipe. The databases are supplied as a sequence of
    configuration dicts, as per the configuration schema. Use the given configuration dict to
    construct the destination path and the given log prefix in any log entries.

    Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
    pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
    processes = []

    logger.info(f'{log_prefix}: Dumping SQLite databases{dry_run_label}')

    for database in databases:
        database_path = database['path']

        # "all" is meaningful for other database hooks (e.g. PostgreSQL), but SQLite has no notion
        # of dumping every database at once—warn and proceed with "all" as a literal name.
        if database['name'] == 'all':
            logger.warning('The "all" database name has no meaning for SQLite databases')
        # sqlite3 silently creates a missing database file, so flag it before dumping.
        if not os.path.exists(database_path):
            logger.warning(
                f'{log_prefix}: No SQLite database at {database_path}; an empty database will be created and dumped'
            )

        dump_path = make_dump_path(config)
        dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])

        # Two configured databases with the same name would collide on the same dump filename;
        # skip the later one rather than clobbering the pipe.
        if os.path.exists(dump_filename):
            logger.warning(
                f'{log_prefix}: Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
            )
            continue

        # Quote the paths because this command runs through a shell (shell=True below), with the
        # ">" redirecting sqlite3's .dump output into the named pipe.
        command = (
            'sqlite3',
            shlex.quote(database_path),
            '.dump',
            '>',
            shlex.quote(dump_filename),
        )
        logger.debug(
            f'{log_prefix}: Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}'
        )
        # On a dry run, stop after logging—before creating the pipe or spawning the dump process.
        if dry_run:
            continue

        dump.create_named_pipe_for_dump(dump_filename)
        # Don't wait for completion; the caller consumes the pipe while Borg reads it.
        processes.append(execute_command(command, shell=True, run_to_completion=False))

    return processes
|
|
|
|
|
|
|
|
|
2023-08-24 20:50:10 +00:00
|
|
|
def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
    '''
    Remove the given SQLite database dumps from the filesystem. The databases are supplied as a
    sequence of configuration dicts, as per the configuration schema. Use the given configuration
    dict to construct the destination path and the given log prefix in any log entries. If this is a
    dry run, then don't actually remove anything.
    '''
    dump_path = make_dump_path(config)

    dump.remove_data_source_dumps(dump_path, 'SQLite', log_prefix, dry_run)
|
2023-03-02 18:25:16 +00:00
|
|
|
|
|
|
|
|
2023-08-24 20:50:10 +00:00
|
|
|
def make_data_source_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
    '''
    Make a pattern that matches the given SQLite databases. The databases are supplied as a sequence
    of configuration dicts, as per the configuration schema.
    '''
    dump_path = make_dump_path(config)

    return dump.make_data_source_dump_filename(dump_path, name)
|
2023-03-02 18:25:16 +00:00
|
|
|
|
|
|
|
|
2023-08-24 20:50:10 +00:00
|
|
|
def restore_data_source_dump(
    hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
):
    '''
    Restore a database from the given extract stream. The database is supplied as a data source
    configuration dict, but the given hook configuration is ignored. The given configuration dict is
    used to construct the destination path, and the given log prefix is used for any log entries. If
    this is a dry run, then don't actually restore anything. Trigger the given active extract
    process (an instance of subprocess.Popen) to produce output to consume.
    '''
    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
    # Precedence for the restore destination: command-line/API connection params first, then the
    # data source's configured "restore_path", then its original "path".
    database_path = connection_params['restore_path'] or data_source.get(
        'restore_path', data_source.get('path')
    )

    logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}')
    # On a dry run, stop before touching the filesystem or running sqlite3.
    if dry_run:
        return

    # Remove any existing database file so the restore starts from a clean slate instead of
    # layering the dump's statements onto existing data.
    try:
        os.remove(database_path)
        logger.warning(f'{log_prefix}: Removed existing SQLite database at {database_path}')
    except FileNotFoundError: # pragma: no cover
        pass

    # sqlite3 reads the dump's SQL statements on stdin (wired up via input_file below).
    restore_command = (
        'sqlite3',
        database_path,
    )

    # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning
    # if the restore paths don't exist in the archive.
    execute_command_with_processes(
        restore_command,
        [extract_process],
        output_log_level=logging.DEBUG,
        input_file=extract_process.stdout,
    )
|