Prevent ZFS snapshots from ending up in the Borg archive twice (#261).
parent 0ed52bbc4a
commit f9e920dce9

@@ -379,7 +379,6 @@ def collect_spot_check_source_paths(
             ),
             local_borg_version=local_borg_version,
             global_arguments=global_arguments,
-            borgmatic_runtime_directory=borgmatic_runtime_directory,
             local_path=local_path,
             remote_path=remote_path,
             list_files=True,
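
For context, collect_spot_check_source_paths() appears to assemble a dry-run "borg create --list" command in order to enumerate the paths Borg would back up; with the runtime directory no longer injected into the sources (see the hunks below), it has no reason to pass borgmatic_runtime_directory down to make_base_create_command(). A rough sketch of the kind of list-output parsing such a function relies on -- parse_borg_list_paths is a hypothetical helper and the parsing details are simplifications, not borgmatic's literal code:

    def parse_borg_list_paths(list_output):
        # In dry-run mode, "borg create --list" prints one status character
        # per line: '-' for paths that would be archived and 'x' for excluded
        # ones, e.g. "- /home/user/file". Splitting off that prefix recovers
        # the path. (Simplified sketch.)
        return tuple(
            line.split(' ', 1)[1]
            for line in list_output.splitlines()
            if line.startswith('- ')
        )


    print(parse_borg_list_paths('- /home/user/file\nx /home/user/.cache'))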

@@ -143,10 +143,10 @@ def pattern_root_directories(patterns=None):
     ]


-def process_source_directories(config, config_paths, borgmatic_runtime_directory):
+def process_source_directories(config, config_paths):
     '''
-    Given a configuration dict, a sequence of configuration paths, and the borgmatic runtime
-    directory, expand and deduplicate the source directories from them.
+    Given a configuration dict and a sequence of configuration paths, expand and deduplicate the
+    source directories from them.
     '''
     working_directory = borgmatic.config.paths.get_working_directory(config)

@@ -154,7 +154,6 @@ def process_source_directories(config, config_paths, borgmatic_runtime_directory
         map_directories_to_devices(
             expand_directories(
                 tuple(config.get('source_directories', ()))
-                + (borgmatic_runtime_directory,)
                 + tuple(config_paths if config.get('store_config_files', True) else ()),
                 working_directory=working_directory,
             )
@@ -214,9 +213,7 @@ def run_create(
             borgmatic_runtime_directory,
             global_arguments.dry_run,
         )
-        source_directories = process_source_directories(
-            config, config_paths, borgmatic_runtime_directory
-        )
+        source_directories = process_source_directories(config, config_paths)
         active_dumps = borgmatic.hooks.dispatch.call_hooks(
             'dump_data_sources',
             config,
@@ -235,6 +232,7 @@ def run_create(
             borgmatic_runtime_directory,
             global_arguments.dry_run,
         )
+        source_directories.append(os.path.join(borgmatic_runtime_directory, 'bootstrap'))

         json_output = borgmatic.borg.create.create_archive(
             global_arguments.dry_run,
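
Taken together, these run_create() hunks relocate responsibility for runtime-directory content: process_source_directories() no longer injects the whole runtime directory, and each consumer appends only the subdirectory it actually writes. A minimal sketch of the resulting flow -- process_source_directories() here is a simplified stand-in (no expansion, deduplication, or device mapping) and run_create_sketch() is an invented name, not borgmatic's real implementation:

    import os


    def process_source_directories(config, config_paths):
        # Only user-configured sources, plus the configuration files
        # themselves when store_config_files is enabled. The borgmatic
        # runtime directory is deliberately no longer added wholesale.
        sources = list(config.get('source_directories', ()))

        if config.get('store_config_files', True):
            sources.extend(config_paths)

        return sources


    def run_create_sketch(config, config_paths, borgmatic_runtime_directory):
        source_directories = process_source_directories(config, config_paths)

        # Data source hooks receive this same list and append just the dump
        # directories they populate (see the hook hunks below).

        # run_create itself appends only the bootstrap metadata directory.
        source_directories.append(
            os.path.join(borgmatic_runtime_directory, 'bootstrap')
        )

        return source_directories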

@@ -228,7 +228,6 @@ def make_base_create_command(
     source_directories,
     local_borg_version,
     global_arguments,
-    borgmatic_runtime_directory,
     local_path='borg',
     remote_path=None,
     progress=False,
@@ -411,7 +410,6 @@ def create_archive(
     source_directories,
     local_borg_version,
     global_arguments,
-    borgmatic_runtime_directory,
     local_path,
     remote_path,
     progress,

@@ -123,7 +123,12 @@ def use_streaming(databases, config, log_prefix):


 def dump_data_sources(
-    databases, config, log_prefix, borgmatic_runtime_directory, source_directories, dry_run
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
 ):
     '''
     Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of
@@ -133,6 +138,7 @@ def dump_data_sources(

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
@@ -180,6 +186,8 @@ def dump_data_sources(
             )
         )

+    source_directories.append(os.path.join(borgmatic_runtime_directory, 'mariadb_databases'))
+
     return [process for process in processes if process]

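
The four hook hunks that follow (MongoDB, MySQL/MariaDB, PostgreSQL, SQLite) repeat this same change with a different subdirectory name, so the shared contract is worth spelling out once: dump_data_sources() now receives the mutable source_directories list and appends only the parent directory of its own dumps. A sketch of that contract, with the actual dump logic elided (the function body below is a simplification, not any hook's real code):

    import os


    def dump_data_sources(
        databases,
        config,
        log_prefix,
        borgmatic_runtime_directory,
        source_directories,
        dry_run,
    ):
        # Real hooks dump each database to a named pipe here, collecting one
        # subprocess.Popen per dump; on a dry run, nothing gets dumped.
        processes = []  # dump logic elided

        # Append only this hook's own dump parent directory. With the runtime
        # directory no longer a blanket source, this is what gets the dumps
        # into the archive -- without also sweeping in anything else mounted
        # under the runtime directory, such as ZFS snapshots.
        source_directories.append(
            os.path.join(borgmatic_runtime_directory, 'mariadb_databases')
        )

        return processes

Because every hook mutates the same list handed down from run_create(), these appends accumulate before the final list reaches borgmatic.borg.create.create_archive().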

@@ -24,7 +24,12 @@ def use_streaming(databases, config, log_prefix):


 def dump_data_sources(
-    databases, config, log_prefix, borgmatic_runtime_directory, source_directories, dry_run
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
 ):
     '''
     Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
@@ -34,6 +39,7 @@ def dump_data_sources(

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''

@@ -62,6 +68,8 @@ def dump_data_sources(
         dump.create_named_pipe_for_dump(dump_filename)
         processes.append(execute_command(command, shell=True, run_to_completion=False))

+    source_directories.append(os.path.join(borgmatic_runtime_directory, 'mongodb_databases'))
+
     return processes


@@ -122,7 +122,12 @@ def use_streaming(databases, config, log_prefix):


 def dump_data_sources(
-    databases, config, log_prefix, borgmatic_runtime_directory, source_directories, dry_run
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
 ):
     '''
     Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
@@ -132,6 +137,7 @@ def dump_data_sources(

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
@@ -179,6 +185,8 @@ def dump_data_sources(
             )
         )

+    source_directories.append(os.path.join(borgmatic_runtime_directory, 'mysql_databases'))
+
     return [process for process in processes if process]


@@ -105,7 +105,12 @@ def use_streaming(databases, config, log_prefix):


 def dump_data_sources(
-    databases, config, log_prefix, borgmatic_runtime_directory, source_directories, dry_run
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
 ):
     '''
     Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
@@ -115,6 +120,7 @@ def dump_data_sources(

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.

     Raise ValueError if the databases to dump cannot be determined.
     '''
@@ -205,6 +211,8 @@ def dump_data_sources(
             )
         )

+    source_directories.append(os.path.join(borgmatic_runtime_directory, 'postgresql_databases'))
+
     return processes


@@ -25,7 +25,12 @@ def use_streaming(databases, config, log_prefix):


 def dump_data_sources(
-    databases, config, log_prefix, borgmatic_runtime_directory, source_directories, dry_run
+    databases,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
 ):
     '''
     Dump the given SQLite databases to a named pipe. The databases are supplied as a sequence of
@@ -34,6 +39,7 @@ def dump_data_sources(

     Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
     pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
+    Also append the given source directories with the parent directory of the database dumps.
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
@@ -75,6 +81,8 @@ def dump_data_sources(
         dump.create_named_pipe_for_dump(dump_filename)
         processes.append(execute_command(command, shell=True, run_to_completion=False))

+    source_directories.append(os.path.join(borgmatic_runtime_directory, 'sqlite_databases'))
+
     return processes


@@ -20,7 +20,12 @@ BORGMATIC_SNAPSHOT_PREFIX = 'borgmatic-'


 def dump_data_sources(
-    hook_config, config, log_prefix, borgmatic_runtime_directory, source_directories, dry_run
+    hook_config,
+    config,
+    log_prefix,
+    borgmatic_runtime_directory,
+    source_directories,
+    dry_run,
 ):
     '''
     Given a ZFS configuration dict, a configuration dict, a log prefix, the borgmatic runtime
@@ -34,12 +39,11 @@ def dump_data_sources(
     If this is a dry run or ZFS isn't enabled, then don't actually snapshot anything.

     '''
-    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
-    # TODO: Check for ZFS enabled in config and skip accordingly.
-
+    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     logger.info(f'{log_prefix}: Snapshotting ZFS datasets{dry_run_label}')

+    # TODO: Check for ZFS enabled in config and skip accordingly.
     # TODO: Check for Borg 1.4+ and error if Borg is too old (cuz we need the slashdot hack).
     # TODO: Dry run.

     # List ZFS datasets to get their mount points.
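
This ZFS hunk is mostly signature reformatting plus TODO reshuffling, but it sits at the heart of the fix: per the commit title and the runtime-directory removal above, the hook's ZFS snapshot mounts evidently live under the borgmatic runtime directory, so while process_source_directories() still added that whole directory, each snapshot entered the archive twice -- once via the runtime directory and once via the snapshot path the hook contributes. A toy before/after illustration of the source lists; every path here, including the 'zfs_snapshots' subdirectory name, is invented for the example:

    import os

    runtime = '/run/user/1000/borgmatic'  # invented path, purely for illustration

    # An assumed snapshot mount point beneath the runtime directory.
    snapshot_mount = os.path.join(runtime, 'zfs_snapshots/pool/dataset')

    # Before this commit: the whole runtime directory was a source, so the
    # snapshot was reachable twice -- via the runtime directory and via the
    # snapshot path itself.
    before = ['/home', runtime, snapshot_mount]

    # After: only the subdirectories explicitly appended by hooks are sources,
    # so each snapshot enters the archive exactly once.
    after = ['/home', os.path.join(runtime, 'postgresql_databases'), snapshot_mount]

    print(before)
    print(after)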