Merge branch 'main' into main

This commit is contained in:
IBims1NicerTobi 2023-09-02 17:28:33 +00:00
commit 8f01b59bbf
28 changed files with 1204 additions and 1048 deletions

View File

@ -13,7 +13,6 @@ services:
environment:
POSTGRES_PASSWORD: test2
POSTGRES_DB: test
POSTGRES_USER: postgres2
commands:
- docker-entrypoint.sh -p 5433
- name: mariadb
@ -28,6 +27,18 @@ services:
MARIADB_DATABASE: test
commands:
- docker-entrypoint.sh --port=3307
- name: not-actually-mysql
image: docker.io/mariadb:10.11.4
environment:
MARIADB_ROOT_PASSWORD: test
MARIADB_DATABASE: test
- name: not-actually-mysql2
image: docker.io/mariadb:10.11.4
environment:
MARIADB_ROOT_PASSWORD: test2
MARIADB_DATABASE: test
commands:
- docker-entrypoint.sh --port=3307
- name: mongodb
image: docker.io/mongo:5.0.5
environment:

18
NEWS
View File

@ -1,9 +1,17 @@
1.8.2.dev0
1.8.3.dev0
* #743: Add a monitoring hook for sending backup status and logs to Grafana Loki. See the
documentation for more information:
https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#loki-hook
1.8.2
* #345: Add "key export" action to export a copy of the repository key for safekeeping in case
the original goes missing or gets damaged.
* #727: Add a MariaDB database hook that uses native MariaDB commands instead of the deprecated
MySQL ones. Be aware though that any existing backups made with the "mysql_databases:" hook are
only restorable with a "mysql_databases:" configuration.
* #738: Fix for potential data loss (data not getting restored) in which the database "restore"
action didn't actually restore anything and indicated success anyway.
* Remove the deprecated use of the MongoDB hook's "--db" flag for database restoration.
* Add source code reference documentation for getting oriented with the borgmatic code as a
developer: https://torsion.org/borgmatic/docs/reference/source-code/
@ -35,10 +43,10 @@
"check --repair".
* When merging two configuration files, error gracefully if the two files do not adhere to the same
format.
* #721: Remove configuration sections ("location:", "storage:", "hooks:" etc.), while still keeping
deprecated support for them. Now, all options are at the same level, and you don't need to worry
about commenting/uncommenting section headers when you change an option (if you remove your
sections first).
* #721: Remove configuration sections ("location:", "storage:", "hooks:", etc.), while still
keeping deprecated support for them. Now, all options are at the same level, and you don't need
to worry about commenting/uncommenting section headers when you change an option (if you remove
your sections first).
* #721: BREAKING: The retention prefix and the consistency prefix can no longer have different
values (unless one is not set).
* #721: BREAKING: The storage umask and the hooks umask can no longer have different values (unless

View File

@ -78,17 +78,17 @@ def run_create(
)
logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}')
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
active_dumps = borgmatic.hooks.dispatch.call_hooks(
'dump_databases',
'dump_data_sources',
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
if config.get('store_config_files', True):
@ -115,10 +115,10 @@ def run_create(
yield json.loads(json_output)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
'remove_data_source_dumps',
config,
config_filename,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
borgmatic.hooks.command.execute_hook(

View File

@ -17,50 +17,51 @@ logger = logging.getLogger(__name__)
UNSPECIFIED_HOOK = object()
def get_configured_database(
config, archive_database_names, hook_name, database_name, configuration_database_name=None
def get_configured_data_source(
config,
archive_data_source_names,
hook_name,
data_source_name,
configuration_data_source_name=None,
):
'''
Find the first database with the given hook name and database name in the configuration dict and
the given archive database names dict (from hook name to database names contained in a
particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
hooks for the named database. If a configuration database name is given, use that instead of the
database name to lookup the database in the given hooks configuration.
Find the first data source with the given hook name and data source name in the configuration
dict and the given archive data source names dict (from hook name to data source names contained
in a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all data
source hooks for the named data source. If a configuration data source name is given, use that
instead of the data source name to lookup the data source in the given hooks configuration.
Return the found database as a tuple of (found hook name, database configuration dict).
Return the found data source as a tuple of (found hook name, data source configuration dict) or
(None, None) if not found.
'''
if not configuration_database_name:
configuration_database_name = database_name
if not configuration_data_source_name:
configuration_data_source_name = data_source_name
if hook_name == UNSPECIFIED_HOOK:
hooks_to_search = {
hook_name: value
for (hook_name, value) in config.items()
if hook_name in borgmatic.hooks.dump.DATABASE_HOOK_NAMES
if hook_name in borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES
}
else:
hooks_to_search = {hook_name: config[hook_name]}
try:
hooks_to_search = {hook_name: config[hook_name]}
except KeyError:
return (None, None)
return next(
(
(name, hook_database)
(name, hook_data_source)
for (name, hook) in hooks_to_search.items()
for hook_database in hook
if hook_database['name'] == configuration_database_name
and database_name in archive_database_names.get(name, [])
for hook_data_source in hook
if hook_data_source['name'] == configuration_data_source_name
and data_source_name in archive_data_source_names.get(name, [])
),
(None, None),
)
def get_configured_hook_name_and_database(hooks, database_name):
'''
Find the hook name and first database dict with the given database name in the configured hooks
dict. This searches across all database hooks.
'''
def restore_single_database(
def restore_single_data_source(
repository,
config,
local_borg_version,
@ -69,27 +70,27 @@ def restore_single_database(
remote_path,
archive_name,
hook_name,
database,
data_source,
connection_params,
): # pragma: no cover
'''
Given (among other things) an archive name, a database hook name, the hostname,
port, username and password as connection params, and a configured database
configuration dict, restore that database from the archive.
Given (among other things) an archive name, a data source hook name, the hostname, port,
username/password as connection params, and a configured data source configuration dict, restore
that data source from the archive.
'''
logger.info(
f'{repository.get("label", repository["path"])}: Restoring database {database["name"]}'
f'{repository.get("label", repository["path"])}: Restoring data source {data_source["name"]}'
)
dump_pattern = borgmatic.hooks.dispatch.call_hooks(
'make_database_dump_pattern',
'make_data_source_dump_pattern',
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
database['name'],
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
data_source['name'],
)[hook_name]
# Kick off a single database extract to stdout.
# Kick off a single data source extract to stdout.
extract_process = borgmatic.borg.extract.extract_archive(
dry_run=global_arguments.dry_run,
repository=repository['path'],
@ -103,23 +104,23 @@ def restore_single_database(
destination_path='/',
# A directory format dump isn't a single file, and therefore can't extract
# to stdout. In this case, the extract_process return value is None.
extract_to_stdout=bool(database.get('format') != 'directory'),
extract_to_stdout=bool(data_source.get('format') != 'directory'),
)
# Run a single database restore, consuming the extract stdout (if any).
# Run a single data source restore, consuming the extract stdout (if any).
borgmatic.hooks.dispatch.call_hooks(
'restore_database_dump',
config,
repository['path'],
database['name'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
global_arguments.dry_run,
extract_process,
connection_params,
function_name='restore_data_source_dump',
config=config,
log_prefix=repository['path'],
hook_names=[hook_name],
data_source=data_source,
dry_run=global_arguments.dry_run,
extract_process=extract_process,
connection_params=connection_params,
)
def collect_archive_database_names(
def collect_archive_data_source_names(
repository,
archive,
config,
@ -131,60 +132,62 @@ def collect_archive_database_names(
'''
Given a local or remote repository path, a resolved archive name, a configuration dict, the
local Borg version, global_arguments an argparse.Namespace, and local and remote Borg paths,
query the archive for the names of databases it contains and return them as a dict from hook
name to a sequence of database names.
query the archive for the names of data sources it contains as dumps and return them as a dict
from hook name to a sequence of data source names.
'''
borgmatic_source_directory = os.path.expanduser(
config.get(
'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
)
).lstrip('/')
parent_dump_path = os.path.expanduser(
borgmatic.hooks.dump.make_database_dump_path(borgmatic_source_directory, '*_databases/*/*')
)
dump_paths = borgmatic.borg.list.capture_archive_listing(
repository,
archive,
config,
local_borg_version,
global_arguments,
list_path=parent_dump_path,
list_paths=[
os.path.expanduser(
borgmatic.hooks.dump.make_data_source_dump_path(borgmatic_source_directory, pattern)
)
for pattern in ('*_databases/*/*',)
],
local_path=local_path,
remote_path=remote_path,
)
# Determine the database names corresponding to the dumps found in the archive and
# Determine the data source names corresponding to the dumps found in the archive and
# add them to restore_names.
archive_database_names = {}
archive_data_source_names = {}
for dump_path in dump_paths:
try:
(hook_name, _, database_name) = dump_path.split(
(hook_name, _, data_source_name) = dump_path.split(
borgmatic_source_directory + os.path.sep, 1
)[1].split(os.path.sep)[0:3]
except (ValueError, IndexError):
logger.warning(
f'{repository}: Ignoring invalid database dump path "{dump_path}" in archive {archive}'
f'{repository}: Ignoring invalid data source dump path "{dump_path}" in archive {archive}'
)
else:
if database_name not in archive_database_names.get(hook_name, []):
archive_database_names.setdefault(hook_name, []).extend([database_name])
if data_source_name not in archive_data_source_names.get(hook_name, []):
archive_data_source_names.setdefault(hook_name, []).extend([data_source_name])
return archive_database_names
return archive_data_source_names
def find_databases_to_restore(requested_database_names, archive_database_names):
def find_data_sources_to_restore(requested_data_source_names, archive_data_source_names):
'''
Given a sequence of requested database names to restore and a dict of hook name to the names of
databases found in an archive, return an expanded sequence of database names to restore,
replacing "all" with actual database names as appropriate.
Given a sequence of requested data source names to restore and a dict of hook name to the names
of data sources found in an archive, return an expanded sequence of data source names to
restore, replacing "all" with actual data source names as appropriate.
Raise ValueError if any of the requested database names cannot be found in the archive.
Raise ValueError if any of the requested data source names cannot be found in the archive.
'''
# A map from database hook name to the database names to restore for that hook.
# A map from data source hook name to the data source names to restore for that hook.
restore_names = (
{UNSPECIFIED_HOOK: requested_database_names}
if requested_database_names
{UNSPECIFIED_HOOK: requested_data_source_names}
if requested_data_source_names
else {UNSPECIFIED_HOOK: ['all']}
)
@ -193,56 +196,59 @@ def find_databases_to_restore(requested_database_names, archive_database_names):
if 'all' in restore_names[UNSPECIFIED_HOOK]:
restore_names[UNSPECIFIED_HOOK].remove('all')
for hook_name, database_names in archive_database_names.items():
restore_names.setdefault(hook_name, []).extend(database_names)
for hook_name, data_source_names in archive_data_source_names.items():
restore_names.setdefault(hook_name, []).extend(data_source_names)
# If a database is to be restored as part of "all", then remove it from restore names so
# it doesn't get restored twice.
for database_name in database_names:
if database_name in restore_names[UNSPECIFIED_HOOK]:
restore_names[UNSPECIFIED_HOOK].remove(database_name)
# If a data source is to be restored as part of "all", then remove it from restore names
# so it doesn't get restored twice.
for data_source_name in data_source_names:
if data_source_name in restore_names[UNSPECIFIED_HOOK]:
restore_names[UNSPECIFIED_HOOK].remove(data_source_name)
if not restore_names[UNSPECIFIED_HOOK]:
restore_names.pop(UNSPECIFIED_HOOK)
combined_restore_names = set(
name for database_names in restore_names.values() for name in database_names
name for data_source_names in restore_names.values() for name in data_source_names
)
combined_archive_database_names = set(
name for database_names in archive_database_names.values() for name in database_names
combined_archive_data_source_names = set(
name
for data_source_names in archive_data_source_names.values()
for name in data_source_names
)
missing_names = sorted(set(combined_restore_names) - combined_archive_database_names)
missing_names = sorted(set(combined_restore_names) - combined_archive_data_source_names)
if missing_names:
joined_names = ', '.join(f'"{name}"' for name in missing_names)
raise ValueError(
f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from archive"
f"Cannot restore data source{'s' if len(missing_names) > 1 else ''} {joined_names} missing from archive"
)
return restore_names
def ensure_databases_found(restore_names, remaining_restore_names, found_names):
def ensure_data_sources_found(restore_names, remaining_restore_names, found_names):
'''
Given a dict from hook name to database names to restore, a dict from hook name to remaining
database names to restore, and a sequence of found (actually restored) database names, raise
ValueError if requested databases to restore were missing from the archive and/or configuration.
Given a dict from hook name to data source names to restore, a dict from hook name to remaining
data source names to restore, and a sequence of found (actually restored) data source names,
raise ValueError if any requested data sources to restore were missing from the archive and/or
configuration.
'''
combined_restore_names = set(
name
for database_names in tuple(restore_names.values())
for data_source_names in tuple(restore_names.values())
+ tuple(remaining_restore_names.values())
for name in database_names
for name in data_source_names
)
if not combined_restore_names and not found_names:
raise ValueError('No databases were found to restore')
raise ValueError('No data sources were found to restore')
missing_names = sorted(set(combined_restore_names) - set(found_names))
if missing_names:
joined_names = ', '.join(f'"{name}"' for name in missing_names)
raise ValueError(
f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from borgmatic's configuration"
f"Cannot restore data source{'s' if len(missing_names) > 1 else ''} {joined_names} missing from borgmatic's configuration"
)
@ -259,7 +265,7 @@ def run_restore(
Run the "restore" action for the given repository, but only if the repository matches the
requested repository in restore arguments.
Raise ValueError if a configured database could not be found to restore.
Raise ValueError if a configured data source could not be found to restore.
'''
if restore_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, restore_arguments.repository
@ -267,14 +273,14 @@ def run_restore(
return
logger.info(
f'{repository.get("label", repository["path"])}: Restoring databases from archive {restore_arguments.archive}'
f'{repository.get("label", repository["path"])}: Restoring data sources from archive {restore_arguments.archive}'
)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
@ -287,7 +293,7 @@ def run_restore(
local_path,
remote_path,
)
archive_database_names = collect_archive_database_names(
archive_data_source_names = collect_archive_data_source_names(
repository['path'],
archive_name,
config,
@ -296,7 +302,9 @@ def run_restore(
local_path,
remote_path,
)
restore_names = find_databases_to_restore(restore_arguments.databases, archive_database_names)
restore_names = find_data_sources_to_restore(
restore_arguments.data_sources, archive_data_source_names
)
found_names = set()
remaining_restore_names = {}
connection_params = {
@ -307,20 +315,20 @@ def run_restore(
'restore_path': restore_arguments.restore_path,
}
for hook_name, database_names in restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
config, archive_database_names, hook_name, database_name
for hook_name, data_source_names in restore_names.items():
for data_source_name in data_source_names:
found_hook_name, found_data_source = get_configured_data_source(
config, archive_data_source_names, hook_name, data_source_name
)
if not found_database:
if not found_data_source:
remaining_restore_names.setdefault(found_hook_name or hook_name, []).append(
database_name
data_source_name
)
continue
found_names.add(database_name)
restore_single_database(
found_names.add(data_source_name)
restore_single_data_source(
repository,
config,
local_borg_version,
@ -329,26 +337,26 @@ def run_restore(
remote_path,
archive_name,
found_hook_name or hook_name,
dict(found_database, **{'schemas': restore_arguments.schemas}),
dict(found_data_source, **{'schemas': restore_arguments.schemas}),
connection_params,
)
# For any database that weren't found via exact matches in the configuration, try to fallback
# to "all" entries.
for hook_name, database_names in remaining_restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
config, archive_database_names, hook_name, database_name, 'all'
# For any data sources that weren't found via exact matches in the configuration, try to
# fall back to "all" entries.
for hook_name, data_source_names in remaining_restore_names.items():
for data_source_name in data_source_names:
found_hook_name, found_data_source = get_configured_data_source(
config, archive_data_source_names, hook_name, data_source_name, 'all'
)
if not found_database:
if not found_data_source:
continue
found_names.add(database_name)
database = copy.copy(found_database)
database['name'] = database_name
found_names.add(data_source_name)
data_source = copy.copy(found_data_source)
data_source['name'] = data_source_name
restore_single_database(
restore_single_data_source(
repository,
config,
local_borg_version,
@ -357,16 +365,16 @@ def run_restore(
remote_path,
archive_name,
found_hook_name or hook_name,
dict(database, **{'schemas': restore_arguments.schemas}),
dict(data_source, **{'schemas': restore_arguments.schemas}),
connection_params,
)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
'remove_data_source_dumps',
config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES,
global_arguments.dry_run,
)
ensure_databases_found(restore_names, remaining_restore_names, found_names)
ensure_data_sources_found(restore_names, remaining_restore_names, found_names)

View File

@ -92,13 +92,13 @@ def capture_archive_listing(
config,
local_borg_version,
global_arguments,
list_path=None,
list_paths=None,
local_path='borg',
remote_path=None,
):
'''
Given a local or remote repository path, an archive name, a configuration dict, the local Borg
version, global arguments as an argparse.Namespace, the archive path in which to list files, and
version, global arguments as an argparse.Namespace, the archive paths in which to list files, and
local and remote Borg paths, capture the output of listing that archive and return it as a list
of file paths.
'''
@ -113,7 +113,7 @@ def capture_archive_listing(
argparse.Namespace(
repository=repository_path,
archive=archive,
paths=[f'sh:{list_path}'],
paths=[f'sh:{path}' for path in list_paths] if list_paths else None,
find_paths=None,
json=None,
format='{path}{NL}', # noqa: FS003

View File

@ -906,8 +906,8 @@ def make_parsers():
restore_parser = action_parsers.add_parser(
'restore',
aliases=ACTION_ALIASES['restore'],
help='Restore database dumps from a named archive',
description='Restore database dumps from a named archive. (To extract files instead, use "borgmatic extract".)',
help='Restore data source (e.g. database) dumps from a named archive',
description='Restore data source (e.g. database) dumps from a named archive. (To extract files instead, use "borgmatic extract".)',
add_help=False,
)
restore_group = restore_parser.add_argument_group('restore arguments')
@ -919,18 +919,19 @@ def make_parsers():
'--archive', help='Name of archive to restore from (or "latest")', required=True
)
restore_group.add_argument(
'--data-source',
'--database',
metavar='NAME',
dest='databases',
dest='data_sources',
action='append',
help="Name of database to restore from archive, must be defined in borgmatic's configuration, can specify flag multiple times, defaults to all databases",
help="Name of data source (e.g. database) to restore from archive, must be defined in borgmatic's configuration, can specify flag multiple times, defaults to all data sources in the archive",
)
restore_group.add_argument(
'--schema',
metavar='NAME',
dest='schemas',
action='append',
help='Name of schema to restore from the database, can specify flag multiple times, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases',
help='Name of schema to restore from the data source, can specify flag multiple times, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases',
)
restore_group.add_argument(
'--hostname',
@ -938,7 +939,7 @@ def make_parsers():
)
restore_group.add_argument(
'--port',
help='Port to restore to. Defaults to the "restore_port" option in borgmatic\'s configuration',
help='Database port to restore to. Defaults to the "restore_port" option in borgmatic\'s configuration',
)
restore_group.add_argument(
'--username',

View File

@ -50,7 +50,7 @@ def normalize_sections(config_filename, config):
dict(
levelno=logging.WARNING,
levelname='WARNING',
msg=f'{config_filename}: Configuration sections (like location: and storage:) are deprecated and support will be removed from a future release. To prepare for this, move your options out of sections to the global scope.',
msg=f'{config_filename}: Configuration sections (like location:, storage:, retention:, consistency:, and hooks:) are deprecated and support will be removed from a future release. To prepare for this, move your options out of sections to the global scope.',
)
)
]

View File

@ -6,7 +6,7 @@ from borgmatic.borg.state import DEFAULT_BORGMATIC_SOURCE_DIRECTORY
logger = logging.getLogger(__name__)
DATABASE_HOOK_NAMES = (
DATA_SOURCE_HOOK_NAMES = (
'mariadb_databases',
'mysql_databases',
'mongodb_databases',
@ -15,26 +15,26 @@ DATABASE_HOOK_NAMES = (
)
def make_database_dump_path(borgmatic_source_directory, database_hook_name):
def make_data_source_dump_path(borgmatic_source_directory, data_source_hook_name):
'''
Given a borgmatic source directory (or None) and a database hook name, construct a database dump
path.
Given a borgmatic source directory (or None) and a data source hook name, construct a data
source dump path.
'''
if not borgmatic_source_directory:
borgmatic_source_directory = DEFAULT_BORGMATIC_SOURCE_DIRECTORY
return os.path.join(borgmatic_source_directory, database_hook_name)
return os.path.join(borgmatic_source_directory, data_source_hook_name)
def make_database_dump_filename(dump_path, name, hostname=None):
def make_data_source_dump_filename(dump_path, name, hostname=None):
'''
Based on the given dump directory path, database name, and hostname, return a filename to use
for the database dump. The hostname defaults to localhost.
Based on the given dump directory path, data source name, and hostname, return a filename to use
for the data source dump. The hostname defaults to localhost.
Raise ValueError if the database name is invalid.
Raise ValueError if the data source name is invalid.
'''
if os.path.sep in name:
raise ValueError(f'Invalid database name {name}')
raise ValueError(f'Invalid data source name {name}')
return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name)
@ -54,14 +54,14 @@ def create_named_pipe_for_dump(dump_path):
os.mkfifo(dump_path, mode=0o600)
def remove_database_dumps(dump_path, database_type_name, log_prefix, dry_run):
def remove_data_source_dumps(dump_path, data_source_type_name, log_prefix, dry_run):
'''
Remove all database dumps in the given dump directory path (including the directory itself). If
this is a dry run, then don't actually remove anything.
Remove all data source dumps in the given dump directory path (including the directory itself).
If this is a dry run, then don't actually remove anything.
'''
dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
logger.debug(f'{log_prefix}: Removing {database_type_name} database dumps{dry_run_label}')
logger.debug(f'{log_prefix}: Removing {data_source_type_name} data source dumps{dry_run_label}')
expanded_path = os.path.expanduser(dump_path)

View File

@ -67,6 +67,7 @@ class Loki_log_buffer:
request_body = self.to_request()
self.root['streams'][0]['values'] = []
request_header = {'Content-Type': 'application/json'}
try:
result = requests.post(self.url, headers=request_header, data=request_body, timeout=5)
result.raise_for_status()
@ -100,6 +101,7 @@ class Loki_log_handler(logging.Handler):
Add an arbitrary string as a log entry to the stream.
'''
self.buffer.add_value(msg)
if len(self.buffer) > MAX_BUFFER_LINES:
self.buffer.flush()
@ -116,6 +118,7 @@ def initialize_monitor(hook_config, config, config_filename, monitoring_log_leve
'''
url = hook_config.get('url')
loki = Loki_log_handler(url, dry_run)
for key, value in hook_config.get('labels').items():
if value == '__hostname':
loki.add_label(key, platform.node())
@ -125,6 +128,7 @@ def initialize_monitor(hook_config, config, config_filename, monitoring_log_leve
loki.add_label(key, config_filename)
else:
loki.add_label(key, value)
logging.getLogger().addHandler(loki)
@ -143,6 +147,7 @@ def destroy_monitor(hook_config, config, config_filename, monitoring_log_level,
Remove the monitor handler that was added to the root logger.
'''
logger = logging.getLogger()
for handler in tuple(logger.handlers):
if isinstance(handler, Loki_log_handler):
handler.flush()

View File

@ -16,7 +16,7 @@ def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(
return dump.make_data_source_dump_path(
config.get('borgmatic_source_directory'), 'mariadb_databases'
)
@ -62,16 +62,17 @@ def execute_dump_command(
):
'''
Kick off a dump for the given MariaDB database (provided as a configuration dict) to a named
pipe constructed from the given dump path and database names. Use the given log prefix in any
pipe constructed from the given dump path and database name. Use the given log prefix in any
log entries.
Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
this is a dry run, then don't actually dump anything and return None.
'''
database_name = database['name']
dump_filename = dump.make_database_dump_filename(
dump_filename = dump.make_data_source_dump_filename(
dump_path, database['name'], database.get('hostname')
)
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of MariaDB database "{database_name}" to {dump_filename}'
@ -106,7 +107,7 @@ def execute_dump_command(
)
def dump_databases(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, dry_run):
'''
Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given
@ -165,61 +166,55 @@ def dump_databases(databases, config, log_prefix, dry_run):
return [process for process in processes if process]
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the given
configuration dict to construct the destination path and the log prefix in any log entries. If
this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(config), 'MariaDB', log_prefix, dry_run)
dump.remove_data_source_dumps(make_dump_path(config), 'MariaDB', log_prefix, dry_run)
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
database name to match, return the corresponding glob patterns to match the database dump in an
archive.
'''
return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
return dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump(
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
def restore_data_source_dump(
hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
):
'''
Restore the given MariaDB database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the given log prefix
in any log entries. If this is a dry run, then don't actually restore anything. Trigger the
given active extract process (an instance of subprocess.Popen) to produce output to consume.
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given configuration dict is
used to construct the destination path, and the given log prefix is used for any log entries. If
this is a dry run, then don't actually restore anything. Trigger the given active extract
process (an instance of subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
hostname = connection_params['hostname'] or database.get(
'restore_hostname', database.get('hostname')
hostname = connection_params['hostname'] or data_source.get(
'restore_hostname', data_source.get('hostname')
)
port = str(connection_params['port'] or database.get('restore_port', database.get('port', '')))
username = connection_params['username'] or database.get(
'restore_username', database.get('username')
port = str(
connection_params['port'] or data_source.get('restore_port', data_source.get('port', ''))
)
password = connection_params['password'] or database.get(
'restore_password', database.get('password')
username = connection_params['username'] or data_source.get(
'restore_username', data_source.get('username')
)
password = connection_params['password'] or data_source.get(
'restore_password', data_source.get('password')
)
restore_command = (
('mariadb', '--batch')
+ (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
+ (
tuple(data_source['restore_options'].split(' '))
if 'restore_options' in data_source
else ()
)
+ (('--host', hostname) if hostname else ())
+ (('--port', str(port)) if port else ())
+ (('--protocol', 'tcp') if hostname or port else ())
@ -227,7 +222,7 @@ def restore_database_dump(
)
extra_environment = {'MYSQL_PWD': password} if password else None
logger.debug(f"{log_prefix}: Restoring MariaDB database {database['name']}{dry_run_label}")
logger.debug(f"{log_prefix}: Restoring MariaDB database {data_source['name']}{dry_run_label}")
if dry_run:
return

View File

@ -10,12 +10,12 @@ def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(
return dump.make_data_source_dump_path(
config.get('borgmatic_source_directory'), 'mongodb_databases'
)
def dump_databases(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, dry_run):
'''
Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the configuration
@ -31,7 +31,7 @@ def dump_databases(databases, config, log_prefix, dry_run):
processes = []
for database in databases:
name = database['name']
dump_filename = dump.make_database_dump_filename(
dump_filename = dump.make_data_source_dump_filename(
make_dump_path(config), name, database.get('hostname')
)
dump_format = database.get('format', 'archive')
@ -78,59 +78,46 @@ def build_dump_command(database, dump_filename, dump_format):
)
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the log
prefix in any log entries. Use the given configuration dict to construct the destination path.
If this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run)
dump.remove_data_source_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run)
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Given a sequence of database configurations dicts, a configuration dict, a prefix to log with,
and a database name to match, return the corresponding glob patterns to match the database dump
in an archive.
'''
return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
return dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump(
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
def restore_data_source_dump(
hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
):
'''
Restore the given MongoDB database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the configuration
dict to construct the destination path and the given log prefix in any log entries. If this is a
dry run, then don't actually restore anything. Trigger the given active extract process (an
instance of subprocess.Popen) to produce output to consume.
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given configuration dict is
used to construct the destination path, and the given log prefix is used for any log entries. If
this is a dry run, then don't actually restore anything. Trigger the given active extract
process (an instance of subprocess.Popen) to produce output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
dump_filename = dump.make_database_dump_filename(
make_dump_path(config), database['name'], database.get('hostname')
dump_filename = dump.make_data_source_dump_filename(
make_dump_path(config), data_source['name'], data_source.get('hostname')
)
restore_command = build_restore_command(
extract_process, database, dump_filename, connection_params
extract_process, data_source, dump_filename, connection_params
)
logger.debug(f"{log_prefix}: Restoring MongoDB database {database['name']}{dry_run_label}")
logger.debug(f"{log_prefix}: Restoring MongoDB database {data_source['name']}{dry_run_label}")
if dry_run:
return
@ -165,7 +152,7 @@ def build_restore_command(extract_process, database, dump_filename, connection_p
else:
command.extend(('--dir', dump_filename))
if database['name'] != 'all':
command.extend(('--drop', '--db', database['name']))
command.extend(('--drop',))
if hostname:
command.extend(('--host', hostname))
if port:
@ -178,7 +165,8 @@ def build_restore_command(extract_process, database, dump_filename, connection_p
command.extend(('--authenticationDatabase', database['authentication_database']))
if 'restore_options' in database:
command.extend(database['restore_options'].split(' '))
if database['schemas']:
if database.get('schemas'):
for schema in database['schemas']:
command.extend(('--nsInclude', schema))
return command

View File

@ -16,7 +16,9 @@ def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(config.get('borgmatic_source_directory'), 'mysql_databases')
return dump.make_data_source_dump_path(
config.get('borgmatic_source_directory'), 'mysql_databases'
)
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
@ -60,16 +62,17 @@ def execute_dump_command(
):
'''
Kick off a dump for the given MySQL/MariaDB database (provided as a configuration dict) to a
named pipe constructed from the given dump path and database names. Use the given log prefix in
named pipe constructed from the given dump path and database name. Use the given log prefix in
any log entries.
Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
this is a dry run, then don't actually dump anything and return None.
'''
database_name = database['name']
dump_filename = dump.make_database_dump_filename(
dump_filename = dump.make_data_source_dump_filename(
dump_path, database['name'], database.get('hostname')
)
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}'
@ -104,7 +107,7 @@ def execute_dump_command(
)
def dump_databases(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, dry_run):
'''
Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
of dicts, one dict describing each database as per the configuration schema. Use the given
@ -162,61 +165,55 @@ def dump_databases(databases, config, log_prefix, dry_run):
return [process for process in processes if process]
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the given
configuration dict to construct the destination path and the log prefix in any log entries. If
this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run)
dump.remove_data_source_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run)
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
database name to match, return the corresponding glob patterns to match the database dump in an
archive.
'''
return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
return dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump(
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
def restore_data_source_dump(
hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
):
'''
Restore the given MySQL/MariaDB database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the given log
prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger
the given active extract process (an instance of subprocess.Popen) to produce output to consume.
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given configuration dict is
used to construct the destination path, and the given log prefix is used for any log entries. If
this is a dry run, then don't actually restore anything. Trigger the given active extract
process (an instance of subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
hostname = connection_params['hostname'] or database.get(
'restore_hostname', database.get('hostname')
hostname = connection_params['hostname'] or data_source.get(
'restore_hostname', data_source.get('hostname')
)
port = str(connection_params['port'] or database.get('restore_port', database.get('port', '')))
username = connection_params['username'] or database.get(
'restore_username', database.get('username')
port = str(
connection_params['port'] or data_source.get('restore_port', data_source.get('port', ''))
)
password = connection_params['password'] or database.get(
'restore_password', database.get('password')
username = connection_params['username'] or data_source.get(
'restore_username', data_source.get('username')
)
password = connection_params['password'] or data_source.get(
'restore_password', data_source.get('password')
)
restore_command = (
('mysql', '--batch')
+ (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
+ (
tuple(data_source['restore_options'].split(' '))
if 'restore_options' in data_source
else ()
)
+ (('--host', hostname) if hostname else ())
+ (('--port', str(port)) if port else ())
+ (('--protocol', 'tcp') if hostname or port else ())
@ -224,7 +221,7 @@ def restore_database_dump(
)
extra_environment = {'MYSQL_PWD': password} if password else None
logger.debug(f"{log_prefix}: Restoring MySQL database {database['name']}{dry_run_label}")
logger.debug(f"{log_prefix}: Restoring MySQL database {data_source['name']}{dry_run_label}")
if dry_run:
return

View File

@ -18,7 +18,7 @@ def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(
return dump.make_data_source_dump_path(
config.get('borgmatic_source_directory'), 'postgresql_databases'
)
@ -92,7 +92,7 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
)
def dump_databases(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, dry_run):
'''
Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given
@ -126,7 +126,7 @@ def dump_databases(databases, config, log_prefix, dry_run):
dump_format = database.get('format', None if database_name == 'all' else 'custom')
default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump'
dump_command = database.get('pg_dump_command') or default_dump_command
dump_filename = dump.make_database_dump_filename(
dump_filename = dump.make_data_source_dump_filename(
dump_path, database_name, database.get('hostname')
)
if os.path.exists(dump_filename):
@ -183,34 +183,33 @@ def dump_databases(databases, config, log_prefix, dry_run):
return processes
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the given
configuration dict to construct the destination path and the log prefix in any log entries. If
this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run)
dump.remove_data_source_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run)
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Given a sequence of configurations dicts, a configuration dict, a prefix to log with, and a
database name to match, return the corresponding glob patterns to match the database dump in an
archive.
'''
return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
return dump.make_data_source_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump(
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
def restore_data_source_dump(
hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
):
'''
Restore the given PostgreSQL database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the given
configuration dict to construct the destination path and the given log prefix in any log
entries. If this is a dry run, then don't actually restore anything. Trigger the given active
extract process (an instance of subprocess.Popen) to produce output to consume.
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given configuration dict is
used to construct the destination path, and the given log prefix is used for any log entries. If
this is a dry run, then don't actually restore anything. Trigger the given active extract
process (an instance of subprocess.Popen) to produce output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream.
@ -219,66 +218,66 @@ def restore_database_dump(
hostname, port, username, and password.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
hostname = connection_params['hostname'] or database.get(
'restore_hostname', database.get('hostname')
hostname = connection_params['hostname'] or data_source.get(
'restore_hostname', data_source.get('hostname')
)
port = str(connection_params['port'] or database.get('restore_port', database.get('port', '')))
username = connection_params['username'] or database.get(
'restore_username', database.get('username')
port = str(
connection_params['port'] or data_source.get('restore_port', data_source.get('port', ''))
)
username = connection_params['username'] or data_source.get(
'restore_username', data_source.get('username')
)
all_databases = bool(database['name'] == 'all')
dump_filename = dump.make_database_dump_filename(
make_dump_path(config), database['name'], database.get('hostname')
all_databases = bool(data_source['name'] == 'all')
dump_filename = dump.make_data_source_dump_filename(
make_dump_path(config), data_source['name'], data_source.get('hostname')
)
psql_command = shlex.split(database.get('psql_command') or 'psql')
psql_command = shlex.split(data_source.get('psql_command') or 'psql')
analyze_command = (
tuple(psql_command)
+ ('--no-password', '--no-psqlrc', '--quiet')
+ (('--host', hostname) if hostname else ())
+ (('--port', port) if port else ())
+ (('--username', username) if username else ())
+ (('--dbname', database['name']) if not all_databases else ())
+ (tuple(database['analyze_options'].split(' ')) if 'analyze_options' in database else ())
+ (('--dbname', data_source['name']) if not all_databases else ())
+ (
tuple(data_source['analyze_options'].split(' '))
if 'analyze_options' in data_source
else ()
)
+ ('--command', 'ANALYZE')
)
use_psql_command = all_databases or database.get('format') == 'plain'
pg_restore_command = shlex.split(database.get('pg_restore_command') or 'pg_restore')
use_psql_command = all_databases or data_source.get('format') == 'plain'
pg_restore_command = shlex.split(data_source.get('pg_restore_command') or 'pg_restore')
restore_command = (
tuple(psql_command if use_psql_command else pg_restore_command)
+ ('--no-password',)
+ (('--no-psqlrc',) if use_psql_command else ('--if-exists', '--exit-on-error', '--clean'))
+ (('--dbname', database['name']) if not all_databases else ())
+ (('--dbname', data_source['name']) if not all_databases else ())
+ (('--host', hostname) if hostname else ())
+ (('--port', port) if port else ())
+ (('--username', username) if username else ())
+ (('--no-owner',) if database.get('no_owner', False) else ())
+ (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
+ (('--no-owner',) if data_source.get('no_owner', False) else ())
+ (
tuple(data_source['restore_options'].split(' '))
if 'restore_options' in data_source
else ()
)
+ (() if extract_process else (dump_filename,))
+ tuple(
itertools.chain.from_iterable(('--schema', schema) for schema in database['schemas'])
if database.get('schemas')
itertools.chain.from_iterable(('--schema', schema) for schema in data_source['schemas'])
if data_source.get('schemas')
else ()
)
)
extra_environment = make_extra_environment(
database, restore_connection_params=connection_params
data_source, restore_connection_params=connection_params
)
logger.debug(f"{log_prefix}: Restoring PostgreSQL database {database['name']}{dry_run_label}")
logger.debug(
f"{log_prefix}: Restoring PostgreSQL database {data_source['name']}{dry_run_label}"
)
if dry_run:
return

View File

@ -11,12 +11,12 @@ def make_dump_path(config): # pragma: no cover
'''
Make the dump path from the given configuration dict and the name of this hook.
'''
return dump.make_database_dump_path(
return dump.make_data_source_dump_path(
config.get('borgmatic_source_directory'), 'sqlite_databases'
)
def dump_databases(databases, config, log_prefix, dry_run):
def dump_data_sources(databases, config, log_prefix, dry_run):
'''
Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of
configuration dicts, as per the configuration schema. Use the given configuration dict to
@ -32,14 +32,14 @@ def dump_databases(databases, config, log_prefix, dry_run):
database_path = database['path']
if database['name'] == 'all':
logger.warning('The "all" database name has no meaning for SQLite3 databases')
logger.warning('The "all" database name has no meaning for SQLite databases')
if not os.path.exists(database_path):
logger.warning(
f'{log_prefix}: No SQLite database at {database_path}; An empty database will be created and dumped'
f'{log_prefix}: No SQLite database at {database_path}; an empty database will be created and dumped'
)
dump_path = make_dump_path(config)
dump_filename = dump.make_database_dump_filename(dump_path, database['name'])
dump_filename = dump.make_data_source_dump_filename(dump_path, database['name'])
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}'
@ -65,49 +65,37 @@ def dump_databases(databases, config, log_prefix, dry_run):
return processes
def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
'''
Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a
sequence of configuration dicts, as per the configuration schema. Use the given configuration
dict to construct the destination path and the given log prefix in any log entries. If this is a
dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(config), 'SQLite', log_prefix, dry_run)
dump.remove_data_source_dumps(make_dump_path(config), 'SQLite', log_prefix, dry_run)
def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
def make_data_source_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
'''
Make a pattern that matches the given SQLite3 databases. The databases are supplied as a
sequence of configuration dicts, as per the configuration schema.
'''
return dump.make_database_dump_filename(make_dump_path(config), name)
return dump.make_data_source_dump_filename(make_dump_path(config), name)
def restore_database_dump(
databases_config, config, log_prefix, database_name, dry_run, extract_process, connection_params
def restore_data_source_dump(
hook_config, config, log_prefix, data_source, dry_run, extract_process, connection_params
):
'''
Restore the given SQLite3 database from an extract stream. The databases are supplied as a
sequence containing one dict describing each database (as per the configuration schema), but
only the database corresponding to the given database name is restored. Use the given log prefix
in any log entries. If this is a dry run, then don't actually restore anything. Trigger the
given active extract process (an instance of subprocess.Popen) to produce output to consume.
Restore a database from the given extract stream. The database is supplied as a data source
configuration dict, but the given hook configuration is ignored. The given configuration dict is
used to construct the destination path, and the given log prefix is used for any log entries. If
this is a dry run, then don't actually restore anything. Trigger the given active extract
process (an instance of subprocess.Popen) to produce output to consume.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
try:
database = next(
database_config
for database_config in databases_config
if database_config.get('name') == database_name
)
except StopIteration:
raise ValueError(
f'A database named "{database_name}" could not be found in the configuration'
)
database_path = connection_params['restore_path'] or database.get(
'restore_path', database.get('path')
database_path = connection_params['restore_path'] or data_source.get(
'restore_path', data_source.get('path')
)
logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}')

View File

@ -38,11 +38,11 @@ below for how to configure this.
borgmatic integrates with monitoring services like
[Healthchecks](https://healthchecks.io/), [Cronitor](https://cronitor.io),
[Cronhub](https://cronhub.io), [PagerDuty](https://www.pagerduty.com/), and
[ntfy](https://ntfy.sh/) and pings these services whenever borgmatic runs.
That way, you'll receive an alert when something goes wrong or (for certain
hooks) the service doesn't hear from borgmatic for a configured interval. See
[Healthchecks
[Cronhub](https://cronhub.io), [PagerDuty](https://www.pagerduty.com/),
[ntfy](https://ntfy.sh/), and [Grafana Loki](https://grafana.com/oss/loki/)
and pings these services whenever borgmatic runs. That way, you'll receive an
alert when something goes wrong or (for certain hooks) the service doesn't
hear from borgmatic for a configured interval. See [Healthchecks
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#healthchecks-hook),
[Cronitor
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook),
@ -50,7 +50,10 @@ hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-h
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook),
[PagerDuty
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pagerduty-hook),
and [ntfy hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook)
[ntfy
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook),
and [Loki
hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#loki-hook),
below for how to configure this.
While these services offer different features, you probably only need to use
@ -129,7 +132,7 @@ especially the security information.
## Healthchecks hook
[Healthchecks](https://healthchecks.io/) is a service that provides "instant
alerts when your cron jobs fail silently", and borgmatic has built-in
alerts when your cron jobs fail silently," and borgmatic has built-in
integration with it. Once you create a Healthchecks account and project on
their site, all you need to do is configure borgmatic with the unique "Ping
URL" for your project. Here's an example:
@ -144,21 +147,19 @@ healthchecks:
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Healthchecks project when a
backup begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
hooks</a> run, borgmatic lets Healthchecks know that it has started if any of
the `create`, `prune`, `compact`, or `check` actions are run.
backup begins, ends, or errors, but only when any of the `create`, `prune`,
`compact`, or `check` actions are run.
Then, if the actions complete successfully, borgmatic notifies Healthchecks of
the success after the `after_backup` hooks run and includes borgmatic logs in
the payload data sent to Healthchecks. This means that borgmatic logs show up
in the Healthchecks UI, although be aware that Healthchecks currently has a
10-kilobyte limit for the logs in each ping.
the success and includes borgmatic logs in the payload data sent to
Healthchecks. This means that borgmatic logs show up in the Healthchecks UI,
although be aware that Healthchecks currently has a 10-kilobyte limit for the
logs in each ping.
If an error occurs during any action or hook, borgmatic notifies Healthchecks
after the `on_error` hooks run, also tacking on logs including the error
itself. But the logs are only included for errors that occur when a `create`,
`prune`, `compact`, or `check` action is run.
If an error occurs during any action or hook, borgmatic notifies Healthchecks,
also tacking on logs including the error itself. But the logs are only
included for errors that occur when a `create`, `prune`, `compact`, or `check`
action is run.
You can customize the verbosity of the logs that are sent to Healthchecks with
borgmatic's `--monitoring-verbosity` flag. The `--list` and `--stats` flags
@ -175,7 +176,7 @@ or it doesn't hear from borgmatic for a certain period of time.
## Cronitor hook
[Cronitor](https://cronitor.io/) provides "Cron monitoring and uptime healthchecks
for websites, services and APIs", and borgmatic has built-in
for websites, services and APIs," and borgmatic has built-in
integration with it. Once you create a Cronitor account and cron job monitor on
their site, all you need to do is configure borgmatic with the unique "Ping
API URL" for your monitor. Here's an example:
@ -190,13 +191,9 @@ cronitor:
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Cronitor monitor when a backup
begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
hooks</a> run, borgmatic lets Cronitor know that it has started if any of the
`prune`, `compact`, `create`, or `check` actions are run. Then, if the actions
complete successfully, borgmatic notifies Cronitor of the success after the
`after_backup` hooks run. And if an error occurs during any action or hook,
borgmatic notifies Cronitor after the `on_error` hooks run.
begins, ends, or errors, but only when any of the `prune`, `compact`,
`create`, or `check` actions are run. Then, if the actions complete
successfully or errors, borgmatic notifies Cronitor accordingly.
You can configure Cronitor to notify you by a [variety of
mechanisms](https://cronitor.io/docs/cron-job-notifications) when backups fail
@ -206,7 +203,7 @@ or it doesn't hear from borgmatic for a certain period of time.
## Cronhub hook
[Cronhub](https://cronhub.io/) provides "instant alerts when any of your
background jobs fail silently or run longer than expected", and borgmatic has
background jobs fail silently or run longer than expected," and borgmatic has
built-in integration with it. Once you create a Cronhub account and monitor on
their site, all you need to do is configure borgmatic with the unique "Ping
URL" for your monitor. Here's an example:
@ -221,13 +218,9 @@ cronhub:
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic pings your Cronhub monitor when a backup
begins, ends, or errors. Specifically, after the <a
href="https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/">`before_backup`
hooks</a> run, borgmatic lets Cronhub know that it has started if any of the
`prune`, `compact`, `create`, or `check` actions are run. Then, if the actions
complete successfully, borgmatic notifies Cronhub of the success after the
`after_backup` hooks run. And if an error occurs during any action or hook,
borgmatic notifies Cronhub after the `on_error` hooks run.
begins, ends, or errors, but only when any of the `prune`, `compact`,
`create`, or `check` actions are run. Then, if the actions complete
successfully or error, borgmatic notifies Cronhub accordingly.
Note that even though you configure borgmatic with the "start" variant of the
ping URL, borgmatic substitutes the correct state into the URL when pinging
@ -266,10 +259,9 @@ pagerduty:
this option in the `hooks:` section of your configuration.
With this hook in place, borgmatic creates a PagerDuty event for your service
whenever backups fail. Specifically, if an error occurs during a `create`,
`prune`, `compact`, or `check` action, borgmatic sends an event to PagerDuty
before the `on_error` hooks run. Note that borgmatic does not contact
PagerDuty when a backup starts or ends without error.
whenever backups fail, but only when any of the `create`, `prune`, `compact`,
or `check` actions are run. Note that borgmatic does not contact PagerDuty
when a backup starts or when it ends without error.
You can configure PagerDuty to notify you by a [variety of
mechanisms](https://support.pagerduty.com/docs/notifications) when backups
@ -281,10 +273,12 @@ us](https://torsion.org/borgmatic/#support-and-contributing).
## ntfy hook
[ntfy](https://ntfy.sh) is a free, simple, service (either hosted or self-hosted)
which offers simple pub/sub push notifications to multiple platforms including
[web](https://ntfy.sh/stats), [Android](https://play.google.com/store/apps/details?id=io.heckel.ntfy)
and [iOS](https://apps.apple.com/us/app/ntfy/id1625396347).
<span class="minilink minilink-addedin">New in version 1.6.3</span>
[ntfy](https://ntfy.sh) is a free, simple, service (either hosted or
self-hosted) which offers simple pub/sub push notifications to multiple
platforms including [web](https://ntfy.sh/stats),
[Android](https://play.google.com/store/apps/details?id=io.heckel.ntfy) and
[iOS](https://apps.apple.com/us/app/ntfy/id1625396347).
Since push notifications for regular events might soon become quite annoying,
this hook only fires on any errors by default in order to instantly alert you to issues.
@ -328,6 +322,59 @@ ntfy:
the `ntfy:` option in the `hooks:` section of your configuration.
## Loki hook
<span class="minilink minilink-addedin">New in version 1.8.3</span> [Grafana
Loki](https://grafana.com/oss/loki/) is a "horizontally scalable, highly
available, multi-tenant log aggregation system inspired by Prometheus."
borgmatic has built-in integration with Loki, sending both backup status and
borgmatic logs.
You can configure borgmatic to use either a [self-hosted Loki
instance](https://grafana.com/docs/loki/latest/installation/) or [a Grafana
Cloud account](https://grafana.com/auth/sign-up/create-user). Start by setting
your Loki API push URL. Here's an example:
```yaml
loki:
url: http://localhost:3100/loki/api/v1/push
```
With this hook in place, borgmatic sends its logs to your Loki instance as any
of the `prune`, `compact`, `create`, or `check` actions are run. Then, after
the actions complete, borgmatic notifies Loki of success or failure.
This hook supports sending arbitrary labels to Loki. For instance:
```yaml
loki:
url: http://localhost:3100/loki/api/v1/push
labels:
app: borgmatic
hostname: example.org
```
There are also a few placeholders you can optionally use as label values:
* `__config`: name of the borgmatic configuration file
* `__config_path`: full path of the borgmatic configuration file
* `__hostname`: the local machine hostname
These placeholders are only substituted for the whole label value, not
interpolated into a larger string. For instance:
```yaml
loki:
url: http://localhost:3100/loki/api/v1/push
labels:
app: borgmatic
config: __config
hostname: __hostname
```
## Scripting borgmatic
To consume the output of borgmatic in other software, you can include an

View File

@ -21,7 +21,7 @@ apk add --no-cache python3 py3-pip borgbackup postgresql-client mariadb-client m
py3-ruamel.yaml py3-ruamel.yaml.clib bash sqlite fish
# If certain dependencies of black are available in this version of Alpine, install them.
apk add --no-cache py3-typed-ast py3-regex || true
python3 -m pip install --no-cache --upgrade pip==22.2.2 setuptools==64.0.1
python3 -m pip install --no-cache --upgrade pip==22.2.2 setuptools==64.0.1 pymongo==4.4.1
pip3 install --ignore-installed tox==3.25.1
export COVERAGE_FILE=/tmp/.coverage

View File

@ -1,6 +1,6 @@
from setuptools import find_packages, setup
VERSION = '1.8.2.dev0'
VERSION = '1.8.3.dev0'
setup(

View File

@ -10,7 +10,6 @@ services:
environment:
POSTGRES_PASSWORD: test2
POSTGRES_DB: test
POSTGRES_USER: postgres2
command: docker-entrypoint.sh -p 5433
mariadb:
image: docker.io/mariadb:10.11.4
@ -23,6 +22,17 @@ services:
MARIADB_ROOT_PASSWORD: test2
MARIADB_DATABASE: test
command: docker-entrypoint.sh --port=3307
not-actually-mysql:
image: docker.io/mariadb:10.11.4
environment:
MARIADB_ROOT_PASSWORD: test
MARIADB_DATABASE: test
not-actually-mysql2:
image: docker.io/mariadb:10.11.4
environment:
MARIADB_ROOT_PASSWORD: test2
MARIADB_DATABASE: test
command: docker-entrypoint.sh --port=3307
mongodb:
image: docker.io/mongo:5.0.5
environment:

View File

@ -5,7 +5,9 @@ import subprocess
import sys
import tempfile
import pymongo
import pytest
import ruamel.yaml
def write_configuration(
@ -21,7 +23,7 @@ def write_configuration(
for testing. This includes injecting the given repository path, borgmatic source directory for
storing database dumps, dump format (for PostgreSQL), and encryption passphrase.
'''
config = f'''
config_yaml = f'''
source_directories:
- {source_directory}
repositories:
@ -61,16 +63,16 @@ mariadb_databases:
password: test
mysql_databases:
- name: test
hostname: mariadb
hostname: not-actually-mysql
username: root
password: test
- name: all
hostname: mariadb
hostname: not-actually-mysql
username: root
password: test
- name: all
format: sql
hostname: mariadb
hostname: not-actually-mysql
username: root
password: test
mongodb_databases:
@ -90,7 +92,9 @@ sqlite_databases:
'''
with open(config_path, 'w') as config_file:
config_file.write(config)
config_file.write(config_yaml)
return ruamel.yaml.YAML(typ='safe').load(config_yaml)
def write_custom_restore_configuration(
@ -106,7 +110,7 @@ def write_custom_restore_configuration(
for testing with custom restore options. This includes a custom restore_hostname, restore_port,
restore_username, restore_password and restore_path.
'''
config = f'''
config_yaml = f'''
source_directories:
- {source_directory}
repositories:
@ -123,7 +127,6 @@ postgresql_databases:
format: {postgresql_dump_format}
restore_hostname: postgresql2
restore_port: 5433
restore_username: postgres2
restore_password: test2
mariadb_databases:
- name: test
@ -136,10 +139,10 @@ mariadb_databases:
restore_password: test2
mysql_databases:
- name: test
hostname: mariadb
hostname: not-actually-mysql
username: root
password: test
restore_hostname: mariadb2
restore_hostname: not-actually-mysql2
restore_port: 3307
restore_username: root
restore_password: test2
@ -161,7 +164,9 @@ sqlite_databases:
'''
with open(config_path, 'w') as config_file:
config_file.write(config)
config_file.write(config_yaml)
return ruamel.yaml.YAML(typ='safe').load(config_yaml)
def write_simple_custom_restore_configuration(
@ -177,7 +182,7 @@ def write_simple_custom_restore_configuration(
custom restore_hostname, restore_port, restore_username and restore_password as we only test
these options for PostgreSQL.
'''
config = f'''
config_yaml = f'''
source_directories:
- {source_directory}
repositories:
@ -195,7 +200,147 @@ postgresql_databases:
'''
with open(config_path, 'w') as config_file:
config_file.write(config)
config_file.write(config_yaml)
return ruamel.yaml.YAML(typ='safe').load(config_yaml)
def get_connection_params(database, use_restore_options=False):
    '''
    Given a database configuration dict, return a tuple of (hostname, port,
    username, password) for connecting to it. When use_restore_options is
    True, prefer any configured "restore_*" variant of each option, falling
    back to the plain option when the restore variant is absent or falsy.
    '''

    def lookup(option_name):
        # Only consult the restore-specific variant when requested, and fall
        # back to the regular option if that variant is missing or empty.
        if use_restore_options and database.get(f'restore_{option_name}'):
            return database.get(f'restore_{option_name}')

        return database.get(option_name)

    return (lookup('hostname'), lookup('port'), lookup('username'), lookup('password'))
def run_postgresql_command(command, config, use_restore_options=False):
    '''
    Execute the given SQL command string against the "test" database of the
    first configured PostgreSQL instance via the psql command-line client,
    using restore connection options when requested.
    '''
    hostname, port, username, password = get_connection_params(
        config['postgresql_databases'][0], use_restore_options
    )

    psql_command = [
        '/usr/bin/psql',
        f'--host={hostname}',
        f'--port={port or 5432}',
        f"--username={username or 'root'}",
        f'--command={command}',
        'test',
    ]

    # Pass the password via PGPASSWORD so it never appears on the command
    # line. Note that this replaces the entire environment for the child.
    subprocess.check_call(psql_command, env={'PGPASSWORD': password})
def run_mariadb_command(command, config, use_restore_options=False, binary_name='mariadb'):
    '''
    Execute the given SQL command string against the "test" database of the
    first configured MariaDB (or MySQL, depending on binary_name) instance,
    using restore connection options when requested. binary_name selects both
    the client binary to run and the "<binary_name>_databases" configuration
    section to read connection options from.
    '''
    hostname, port, username, password = get_connection_params(
        config[f'{binary_name}_databases'][0], use_restore_options
    )

    client_command = [
        f'/usr/bin/{binary_name}',
        f'--host={hostname}',
        f'--port={port or 3306}',
        f'--user={username}',
        f'--execute={command}',
        'test',
    ]

    # MYSQL_PWD keeps the password off the visible command line; it also
    # replaces the entire environment for the child process.
    subprocess.check_call(client_command, env={'MYSQL_PWD': password})
def get_mongodb_database_client(config, use_restore_options=False):
    '''
    Return a pymongo handle on the "test" database of the first configured
    MongoDB instance, using restore connection options when requested.
    '''
    hostname, port, username, password = get_connection_params(
        config['mongodb_databases'][0], use_restore_options
    )
    connection_string = f'mongodb://{username}:{password}@{hostname}:{port or 27017}'

    return pymongo.MongoClient(connection_string).test
def run_sqlite_command(command, config, use_restore_options=False):
    '''
    Execute the given SQL command string against the first configured SQLite
    database file via the sqlite3 command-line tool, using the configured
    restore path when requested.
    '''
    database = config['sqlite_databases'][0]

    if use_restore_options and database.get('restore_path'):
        path = database.get('restore_path')
    else:
        path = database.get('path')

    subprocess.check_call(['/usr/bin/sqlite3', path, command, '.exit'])
DEFAULT_HOOK_NAMES = {'postgresql', 'mariadb', 'mysql', 'mongodb', 'sqlite'}
def create_test_tables(config, use_restore_options=False):
    '''
    Create one test table per configured database hook (and insert a single
    row into each) so that borgmatic has data to dump and back up, and so a
    later restore can be verified.
    '''
    command_template = 'create table test{id} (thing int); insert into test{id} values (1);'

    if 'postgresql_databases' in config:
        run_postgresql_command(command_template.format(id=1), config, use_restore_options)

    if 'mariadb_databases' in config:
        run_mariadb_command(command_template.format(id=2), config, use_restore_options)

    if 'mysql_databases' in config:
        run_mariadb_command(
            command_template.format(id=3), config, use_restore_options, binary_name='mysql'
        )

    if 'mongodb_databases' in config:
        # MongoDB is schemaless, so "creating a table" amounts to inserting a
        # document into the test4 collection.
        get_mongodb_database_client(config, use_restore_options)['test4'].insert_one({'thing': 1})

    if 'sqlite_databases' in config:
        run_sqlite_command(command_template.format(id=5), config, use_restore_options)
def drop_test_tables(config, use_restore_options=False):
    '''
    Drop the per-hook test tables so that a subsequent borgmatic restore can
    be shown to actually recreate them.
    '''
    command_template = 'drop table if exists test{id};'

    if 'postgresql_databases' in config:
        run_postgresql_command(command_template.format(id=1), config, use_restore_options)

    if 'mariadb_databases' in config:
        run_mariadb_command(command_template.format(id=2), config, use_restore_options)

    if 'mysql_databases' in config:
        run_mariadb_command(
            command_template.format(id=3), config, use_restore_options, binary_name='mysql'
        )

    if 'mongodb_databases' in config:
        # Dropping the collection is MongoDB's equivalent of dropping a table.
        get_mongodb_database_client(config, use_restore_options)['test4'].drop()

    if 'sqlite_databases' in config:
        run_sqlite_command(command_template.format(id=5), config, use_restore_options)
def select_test_tables(config, use_restore_options=False):
    '''
    Select from each per-hook test table to make sure it exists.

    Raise if any expected table cannot be selected, for instance if a restore
    hasn't worked as expected.
    '''
    command_template = 'select count(*) from test{id};'

    if 'postgresql_databases' in config:
        run_postgresql_command(command_template.format(id=1), config, use_restore_options)

    if 'mariadb_databases' in config:
        run_mariadb_command(command_template.format(id=2), config, use_restore_options)

    if 'mysql_databases' in config:
        run_mariadb_command(
            command_template.format(id=3), config, use_restore_options, binary_name='mysql'
        )

    if 'mongodb_databases' in config:
        # For MongoDB, "the table exists" means the test4 collection contains
        # at least one document.
        client = get_mongodb_database_client(config, use_restore_options)
        assert client['test4'].count_documents(filter={}) > 0

    if 'sqlite_databases' in config:
        run_sqlite_command(command_template.format(id=5), config, use_restore_options)
def test_database_dump_and_restore():
@ -211,15 +356,17 @@ def test_database_dump_and_restore():
try:
config_path = os.path.join(temporary_directory, 'test.yaml')
write_configuration(
config = write_configuration(
temporary_directory, config_path, repository_path, borgmatic_source_directory
)
create_test_tables(config)
select_test_tables(config)
subprocess.check_call(
['borgmatic', '-v', '2', '--config', config_path, 'rcreate', '--encryption', 'repokey']
)
# Run borgmatic to generate a backup archive including a database dump.
# Run borgmatic to generate a backup archive including database dumps.
subprocess.check_call(['borgmatic', 'create', '--config', config_path, '-v', '2'])
# Get the created archive name.
@ -232,16 +379,21 @@ def test_database_dump_and_restore():
assert len(parsed_output[0]['archives']) == 1
archive_name = parsed_output[0]['archives'][0]['archive']
# Restore the database from the archive.
# Restore the databases from the archive.
drop_test_tables(config)
subprocess.check_call(
['borgmatic', '-v', '2', '--config', config_path, 'restore', '--archive', archive_name]
)
# Ensure the test tables have actually been restored.
select_test_tables(config)
finally:
os.chdir(original_working_directory)
shutil.rmtree(temporary_directory)
drop_test_tables(config)
def test_database_dump_and_restore_with_restore_cli_arguments():
def test_database_dump_and_restore_with_restore_cli_flags():
# Create a Borg repository.
temporary_directory = tempfile.mkdtemp()
repository_path = os.path.join(temporary_directory, 'test.borg')
@ -251,9 +403,11 @@ def test_database_dump_and_restore_with_restore_cli_arguments():
try:
config_path = os.path.join(temporary_directory, 'test.yaml')
write_simple_custom_restore_configuration(
config = write_simple_custom_restore_configuration(
temporary_directory, config_path, repository_path, borgmatic_source_directory
)
create_test_tables(config)
select_test_tables(config)
subprocess.check_call(
['borgmatic', '-v', '2', '--config', config_path, 'rcreate', '--encryption', 'repokey']
@ -273,6 +427,7 @@ def test_database_dump_and_restore_with_restore_cli_arguments():
archive_name = parsed_output[0]['archives'][0]['archive']
# Restore the database from the archive.
drop_test_tables(config)
subprocess.check_call(
[
'borgmatic',
@ -287,15 +442,25 @@ def test_database_dump_and_restore_with_restore_cli_arguments():
'postgresql2',
'--port',
'5433',
'--username',
'postgres2',
'--password',
'test2',
]
)
# Ensure the test tables have actually been restored. But first modify the config to contain
# the altered restore values from the borgmatic command above. This ensures that the test
# tables are selected from the correct database.
database = config['postgresql_databases'][0]
database['restore_hostname'] = 'postgresql2'
database['restore_port'] = '5433'
database['restore_password'] = 'test2'
select_test_tables(config, use_restore_options=True)
finally:
os.chdir(original_working_directory)
shutil.rmtree(temporary_directory)
drop_test_tables(config)
drop_test_tables(config, use_restore_options=True)
def test_database_dump_and_restore_with_restore_configuration_options():
@ -308,9 +473,11 @@ def test_database_dump_and_restore_with_restore_configuration_options():
try:
config_path = os.path.join(temporary_directory, 'test.yaml')
write_custom_restore_configuration(
config = write_custom_restore_configuration(
temporary_directory, config_path, repository_path, borgmatic_source_directory
)
create_test_tables(config)
select_test_tables(config)
subprocess.check_call(
['borgmatic', '-v', '2', '--config', config_path, 'rcreate', '--encryption', 'repokey']
@ -330,12 +497,18 @@ def test_database_dump_and_restore_with_restore_configuration_options():
archive_name = parsed_output[0]['archives'][0]['archive']
# Restore the database from the archive.
drop_test_tables(config)
subprocess.check_call(
['borgmatic', '-v', '2', '--config', config_path, 'restore', '--archive', archive_name]
)
# Ensure the test tables have actually been restored.
select_test_tables(config, use_restore_options=True)
finally:
os.chdir(original_working_directory)
shutil.rmtree(temporary_directory)
drop_test_tables(config)
drop_test_tables(config, use_restore_options=True)
def test_database_dump_and_restore_with_directory_format():
@ -348,7 +521,7 @@ def test_database_dump_and_restore_with_directory_format():
try:
config_path = os.path.join(temporary_directory, 'test.yaml')
write_configuration(
config = write_configuration(
temporary_directory,
config_path,
repository_path,
@ -356,6 +529,8 @@ def test_database_dump_and_restore_with_directory_format():
postgresql_dump_format='directory',
mongodb_dump_format='directory',
)
create_test_tables(config)
select_test_tables(config)
subprocess.check_call(
['borgmatic', '-v', '2', '--config', config_path, 'rcreate', '--encryption', 'repokey']
@ -365,12 +540,17 @@ def test_database_dump_and_restore_with_directory_format():
subprocess.check_call(['borgmatic', 'create', '--config', config_path, '-v', '2'])
# Restore the database from the archive.
drop_test_tables(config)
subprocess.check_call(
['borgmatic', '--config', config_path, 'restore', '--archive', 'latest']
)
# Ensure the test tables have actually been restored.
select_test_tables(config)
finally:
os.chdir(original_working_directory)
shutil.rmtree(temporary_directory)
drop_test_tables(config)
def test_database_dump_with_error_causes_borgmatic_to_exit():

View File

@ -6,9 +6,9 @@ from flexmock import flexmock
from borgmatic.hooks import loki as module
def test_log_handler_label_replacment():
def test_initialize_monitor_replaces_labels():
'''
Assert that label placeholders get replaced
Assert that label placeholders get replaced.
'''
hook_config = {
'url': 'http://localhost:3100/loki/api/v1/push',
@ -17,18 +17,20 @@ def test_log_handler_label_replacment():
config_filename = '/mock/path/test.yaml'
dry_run = True
module.initialize_monitor(hook_config, flexmock(), config_filename, flexmock(), dry_run)
for handler in tuple(logging.getLogger().handlers):
if isinstance(handler, module.Loki_log_handler):
assert handler.buffer.root['streams'][0]['stream']['hostname'] == platform.node()
assert handler.buffer.root['streams'][0]['stream']['config'] == 'test.yaml'
assert handler.buffer.root['streams'][0]['stream']['config_full'] == config_filename
return
assert False
def test_initalize_adds_log_handler():
def test_initialize_monitor_adds_log_handler():
'''
Assert that calling initialize_monitor adds our logger to the root logger
Assert that calling initialize_monitor adds our logger to the root logger.
'''
hook_config = {'url': 'http://localhost:3100/loki/api/v1/push', 'labels': {'app': 'borgmatic'}}
module.initialize_monitor(
@ -38,15 +40,17 @@ def test_initalize_adds_log_handler():
monitoring_log_level=flexmock(),
dry_run=True,
)
for handler in tuple(logging.getLogger().handlers):
if isinstance(handler, module.Loki_log_handler):
return
assert False
def test_ping_adds_log_message():
def test_ping_monitor_adds_log_message():
'''
Assert that calling ping_monitor adds a message to our logger
Assert that calling ping_monitor adds a message to our logger.
'''
hook_config = {'url': 'http://localhost:3100/loki/api/v1/push', 'labels': {'app': 'borgmatic'}}
config_filename = 'test.yaml'
@ -55,6 +59,7 @@ def test_ping_adds_log_message():
module.ping_monitor(
hook_config, flexmock(), config_filename, module.monitor.State.FINISH, flexmock(), dry_run
)
for handler in tuple(logging.getLogger().handlers):
if isinstance(handler, module.Loki_log_handler):
assert any(
@ -65,18 +70,20 @@ def test_ping_adds_log_message():
)
)
return
assert False
def test_log_handler_gets_removed():
def test_destroy_monitor_removes_log_handler():
'''
Assert that destroy_monitor removes the logger from the root logger
Assert that destroy_monitor removes the logger from the root logger.
'''
hook_config = {'url': 'http://localhost:3100/loki/api/v1/push', 'labels': {'app': 'borgmatic'}}
config_filename = 'test.yaml'
dry_run = True
module.initialize_monitor(hook_config, flexmock(), config_filename, flexmock(), dry_run)
module.destroy_monitor(hook_config, flexmock(), config_filename, flexmock(), dry_run)
for handler in tuple(logging.getLogger().handlers):
if isinstance(handler, module.Loki_log_handler):
assert False

View File

@ -4,60 +4,71 @@ from flexmock import flexmock
import borgmatic.actions.restore as module
def test_get_configured_database_matches_database_by_name():
assert module.get_configured_database(
def test_get_configured_data_source_matches_data_source_by_name():
assert module.get_configured_data_source(
config={
'other_databases': [{'name': 'other'}],
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
},
archive_database_names={'postgresql_databases': ['other', 'foo', 'bar']},
archive_data_source_names={'postgresql_databases': ['other', 'foo', 'bar']},
hook_name='postgresql_databases',
database_name='bar',
data_source_name='bar',
) == ('postgresql_databases', {'name': 'bar'})
def test_get_configured_database_matches_nothing_when_database_name_not_configured():
assert module.get_configured_database(
config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['foo']},
def test_get_configured_data_source_matches_nothing_when_nothing_configured():
assert module.get_configured_data_source(
config={},
archive_data_source_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases',
database_name='quux',
data_source_name='quux',
) == (None, None)
def test_get_configured_database_matches_nothing_when_database_name_not_in_archive():
assert module.get_configured_database(
def test_get_configured_data_source_matches_nothing_when_data_source_name_not_configured():
assert module.get_configured_data_source(
config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['bar']},
archive_data_source_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases',
database_name='foo',
data_source_name='quux',
) == (None, None)
def test_get_configured_database_matches_database_by_configuration_database_name():
assert module.get_configured_database(
def test_get_configured_data_source_matches_nothing_when_data_source_name_not_in_archive():
assert module.get_configured_data_source(
config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_data_source_names={'postgresql_databases': ['bar']},
hook_name='postgresql_databases',
data_source_name='foo',
) == (None, None)
def test_get_configured_data_source_matches_data_source_by_configuration_data_source_name():
assert module.get_configured_data_source(
config={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['foo']},
archive_data_source_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases',
database_name='foo',
configuration_database_name='all',
data_source_name='foo',
configuration_data_source_name='all',
) == ('postgresql_databases', {'name': 'all'})
def test_get_configured_database_with_unspecified_hook_matches_database_by_name():
assert module.get_configured_database(
def test_get_configured_data_source_with_unspecified_hook_matches_data_source_by_name():
assert module.get_configured_data_source(
config={
'other_databases': [{'name': 'other'}],
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
},
archive_database_names={'postgresql_databases': ['other', 'foo', 'bar']},
archive_data_source_names={'postgresql_databases': ['other', 'foo', 'bar']},
hook_name=module.UNSPECIFIED_HOOK,
database_name='bar',
data_source_name='bar',
) == ('postgresql_databases', {'name': 'bar'})
def test_collect_archive_database_names_parses_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('')
def test_collect_archive_data_source_names_parses_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_data_source_dump_path').and_return(
''
)
flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return(
[
'.borgmatic/postgresql_databases/localhost/foo',
@ -66,7 +77,7 @@ def test_collect_archive_database_names_parses_archive_paths():
]
)
archive_database_names = module.collect_archive_database_names(
archive_data_source_names = module.collect_archive_data_source_names(
repository={'path': 'repo'},
archive='archive',
config={'borgmatic_source_directory': '.borgmatic'},
@ -76,14 +87,16 @@ def test_collect_archive_database_names_parses_archive_paths():
remote_path=flexmock(),
)
assert archive_database_names == {
assert archive_data_source_names == {
'postgresql_databases': ['foo', 'bar'],
'mysql_databases': ['quux'],
}
def test_collect_archive_database_names_parses_directory_format_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('')
def test_collect_archive_data_source_names_parses_directory_format_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_data_source_dump_path').and_return(
''
)
flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return(
[
'.borgmatic/postgresql_databases/localhost/foo/table1',
@ -91,7 +104,7 @@ def test_collect_archive_database_names_parses_directory_format_archive_paths():
]
)
archive_database_names = module.collect_archive_database_names(
archive_data_source_names = module.collect_archive_data_source_names(
repository={'path': 'repo'},
archive='archive',
config={'borgmatic_source_directory': '.borgmatic'},
@ -101,18 +114,20 @@ def test_collect_archive_database_names_parses_directory_format_archive_paths():
remote_path=flexmock(),
)
assert archive_database_names == {
assert archive_data_source_names == {
'postgresql_databases': ['foo'],
}
def test_collect_archive_database_names_skips_bad_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('')
def test_collect_archive_data_source_names_skips_bad_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_data_source_dump_path').and_return(
''
)
flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return(
['.borgmatic/postgresql_databases/localhost/foo', '.borgmatic/invalid', 'invalid/as/well']
)
archive_database_names = module.collect_archive_database_names(
archive_data_source_names = module.collect_archive_data_source_names(
repository={'path': 'repo'},
archive='archive',
config={'borgmatic_source_directory': '.borgmatic'},
@ -122,96 +137,96 @@ def test_collect_archive_database_names_skips_bad_archive_paths():
remote_path=flexmock(),
)
assert archive_database_names == {
assert archive_data_source_names == {
'postgresql_databases': ['foo'],
}
def test_find_databases_to_restore_passes_through_requested_names_found_in_archive():
restore_names = module.find_databases_to_restore(
requested_database_names=['foo', 'bar'],
archive_database_names={'postresql_databases': ['foo', 'bar', 'baz']},
def test_find_data_sources_to_restore_passes_through_requested_names_found_in_archive():
restore_names = module.find_data_sources_to_restore(
requested_data_source_names=['foo', 'bar'],
archive_data_source_names={'postresql_databases': ['foo', 'bar', 'baz']},
)
assert restore_names == {module.UNSPECIFIED_HOOK: ['foo', 'bar']}
def test_find_databases_to_restore_raises_for_requested_names_missing_from_archive():
def test_find_data_sources_to_restore_raises_for_requested_names_missing_from_archive():
with pytest.raises(ValueError):
module.find_databases_to_restore(
requested_database_names=['foo', 'bar'],
archive_database_names={'postresql_databases': ['foo']},
module.find_data_sources_to_restore(
requested_data_source_names=['foo', 'bar'],
archive_data_source_names={'postresql_databases': ['foo']},
)
def test_find_databases_to_restore_without_requested_names_finds_all_archive_databases():
archive_database_names = {'postresql_databases': ['foo', 'bar']}
def test_find_data_sources_to_restore_without_requested_names_finds_all_archive_data_sources():
archive_data_source_names = {'postresql_databases': ['foo', 'bar']}
restore_names = module.find_databases_to_restore(
requested_database_names=[],
archive_database_names=archive_database_names,
restore_names = module.find_data_sources_to_restore(
requested_data_source_names=[],
archive_data_source_names=archive_data_source_names,
)
assert restore_names == archive_database_names
assert restore_names == archive_data_source_names
def test_find_databases_to_restore_with_all_in_requested_names_finds_all_archive_databases():
archive_database_names = {'postresql_databases': ['foo', 'bar']}
def test_find_data_sources_to_restore_with_all_in_requested_names_finds_all_archive_data_sources():
archive_data_source_names = {'postresql_databases': ['foo', 'bar']}
restore_names = module.find_databases_to_restore(
requested_database_names=['all'],
archive_database_names=archive_database_names,
restore_names = module.find_data_sources_to_restore(
requested_data_source_names=['all'],
archive_data_source_names=archive_data_source_names,
)
assert restore_names == archive_database_names
assert restore_names == archive_data_source_names
def test_find_databases_to_restore_with_all_in_requested_names_plus_additional_requested_names_omits_duplicates():
archive_database_names = {'postresql_databases': ['foo', 'bar']}
def test_find_data_sources_to_restore_with_all_in_requested_names_plus_additional_requested_names_omits_duplicates():
archive_data_source_names = {'postresql_databases': ['foo', 'bar']}
restore_names = module.find_databases_to_restore(
requested_database_names=['all', 'foo', 'bar'],
archive_database_names=archive_database_names,
restore_names = module.find_data_sources_to_restore(
requested_data_source_names=['all', 'foo', 'bar'],
archive_data_source_names=archive_data_source_names,
)
assert restore_names == archive_database_names
assert restore_names == archive_data_source_names
def test_find_databases_to_restore_raises_for_all_in_requested_names_and_requested_named_missing_from_archives():
def test_find_data_sources_to_restore_raises_for_all_in_requested_names_and_requested_named_missing_from_archives():
with pytest.raises(ValueError):
module.find_databases_to_restore(
requested_database_names=['all', 'foo', 'bar'],
archive_database_names={'postresql_databases': ['foo']},
module.find_data_sources_to_restore(
requested_data_source_names=['all', 'foo', 'bar'],
archive_data_source_names={'postresql_databases': ['foo']},
)
def test_ensure_databases_found_with_all_databases_found_does_not_raise():
module.ensure_databases_found(
def test_ensure_data_sources_found_with_all_data_sources_found_does_not_raise():
module.ensure_data_sources_found(
restore_names={'postgresql_databases': ['foo']},
remaining_restore_names={'postgresql_databases': ['bar']},
found_names=['foo', 'bar'],
)
def test_ensure_databases_found_with_no_databases_raises():
def test_ensure_data_sources_found_with_no_data_sources_raises():
with pytest.raises(ValueError):
module.ensure_databases_found(
module.ensure_data_sources_found(
restore_names={'postgresql_databases': []},
remaining_restore_names={},
found_names=[],
)
def test_ensure_databases_found_with_missing_databases_raises():
def test_ensure_data_sources_found_with_missing_data_sources_raises():
with pytest.raises(ValueError):
module.ensure_databases_found(
module.ensure_data_sources_found(
restore_names={'postgresql_databases': ['foo']},
remaining_restore_names={'postgresql_databases': ['bar']},
found_names=['foo'],
)
def test_run_restore_restores_each_database():
def test_run_restore_restores_each_data_source():
restore_names = {
'postgresql_databases': ['foo', 'bar'],
}
@ -221,12 +236,12 @@ def test_run_restore_restores_each_database():
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').and_return(
flexmock(module).should_receive('collect_archive_data_source_names').and_return(flexmock())
flexmock(module).should_receive('find_data_sources_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_data_source').and_return(
('postgresql_databases', {'name': 'foo'})
).and_return(('postgresql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
flexmock(module).should_receive('restore_single_data_source').with_args(
repository=object,
config=object,
local_borg_version=object,
@ -235,10 +250,10 @@ def test_run_restore_restores_each_database():
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'foo', 'schemas': None},
data_source={'name': 'foo', 'schemas': None},
connection_params=object,
).once()
flexmock(module).should_receive('restore_single_database').with_args(
flexmock(module).should_receive('restore_single_data_source').with_args(
repository=object,
config=object,
local_borg_version=object,
@ -247,10 +262,10 @@ def test_run_restore_restores_each_database():
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'bar', 'schemas': None},
data_source={'name': 'bar', 'schemas': None},
connection_params=object,
).once()
flexmock(module).should_receive('ensure_databases_found')
flexmock(module).should_receive('ensure_data_sources_found')
module.run_restore(
repository={'path': 'repo'},
@ -259,7 +274,7 @@ def test_run_restore_restores_each_database():
restore_arguments=flexmock(
repository='repo',
archive='archive',
databases=flexmock(),
data_sources=flexmock(),
schemas=None,
hostname=None,
port=None,
@ -280,20 +295,20 @@ def test_run_restore_bails_for_non_matching_repository():
flexmock(module.borgmatic.hooks.dispatch).should_receive(
'call_hooks_even_if_unconfigured'
).never()
flexmock(module).should_receive('restore_single_database').never()
flexmock(module).should_receive('restore_single_data_source').never()
module.run_restore(
repository={'path': 'repo'},
config=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
restore_arguments=flexmock(repository='repo', archive='archive', data_sources=flexmock()),
global_arguments=flexmock(dry_run=False),
local_path=flexmock(),
remote_path=flexmock(),
)
def test_run_restore_restores_database_configured_with_all_name():
def test_run_restore_restores_data_source_configured_with_all_name():
restore_names = {
'postgresql_databases': ['foo', 'bar'],
}
@ -303,28 +318,28 @@ def test_run_restore_restores_database_configured_with_all_name():
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
flexmock(module).should_receive('collect_archive_data_source_names').and_return(flexmock())
flexmock(module).should_receive('find_data_sources_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
archive_database_names=object,
archive_data_source_names=object,
hook_name='postgresql_databases',
database_name='foo',
data_source_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
archive_database_names=object,
archive_data_source_names=object,
hook_name='postgresql_databases',
database_name='bar',
data_source_name='bar',
).and_return((None, None))
flexmock(module).should_receive('get_configured_database').with_args(
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
archive_database_names=object,
archive_data_source_names=object,
hook_name='postgresql_databases',
database_name='bar',
configuration_database_name='all',
data_source_name='bar',
configuration_data_source_name='all',
).and_return(('postgresql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
flexmock(module).should_receive('restore_single_data_source').with_args(
repository=object,
config=object,
local_borg_version=object,
@ -333,10 +348,10 @@ def test_run_restore_restores_database_configured_with_all_name():
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'foo', 'schemas': None},
data_source={'name': 'foo', 'schemas': None},
connection_params=object,
).once()
flexmock(module).should_receive('restore_single_database').with_args(
flexmock(module).should_receive('restore_single_data_source').with_args(
repository=object,
config=object,
local_borg_version=object,
@ -345,10 +360,10 @@ def test_run_restore_restores_database_configured_with_all_name():
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'bar', 'schemas': None},
data_source={'name': 'bar', 'schemas': None},
connection_params=object,
).once()
flexmock(module).should_receive('ensure_databases_found')
flexmock(module).should_receive('ensure_data_sources_found')
module.run_restore(
repository={'path': 'repo'},
@ -357,7 +372,7 @@ def test_run_restore_restores_database_configured_with_all_name():
restore_arguments=flexmock(
repository='repo',
archive='archive',
databases=flexmock(),
data_sources=flexmock(),
schemas=None,
hostname=None,
port=None,
@ -371,7 +386,7 @@ def test_run_restore_restores_database_configured_with_all_name():
)
def test_run_restore_skips_missing_database():
def test_run_restore_skips_missing_data_source():
restore_names = {
'postgresql_databases': ['foo', 'bar'],
}
@ -381,28 +396,28 @@ def test_run_restore_skips_missing_database():
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
flexmock(module).should_receive('collect_archive_data_source_names').and_return(flexmock())
flexmock(module).should_receive('find_data_sources_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
archive_database_names=object,
archive_data_source_names=object,
hook_name='postgresql_databases',
database_name='foo',
data_source_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
archive_database_names=object,
archive_data_source_names=object,
hook_name='postgresql_databases',
database_name='bar',
data_source_name='bar',
).and_return((None, None))
flexmock(module).should_receive('get_configured_database').with_args(
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
archive_database_names=object,
archive_data_source_names=object,
hook_name='postgresql_databases',
database_name='bar',
configuration_database_name='all',
data_source_name='bar',
configuration_data_source_name='all',
).and_return((None, None))
flexmock(module).should_receive('restore_single_database').with_args(
flexmock(module).should_receive('restore_single_data_source').with_args(
repository=object,
config=object,
local_borg_version=object,
@ -411,10 +426,10 @@ def test_run_restore_skips_missing_database():
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'foo', 'schemas': None},
data_source={'name': 'foo', 'schemas': None},
connection_params=object,
).once()
flexmock(module).should_receive('restore_single_database').with_args(
flexmock(module).should_receive('restore_single_data_source').with_args(
repository=object,
config=object,
local_borg_version=object,
@ -423,10 +438,10 @@ def test_run_restore_skips_missing_database():
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'bar', 'schemas': None},
data_source={'name': 'bar', 'schemas': None},
connection_params=object,
).never()
flexmock(module).should_receive('ensure_databases_found')
flexmock(module).should_receive('ensure_data_sources_found')
module.run_restore(
repository={'path': 'repo'},
@ -435,7 +450,7 @@ def test_run_restore_skips_missing_database():
restore_arguments=flexmock(
repository='repo',
archive='archive',
databases=flexmock(),
data_sources=flexmock(),
schemas=None,
hostname=None,
port=None,
@ -449,7 +464,7 @@ def test_run_restore_skips_missing_database():
)
def test_run_restore_restores_databases_from_different_hooks():
def test_run_restore_restores_data_sources_from_different_hooks():
restore_names = {
'postgresql_databases': ['foo'],
'mysql_databases': ['bar'],
@ -460,21 +475,21 @@ def test_run_restore_restores_databases_from_different_hooks():
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
flexmock(module).should_receive('collect_archive_data_source_names').and_return(flexmock())
flexmock(module).should_receive('find_data_sources_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
archive_database_names=object,
archive_data_source_names=object,
hook_name='postgresql_databases',
database_name='foo',
data_source_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
flexmock(module).should_receive('get_configured_data_source').with_args(
config=object,
archive_database_names=object,
archive_data_source_names=object,
hook_name='mysql_databases',
database_name='bar',
data_source_name='bar',
).and_return(('mysql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
flexmock(module).should_receive('restore_single_data_source').with_args(
repository=object,
config=object,
local_borg_version=object,
@ -483,10 +498,10 @@ def test_run_restore_restores_databases_from_different_hooks():
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'foo', 'schemas': None},
data_source={'name': 'foo', 'schemas': None},
connection_params=object,
).once()
flexmock(module).should_receive('restore_single_database').with_args(
flexmock(module).should_receive('restore_single_data_source').with_args(
repository=object,
config=object,
local_borg_version=object,
@ -495,10 +510,10 @@ def test_run_restore_restores_databases_from_different_hooks():
remote_path=object,
archive_name=object,
hook_name='mysql_databases',
database={'name': 'bar', 'schemas': None},
data_source={'name': 'bar', 'schemas': None},
connection_params=object,
).once()
flexmock(module).should_receive('ensure_databases_found')
flexmock(module).should_receive('ensure_data_sources_found')
module.run_restore(
repository={'path': 'repo'},
@ -507,7 +522,7 @@ def test_run_restore_restores_databases_from_different_hooks():
restore_arguments=flexmock(
repository='repo',
archive='archive',
databases=flexmock(),
data_sources=flexmock(),
schemas=None,
hostname=None,
port=None,

View File

@ -4,34 +4,36 @@ from flexmock import flexmock
from borgmatic.hooks import dump as module
def test_make_database_dump_path_joins_arguments():
assert module.make_database_dump_path('/tmp', 'super_databases') == '/tmp/super_databases'
def test_make_data_source_dump_path_joins_arguments():
assert module.make_data_source_dump_path('/tmp', 'super_databases') == '/tmp/super_databases'
def test_make_database_dump_path_defaults_without_source_directory():
assert module.make_database_dump_path(None, 'super_databases') == '~/.borgmatic/super_databases'
def test_make_data_source_dump_path_defaults_without_source_directory():
assert (
module.make_data_source_dump_path(None, 'super_databases') == '~/.borgmatic/super_databases'
)
def test_make_database_dump_filename_uses_name_and_hostname():
def test_make_data_source_dump_filename_uses_name_and_hostname():
flexmock(module.os.path).should_receive('expanduser').and_return('databases')
assert (
module.make_database_dump_filename('databases', 'test', 'hostname')
module.make_data_source_dump_filename('databases', 'test', 'hostname')
== 'databases/hostname/test'
)
def test_make_database_dump_filename_without_hostname_defaults_to_localhost():
def test_make_data_source_dump_filename_without_hostname_defaults_to_localhost():
flexmock(module.os.path).should_receive('expanduser').and_return('databases')
assert module.make_database_dump_filename('databases', 'test') == 'databases/localhost/test'
assert module.make_data_source_dump_filename('databases', 'test') == 'databases/localhost/test'
def test_make_database_dump_filename_with_invalid_name_raises():
def test_make_data_source_dump_filename_with_invalid_name_raises():
flexmock(module.os.path).should_receive('expanduser').and_return('databases')
with pytest.raises(ValueError):
module.make_database_dump_filename('databases', 'invalid/name')
module.make_data_source_dump_filename('databases', 'invalid/name')
def test_create_parent_directory_for_dump_does_not_raise():
@ -47,28 +49,28 @@ def test_create_named_pipe_for_dump_does_not_raise():
module.create_named_pipe_for_dump('/path/to/pipe')
def test_remove_database_dumps_removes_dump_path():
def test_remove_data_source_dumps_removes_dump_path():
flexmock(module.os.path).should_receive('expanduser').and_return('databases/localhost')
flexmock(module.os.path).should_receive('exists').and_return(True)
flexmock(module.shutil).should_receive('rmtree').with_args('databases/localhost').once()
module.remove_database_dumps('databases', 'SuperDB', 'test.yaml', dry_run=False)
module.remove_data_source_dumps('databases', 'SuperDB', 'test.yaml', dry_run=False)
def test_remove_database_dumps_with_dry_run_skips_removal():
def test_remove_data_source_dumps_with_dry_run_skips_removal():
flexmock(module.os.path).should_receive('expanduser').and_return('databases/localhost')
flexmock(module.os.path).should_receive('exists').never()
flexmock(module.shutil).should_receive('rmtree').never()
module.remove_database_dumps('databases', 'SuperDB', 'test.yaml', dry_run=True)
module.remove_data_source_dumps('databases', 'SuperDB', 'test.yaml', dry_run=True)
def test_remove_database_dumps_without_dump_path_present_skips_removal():
def test_remove_data_source_dumps_without_dump_path_present_skips_removal():
flexmock(module.os.path).should_receive('expanduser').and_return('databases/localhost')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.shutil).should_receive('rmtree').never()
module.remove_database_dumps('databases', 'SuperDB', 'test.yaml', dry_run=False)
module.remove_data_source_dumps('databases', 'SuperDB', 'test.yaml', dry_run=False)
def test_convert_glob_patterns_to_borg_patterns_removes_leading_slash():

View File

@ -6,93 +6,103 @@ from flexmock import flexmock
from borgmatic.hooks import loki as module
def test_log_handler_gets_labels():
def test_loki_log_buffer_add_value_gets_raw():
'''
Assert that adding labels works
'''
buffer = module.Loki_log_buffer(flexmock(), False)
buffer.add_label('test', 'label')
assert buffer.root['streams'][0]['stream']['test'] == 'label'
buffer.add_label('test2', 'label2')
assert buffer.root['streams'][0]['stream']['test2'] == 'label2'
def test_log_buffer_gets_raw():
'''
Assert that adding values to the log buffer increases it's length
Assert that adding values to the log buffer increases it's length.
'''
buffer = module.Loki_log_buffer(flexmock(), False)
assert len(buffer) == 0
buffer.add_value('Some test log line')
assert len(buffer) == 1
buffer.add_value('Another test log line')
assert len(buffer) == 2
def test_log_buffer_gets_log_messages():
def test_loki_log_buffer_json_serializes_empty_buffer():
'''
Assert that adding log records works
'''
handler = module.Loki_log_handler(flexmock(), False)
handler.emit(flexmock(getMessage=lambda: 'Some test log line'))
assert len(handler.buffer) == 1
def test_log_buffer_json():
'''
Assert that the buffer correctly serializes when empty
Assert that the buffer correctly serializes when empty.
'''
buffer = module.Loki_log_buffer(flexmock(), False)
assert json.loads(buffer.to_request()) == json.loads('{"streams":[{"stream":{},"values":[]}]}')
def test_log_buffer_json_labels():
def test_loki_log_buffer_json_serializes_labels():
'''
Assert that the buffer correctly serializes with labels
Assert that the buffer correctly serializes with labels.
'''
buffer = module.Loki_log_buffer(flexmock(), False)
buffer.add_label('test', 'label')
assert json.loads(buffer.to_request()) == json.loads(
'{"streams":[{"stream":{"test": "label"},"values":[]}]}'
)
def test_log_buffer_json_log_lines():
def test_loki_log_buffer_json_serializes_log_lines():
'''
Assert that log lines end up in the correct place in the log buffer
Assert that log lines end up in the correct place in the log buffer.
'''
buffer = module.Loki_log_buffer(flexmock(), False)
buffer.add_value('Some test log line')
assert json.loads(buffer.to_request())['streams'][0]['values'][0][1] == 'Some test log line'
def test_log_handler_post():
def test_loki_log_handler_add_label_gets_labels():
'''
Assert that the flush function sends a post request after a certain limit
Assert that adding labels works.
'''
buffer = module.Loki_log_buffer(flexmock(), False)
buffer.add_label('test', 'label')
assert buffer.root['streams'][0]['stream']['test'] == 'label'
buffer.add_label('test2', 'label2')
assert buffer.root['streams'][0]['stream']['test2'] == 'label2'
def test_loki_log_handler_emit_gets_log_messages():
'''
Assert that adding log records works.
'''
handler = module.Loki_log_handler(flexmock(), False)
handler.emit(flexmock(getMessage=lambda: 'Some test log line'))
assert len(handler.buffer) == 1
def test_loki_log_handler_raw_posts_to_server():
'''
Assert that the flush function sends a post request after a certain limit.
'''
handler = module.Loki_log_handler(flexmock(), False)
flexmock(module.requests).should_receive('post').and_return(
flexmock(raise_for_status=lambda: '')
).once()
for num in range(int(module.MAX_BUFFER_LINES * 1.5)):
handler.raw(num)
def test_log_handler_post_failiure():
def test_loki_log_handler_raw_post_failure_does_not_raise():
'''
Assert that the flush function catches request exceptions
Assert that the flush function catches request exceptions.
'''
handler = module.Loki_log_handler(flexmock(), False)
flexmock(module.requests).should_receive('post').and_return(
flexmock(raise_for_status=lambda: (_ for _ in ()).throw(requests.RequestException()))
).once()
for num in range(int(module.MAX_BUFFER_LINES * 1.5)):
handler.raw(num)
def test_log_handler_empty_flush_noop():
def test_loki_log_handler_flush_with_empty_buffer_does_not_raise():
'''
Test that flushing an empty buffer does indeed nothing
Test that flushing an empty buffer does indeed nothing.
'''
handler = module.Loki_log_handler(flexmock(), False)
handler.flush()

View File

@ -44,7 +44,7 @@ def test_database_names_to_dump_queries_mariadb_for_database_names():
assert names == ('foo', 'bar')
def test_dump_databases_dumps_each_database():
def test_dump_data_sources_dumps_each_database():
databases = [{'name': 'foo'}, {'name': 'bar'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('')
@ -63,10 +63,10 @@ def test_dump_databases_dumps_each_database():
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dump_databases_dumps_with_password():
def test_dump_data_sources_dumps_with_password():
database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'}
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
@ -84,10 +84,10 @@ def test_dump_databases_dumps_with_password():
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases([database], {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources([database], {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_dumps_all_databases_at_once():
def test_dump_data_sources_dumps_all_databases_at_once():
databases = [{'name': 'all'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
@ -102,10 +102,10 @@ def test_dump_databases_dumps_all_databases_at_once():
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_dumps_all_databases_separately_when_format_configured():
def test_dump_data_sources_dumps_all_databases_separately_when_format_configured():
databases = [{'name': 'all', 'format': 'sql'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('')
@ -122,7 +122,7 @@ def test_dump_databases_dumps_all_databases_separately_when_format_configured():
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_database_names_to_dump_runs_mariadb_with_list_options():
@ -144,7 +144,7 @@ def test_database_names_to_dump_runs_mariadb_with_list_options():
def test_execute_dump_command_runs_mariadb_dump():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -177,7 +177,7 @@ def test_execute_dump_command_runs_mariadb_dump():
def test_execute_dump_command_runs_mariadb_dump_without_add_drop_database():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -209,7 +209,7 @@ def test_execute_dump_command_runs_mariadb_dump_without_add_drop_database():
def test_execute_dump_command_runs_mariadb_dump_with_hostname_and_port():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -248,7 +248,7 @@ def test_execute_dump_command_runs_mariadb_dump_with_hostname_and_port():
def test_execute_dump_command_runs_mariadb_dump_with_username_and_password():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -283,7 +283,7 @@ def test_execute_dump_command_runs_mariadb_dump_with_username_and_password():
def test_execute_dump_command_runs_mariadb_dump_with_options():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -316,7 +316,7 @@ def test_execute_dump_command_runs_mariadb_dump_with_options():
def test_execute_dump_command_with_duplicate_dump_skips_mariadb_dump():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(True)
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
@ -336,7 +336,7 @@ def test_execute_dump_command_with_duplicate_dump_skips_mariadb_dump():
def test_execute_dump_command_with_dry_run_skips_mariadb_dump():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -356,31 +356,31 @@ def test_execute_dump_command_with_dry_run_skips_mariadb_dump():
)
def test_dump_databases_errors_for_missing_all_databases():
def test_dump_data_sources_errors_for_missing_all_databases():
databases = [{'name': 'all'}]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module).should_receive('database_names_to_dump').and_return(())
with pytest.raises(ValueError):
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False)
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False)
def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run():
def test_dump_data_sources_does_not_error_for_missing_all_databases_with_dry_run():
databases = [{'name': 'all'}]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module).should_receive('database_names_to_dump').and_return(())
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=True) == []
def test_restore_database_dump_runs_mariadb_to_restore():
databases_config = [{'name': 'foo'}, {'name': 'bar'}]
def test_restore_data_source_dump_runs_mariadb_to_restore():
hook_config = [{'name': 'foo'}, {'name': 'bar'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -391,11 +391,11 @@ def test_restore_database_dump_runs_mariadb_to_restore():
extra_environment=None,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=False,
extract_process=extract_process,
connection_params={
@ -407,31 +407,8 @@ def test_restore_database_dump_runs_mariadb_to_restore():
)
def test_restore_database_dump_errors_when_database_missing_from_configuration():
databases_config = [{'name': 'foo'}, {'name': 'bar'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').never()
with pytest.raises(ValueError):
module.restore_database_dump(
databases_config,
{},
'test.yaml',
database_name='other',
dry_run=False,
extract_process=extract_process,
connection_params={
'hostname': None,
'port': None,
'username': None,
'password': None,
},
)
def test_restore_database_dump_runs_mariadb_with_options():
databases_config = [{'name': 'foo', 'restore_options': '--harder'}]
def test_restore_data_source_dump_runs_mariadb_with_options():
hook_config = [{'name': 'foo', 'restore_options': '--harder'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -442,11 +419,11 @@ def test_restore_database_dump_runs_mariadb_with_options():
extra_environment=None,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -458,8 +435,8 @@ def test_restore_database_dump_runs_mariadb_with_options():
)
def test_restore_database_dump_runs_mariadb_with_hostname_and_port():
databases_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
def test_restore_data_source_dump_runs_mariadb_with_hostname_and_port():
hook_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -479,11 +456,11 @@ def test_restore_database_dump_runs_mariadb_with_hostname_and_port():
extra_environment=None,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -495,8 +472,8 @@ def test_restore_database_dump_runs_mariadb_with_hostname_and_port():
)
def test_restore_database_dump_runs_mariadb_with_username_and_password():
databases_config = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}]
def test_restore_data_source_dump_runs_mariadb_with_username_and_password():
hook_config = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -507,11 +484,11 @@ def test_restore_database_dump_runs_mariadb_with_username_and_password():
extra_environment={'MYSQL_PWD': 'trustsome1'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -523,8 +500,8 @@ def test_restore_database_dump_runs_mariadb_with_username_and_password():
)
def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore():
databases_config = [
def test_restore_data_source_dump_with_connection_params_uses_connection_params_for_restore():
hook_config = [
{
'name': 'foo',
'username': 'root',
@ -556,11 +533,11 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
extra_environment={'MYSQL_PWD': 'clipassword'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -572,8 +549,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
)
def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore():
databases_config = [
def test_restore_data_source_dump_without_connection_params_uses_restore_params_in_config_for_restore():
hook_config = [
{
'name': 'foo',
'username': 'root',
@ -607,11 +584,11 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
extra_environment={'MYSQL_PWD': 'restorepass'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -623,16 +600,16 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
)
def test_restore_database_dump_with_dry_run_skips_restore():
databases_config = [{'name': 'foo'}]
def test_restore_data_source_dump_with_dry_run_skips_restore():
hook_config = [{'name': 'foo'}]
flexmock(module).should_receive('execute_command_with_processes').never()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=True,
extract_process=flexmock(),
connection_params={

View File

@ -1,16 +1,15 @@
import logging
import pytest
from flexmock import flexmock
from borgmatic.hooks import mongodb as module
def test_dump_databases_runs_mongodump_for_each_database():
def test_dump_data_sources_runs_mongodump_for_each_database():
databases = [{'name': 'foo'}, {'name': 'bar'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -22,26 +21,26 @@ def test_dump_databases_runs_mongodump_for_each_database():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dump_databases_with_dry_run_skips_mongodump():
def test_dump_data_sources_with_dry_run_skips_mongodump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=True) == []
def test_dump_databases_runs_mongodump_with_hostname_and_port():
def test_dump_data_sources_runs_mongodump_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/database.example.org/foo'
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -63,10 +62,10 @@ def test_dump_databases_runs_mongodump_with_hostname_and_port():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_mongodump_with_username_and_password():
def test_dump_data_sources_runs_mongodump_with_username_and_password():
databases = [
{
'name': 'foo',
@ -77,7 +76,7 @@ def test_dump_databases_runs_mongodump_with_username_and_password():
]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -101,13 +100,13 @@ def test_dump_databases_runs_mongodump_with_username_and_password():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_mongodump_with_directory_format():
def test_dump_data_sources_runs_mongodump_with_directory_format():
databases = [{'name': 'foo', 'format': 'directory'}]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.dump).should_receive('create_parent_directory_for_dump')
@ -118,32 +117,40 @@ def test_dump_databases_runs_mongodump_with_directory_format():
shell=True,
).and_return(flexmock()).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == []
def test_dump_databases_runs_mongodump_with_options():
def test_dump_data_sources_runs_mongodump_with_options():
databases = [{'name': 'foo', 'options': '--stuff=such'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
('mongodump', '--db', 'foo', '--stuff=such', '--archive', '>', 'databases/localhost/foo'),
(
'mongodump',
'--db',
'foo',
'--stuff=such',
'--archive',
'>',
'databases/localhost/foo',
),
shell=True,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_mongodumpall_for_all_databases():
def test_dump_data_sources_runs_mongodumpall_for_all_databases():
databases = [{'name': 'all'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -154,27 +161,27 @@ def test_dump_databases_runs_mongodumpall_for_all_databases():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_restore_database_dump_runs_mongorestore():
databases_config = [{'name': 'foo', 'schemas': None}, {'name': 'bar'}]
def test_restore_data_source_dump_runs_mongorestore():
hook_config = [{'name': 'foo', 'schemas': None}, {'name': 'bar'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
['mongorestore', '--archive', '--drop', '--db', 'foo'],
['mongorestore', '--archive', '--drop'],
processes=[extract_process],
output_log_level=logging.DEBUG,
input_file=extract_process.stdout,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=False,
extract_process=extract_process,
connection_params={
@ -186,46 +193,19 @@ def test_restore_database_dump_runs_mongorestore():
)
def test_restore_database_dump_errors_on_empty_databases_config():
databases_config = []
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').never()
flexmock(module).should_receive('execute_command').never()
with pytest.raises(ValueError):
module.restore_database_dump(
databases_config,
{},
'test.yaml',
database_name='foo',
dry_run=False,
extract_process=flexmock(),
connection_params={
'hostname': None,
'port': None,
'username': None,
'password': None,
},
)
def test_restore_database_dump_runs_mongorestore_with_hostname_and_port():
databases_config = [
def test_restore_data_source_dump_runs_mongorestore_with_hostname_and_port():
hook_config = [
{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433, 'schemas': None}
]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
[
'mongorestore',
'--archive',
'--drop',
'--db',
'foo',
'--host',
'database.example.org',
'--port',
@ -236,11 +216,11 @@ def test_restore_database_dump_runs_mongorestore_with_hostname_and_port():
input_file=extract_process.stdout,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -252,8 +232,8 @@ def test_restore_database_dump_runs_mongorestore_with_hostname_and_port():
)
def test_restore_database_dump_runs_mongorestore_with_username_and_password():
databases_config = [
def test_restore_data_source_dump_runs_mongorestore_with_username_and_password():
hook_config = [
{
'name': 'foo',
'username': 'mongo',
@ -265,14 +245,12 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password():
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
[
'mongorestore',
'--archive',
'--drop',
'--db',
'foo',
'--username',
'mongo',
'--password',
@ -285,11 +263,11 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password():
input_file=extract_process.stdout,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -301,8 +279,8 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password():
)
def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore():
databases_config = [
def test_restore_data_source_dump_with_connection_params_uses_connection_params_for_restore():
hook_config = [
{
'name': 'foo',
'username': 'mongo',
@ -318,14 +296,12 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
[
'mongorestore',
'--archive',
'--drop',
'--db',
'foo',
'--host',
'clihost',
'--port',
@ -342,11 +318,11 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
input_file=extract_process.stdout,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -358,8 +334,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
)
def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore():
databases_config = [
def test_restore_data_source_dump_without_connection_params_uses_restore_params_in_config_for_restore():
hook_config = [
{
'name': 'foo',
'username': 'mongo',
@ -375,14 +351,12 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
[
'mongorestore',
'--archive',
'--drop',
'--db',
'foo',
'--host',
'restorehost',
'--port',
@ -399,11 +373,11 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
input_file=extract_process.stdout,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -415,24 +389,24 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
)
def test_restore_database_dump_runs_mongorestore_with_options():
databases_config = [{'name': 'foo', 'restore_options': '--harder', 'schemas': None}]
def test_restore_data_source_dump_runs_mongorestore_with_options():
hook_config = [{'name': 'foo', 'restore_options': '--harder', 'schemas': None}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
['mongorestore', '--archive', '--drop', '--db', 'foo', '--harder'],
['mongorestore', '--archive', '--drop', '--harder'],
processes=[extract_process],
output_log_level=logging.DEBUG,
input_file=extract_process.stdout,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -445,18 +419,16 @@ def test_restore_database_dump_runs_mongorestore_with_options():
def test_restore_databases_dump_runs_mongorestore_with_schemas():
databases_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}]
hook_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
[
'mongorestore',
'--archive',
'--drop',
'--db',
'foo',
'--nsInclude',
'bar',
'--nsInclude',
@ -467,11 +439,11 @@ def test_restore_databases_dump_runs_mongorestore_with_schemas():
input_file=extract_process.stdout,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -483,12 +455,12 @@ def test_restore_databases_dump_runs_mongorestore_with_schemas():
)
def test_restore_database_dump_runs_psql_for_all_database_dump():
databases_config = [{'name': 'all', 'schemas': None}]
def test_restore_data_source_dump_runs_psql_for_all_database_dump():
hook_config = [{'name': 'all', 'schemas': None}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
['mongorestore', '--archive'],
processes=[extract_process],
@ -496,11 +468,11 @@ def test_restore_database_dump_runs_psql_for_all_database_dump():
input_file=extract_process.stdout,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='all',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -512,18 +484,18 @@ def test_restore_database_dump_runs_psql_for_all_database_dump():
)
def test_restore_database_dump_with_dry_run_skips_restore():
databases_config = [{'name': 'foo', 'schemas': None}]
def test_restore_data_source_dump_with_dry_run_skips_restore():
hook_config = [{'name': 'foo', 'schemas': None}]
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').never()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=True,
extract_process=flexmock(),
connection_params={
@ -535,23 +507,23 @@ def test_restore_database_dump_with_dry_run_skips_restore():
)
def test_restore_database_dump_without_extract_process_restores_from_disk():
databases_config = [{'name': 'foo', 'format': 'directory', 'schemas': None}]
def test_restore_data_source_dump_without_extract_process_restores_from_disk():
hook_config = [{'name': 'foo', 'format': 'directory', 'schemas': None}]
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('/dump/path')
flexmock(module).should_receive('execute_command_with_processes').with_args(
['mongorestore', '--dir', '/dump/path', '--drop', '--db', 'foo'],
['mongorestore', '--dir', '/dump/path', '--drop'],
processes=[],
output_log_level=logging.DEBUG,
input_file=None,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=False,
extract_process=None,
connection_params={

View File

@ -44,7 +44,7 @@ def test_database_names_to_dump_queries_mysql_for_database_names():
assert names == ('foo', 'bar')
def test_dump_databases_dumps_each_database():
def test_dump_data_sources_dumps_each_database():
databases = [{'name': 'foo'}, {'name': 'bar'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('')
@ -63,10 +63,10 @@ def test_dump_databases_dumps_each_database():
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dump_databases_dumps_with_password():
def test_dump_data_sources_dumps_with_password():
database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'}
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
@ -84,10 +84,10 @@ def test_dump_databases_dumps_with_password():
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases([database], {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources([database], {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_dumps_all_databases_at_once():
def test_dump_data_sources_dumps_all_databases_at_once():
databases = [{'name': 'all'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
@ -102,10 +102,10 @@ def test_dump_databases_dumps_all_databases_at_once():
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_dumps_all_databases_separately_when_format_configured():
def test_dump_data_sources_dumps_all_databases_separately_when_format_configured():
databases = [{'name': 'all', 'format': 'sql'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('')
@ -122,7 +122,7 @@ def test_dump_databases_dumps_all_databases_separately_when_format_configured():
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_database_names_to_dump_runs_mysql_with_list_options():
@ -144,7 +144,7 @@ def test_database_names_to_dump_runs_mysql_with_list_options():
def test_execute_dump_command_runs_mysqldump():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -177,7 +177,7 @@ def test_execute_dump_command_runs_mysqldump():
def test_execute_dump_command_runs_mysqldump_without_add_drop_database():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -209,7 +209,7 @@ def test_execute_dump_command_runs_mysqldump_without_add_drop_database():
def test_execute_dump_command_runs_mysqldump_with_hostname_and_port():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -248,7 +248,7 @@ def test_execute_dump_command_runs_mysqldump_with_hostname_and_port():
def test_execute_dump_command_runs_mysqldump_with_username_and_password():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -283,7 +283,7 @@ def test_execute_dump_command_runs_mysqldump_with_username_and_password():
def test_execute_dump_command_runs_mysqldump_with_options():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -316,7 +316,7 @@ def test_execute_dump_command_runs_mysqldump_with_options():
def test_execute_dump_command_with_duplicate_dump_skips_mysqldump():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(True)
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
@ -336,7 +336,7 @@ def test_execute_dump_command_with_duplicate_dump_skips_mysqldump():
def test_execute_dump_command_with_dry_run_skips_mysqldump():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
@ -356,31 +356,31 @@ def test_execute_dump_command_with_dry_run_skips_mysqldump():
)
def test_dump_databases_errors_for_missing_all_databases():
def test_dump_data_sources_errors_for_missing_all_databases():
databases = [{'name': 'all'}]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module).should_receive('database_names_to_dump').and_return(())
with pytest.raises(ValueError):
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False)
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False)
def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run():
def test_dump_data_sources_does_not_error_for_missing_all_databases_with_dry_run():
databases = [{'name': 'all'}]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module).should_receive('database_names_to_dump').and_return(())
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=True) == []
def test_restore_database_dump_runs_mysql_to_restore():
databases_config = [{'name': 'foo'}, {'name': 'bar'}]
def test_restore_data_source_dump_runs_mysql_to_restore():
hook_config = [{'name': 'foo'}, {'name': 'bar'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -391,11 +391,11 @@ def test_restore_database_dump_runs_mysql_to_restore():
extra_environment=None,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=False,
extract_process=extract_process,
connection_params={
@ -407,31 +407,8 @@ def test_restore_database_dump_runs_mysql_to_restore():
)
def test_restore_database_dump_errors_when_database_missing_from_configuration():
databases_config = [{'name': 'foo'}, {'name': 'bar'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').never()
with pytest.raises(ValueError):
module.restore_database_dump(
databases_config,
{},
'test.yaml',
database_name='other',
dry_run=False,
extract_process=extract_process,
connection_params={
'hostname': None,
'port': None,
'username': None,
'password': None,
},
)
def test_restore_database_dump_runs_mysql_with_options():
databases_config = [{'name': 'foo', 'restore_options': '--harder'}]
def test_restore_data_source_dump_runs_mysql_with_options():
hook_config = [{'name': 'foo', 'restore_options': '--harder'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -442,11 +419,11 @@ def test_restore_database_dump_runs_mysql_with_options():
extra_environment=None,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -458,8 +435,8 @@ def test_restore_database_dump_runs_mysql_with_options():
)
def test_restore_database_dump_runs_mysql_with_hostname_and_port():
databases_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
def test_restore_data_source_dump_runs_mysql_with_hostname_and_port():
hook_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -479,11 +456,11 @@ def test_restore_database_dump_runs_mysql_with_hostname_and_port():
extra_environment=None,
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -495,8 +472,8 @@ def test_restore_database_dump_runs_mysql_with_hostname_and_port():
)
def test_restore_database_dump_runs_mysql_with_username_and_password():
databases_config = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}]
def test_restore_data_source_dump_runs_mysql_with_username_and_password():
hook_config = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -507,11 +484,11 @@ def test_restore_database_dump_runs_mysql_with_username_and_password():
extra_environment={'MYSQL_PWD': 'trustsome1'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -523,8 +500,8 @@ def test_restore_database_dump_runs_mysql_with_username_and_password():
)
def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore():
databases_config = [
def test_restore_data_source_dump_with_connection_params_uses_connection_params_for_restore():
hook_config = [
{
'name': 'foo',
'username': 'root',
@ -556,11 +533,11 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
extra_environment={'MYSQL_PWD': 'clipassword'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=False,
extract_process=extract_process,
connection_params={
@ -572,8 +549,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
)
def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore():
databases_config = [
def test_restore_data_source_dump_without_connection_params_uses_restore_params_in_config_for_restore():
hook_config = [
{
'name': 'foo',
'username': 'root',
@ -607,11 +584,11 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
extra_environment={'MYSQL_PWD': 'restorepass'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -623,16 +600,16 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
)
def test_restore_database_dump_with_dry_run_skips_restore():
databases_config = [{'name': 'foo'}]
def test_restore_data_source_dump_with_dry_run_skips_restore():
hook_config = [{'name': 'foo'}]
flexmock(module).should_receive('execute_command_with_processes').never()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=True,
extract_process=flexmock(),
connection_params={

View File

@ -185,7 +185,7 @@ def test_database_names_to_dump_with_all_and_psql_command_uses_custom_command():
)
def test_dump_databases_runs_pg_dump_for_each_database():
def test_dump_data_sources_runs_pg_dump_for_each_database():
databases = [{'name': 'foo'}, {'name': 'bar'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
@ -193,7 +193,7 @@ def test_dump_databases_runs_pg_dump_for_each_database():
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return(
('bar',)
)
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.os.path).should_receive('exists').and_return(False)
@ -217,69 +217,69 @@ def test_dump_databases_runs_pg_dump_for_each_database():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dump_databases_raises_when_no_database_names_to_dump():
def test_dump_data_sources_raises_when_no_database_names_to_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(())
with pytest.raises(ValueError):
module.dump_databases(databases, {}, 'test.yaml', dry_run=False)
module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False)
def test_dump_databases_does_not_raise_when_no_database_names_to_dump():
def test_dump_data_sources_does_not_raise_when_no_database_names_to_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(())
module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == []
module.dump_data_sources(databases, {}, 'test.yaml', dry_run=True) == []
def test_dump_databases_with_duplicate_dump_skips_pg_dump():
def test_dump_data_sources_with_duplicate_dump_skips_pg_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return(
('bar',)
)
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.os.path).should_receive('exists').and_return(True)
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == []
def test_dump_databases_with_dry_run_skips_pg_dump():
def test_dump_data_sources_with_dry_run_skips_pg_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return(
('bar',)
)
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=True) == []
def test_dump_databases_runs_pg_dump_with_hostname_and_port():
def test_dump_data_sources_runs_pg_dump_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
process = flexmock()
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/database.example.org/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
@ -306,10 +306,10 @@ def test_dump_databases_runs_pg_dump_with_hostname_and_port():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_pg_dump_with_username_and_password():
def test_dump_data_sources_runs_pg_dump_with_username_and_password():
databases = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}]
process = flexmock()
flexmock(module).should_receive('make_extra_environment').and_return(
@ -317,7 +317,7 @@ def test_dump_databases_runs_pg_dump_with_username_and_password():
)
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
@ -342,15 +342,15 @@ def test_dump_databases_runs_pg_dump_with_username_and_password():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_pg_dump_with_directory_format():
def test_dump_data_sources_runs_pg_dump_with_directory_format():
databases = [{'name': 'foo', 'format': 'directory'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
@ -373,16 +373,16 @@ def test_dump_databases_runs_pg_dump_with_directory_format():
extra_environment={'PGSSLMODE': 'disable'},
).and_return(flexmock()).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == []
def test_dump_databases_runs_pg_dump_with_options():
def test_dump_data_sources_runs_pg_dump_with_options():
databases = [{'name': 'foo', 'options': '--stuff=such'}]
process = flexmock()
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
@ -406,16 +406,16 @@ def test_dump_databases_runs_pg_dump_with_options():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_pg_dumpall_for_all_databases():
def test_dump_data_sources_runs_pg_dumpall_for_all_databases():
databases = [{'name': 'all'}]
process = flexmock()
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('all',))
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
@ -428,16 +428,16 @@ def test_dump_databases_runs_pg_dumpall_for_all_databases():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_non_default_pg_dump():
def test_dump_data_sources_runs_non_default_pg_dump():
databases = [{'name': 'foo', 'pg_dump_command': 'special_pg_dump'}]
process = flexmock()
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
@ -460,16 +460,16 @@ def test_dump_databases_runs_non_default_pg_dump():
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_restore_database_dump_runs_pg_restore():
databases_config = [{'name': 'foo', 'schemas': None}, {'name': 'bar'}]
def test_restore_data_source_dump_runs_pg_restore():
hook_config = [{'name': 'foo', 'schemas': None}, {'name': 'bar'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'pg_restore',
@ -499,11 +499,11 @@ def test_restore_database_dump_runs_pg_restore():
extra_environment={'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=False,
extract_process=extract_process,
connection_params={
@ -515,39 +515,15 @@ def test_restore_database_dump_runs_pg_restore():
)
def test_restore_database_dump_errors_when_database_missing_from_configuration():
databases_config = [{'name': 'foo', 'schemas': None}, {'name': 'bar'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').never()
flexmock(module).should_receive('execute_command').never()
with pytest.raises(ValueError):
module.restore_database_dump(
databases_config,
{},
'test.yaml',
database_name='other',
dry_run=False,
extract_process=extract_process,
connection_params={
'hostname': None,
'port': None,
'username': None,
'password': None,
},
)
def test_restore_database_dump_runs_pg_restore_with_hostname_and_port():
databases_config = [
def test_restore_data_source_dump_runs_pg_restore_with_hostname_and_port():
hook_config = [
{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433, 'schemas': None}
]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'pg_restore',
@ -585,11 +561,11 @@ def test_restore_database_dump_runs_pg_restore_with_hostname_and_port():
extra_environment={'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -601,8 +577,8 @@ def test_restore_database_dump_runs_pg_restore_with_hostname_and_port():
)
def test_restore_database_dump_runs_pg_restore_with_username_and_password():
databases_config = [
def test_restore_data_source_dump_runs_pg_restore_with_username_and_password():
hook_config = [
{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1', 'schemas': None}
]
extract_process = flexmock(stdout=flexmock())
@ -611,7 +587,7 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password():
{'PGPASSWORD': 'trustsome1', 'PGSSLMODE': 'disable'}
)
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'pg_restore',
@ -645,11 +621,11 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password():
extra_environment={'PGPASSWORD': 'trustsome1', 'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -661,8 +637,8 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password():
)
def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore():
databases_config = [
def test_restore_data_source_dump_with_connection_params_uses_connection_params_for_restore():
hook_config = [
{
'name': 'foo',
'hostname': 'database.example.org',
@ -682,7 +658,7 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
{'PGPASSWORD': 'clipassword', 'PGSSLMODE': 'disable'}
)
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'pg_restore',
@ -724,11 +700,11 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
extra_environment={'PGPASSWORD': 'clipassword', 'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=False,
extract_process=extract_process,
connection_params={
@ -740,8 +716,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
)
def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore():
databases_config = [
def test_restore_data_source_dump_without_connection_params_uses_restore_params_in_config_for_restore():
hook_config = [
{
'name': 'foo',
'hostname': 'database.example.org',
@ -761,7 +737,7 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
{'PGPASSWORD': 'restorepassword', 'PGSSLMODE': 'disable'}
)
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'pg_restore',
@ -803,11 +779,11 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
extra_environment={'PGPASSWORD': 'restorepassword', 'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -819,8 +795,8 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
)
def test_restore_database_dump_runs_pg_restore_with_options():
databases_config = [
def test_restore_data_source_dump_runs_pg_restore_with_options():
hook_config = [
{
'name': 'foo',
'restore_options': '--harder',
@ -832,7 +808,7 @@ def test_restore_database_dump_runs_pg_restore_with_options():
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'pg_restore',
@ -864,11 +840,11 @@ def test_restore_database_dump_runs_pg_restore_with_options():
extra_environment={'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -880,13 +856,13 @@ def test_restore_database_dump_runs_pg_restore_with_options():
)
def test_restore_database_dump_runs_psql_for_all_database_dump():
databases_config = [{'name': 'all', 'schemas': None}]
def test_restore_data_source_dump_runs_psql_for_all_database_dump():
hook_config = [{'name': 'all', 'schemas': None}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'psql',
@ -903,11 +879,11 @@ def test_restore_database_dump_runs_psql_for_all_database_dump():
extra_environment={'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='all',
data_source={'name': 'all'},
dry_run=False,
extract_process=extract_process,
connection_params={
@ -919,13 +895,13 @@ def test_restore_database_dump_runs_psql_for_all_database_dump():
)
def test_restore_database_dump_runs_psql_for_plain_database_dump():
databases_config = [{'name': 'foo', 'format': 'plain', 'schemas': None}]
def test_restore_data_source_dump_runs_psql_for_plain_database_dump():
hook_config = [{'name': 'foo', 'format': 'plain', 'schemas': None}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
('psql', '--no-password', '--no-psqlrc', '--dbname', 'foo'),
processes=[extract_process],
@ -947,11 +923,11 @@ def test_restore_database_dump_runs_psql_for_plain_database_dump():
extra_environment={'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -963,8 +939,8 @@ def test_restore_database_dump_runs_psql_for_plain_database_dump():
)
def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
databases_config = [
def test_restore_data_source_dump_runs_non_default_pg_restore_and_psql():
hook_config = [
{
'name': 'foo',
'pg_restore_command': 'docker exec mycontainer pg_restore',
@ -976,7 +952,7 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'docker',
@ -1012,11 +988,11 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
extra_environment={'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={
@ -1028,19 +1004,19 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
)
def test_restore_database_dump_with_dry_run_skips_restore():
databases_config = [{'name': 'foo', 'schemas': None}]
def test_restore_data_source_dump_with_dry_run_skips_restore():
hook_config = [{'name': 'foo', 'schemas': None}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module.dump).should_receive('make_data_source_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').never()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=True,
extract_process=flexmock(),
connection_params={
@ -1052,12 +1028,12 @@ def test_restore_database_dump_with_dry_run_skips_restore():
)
def test_restore_database_dump_without_extract_process_restores_from_disk():
databases_config = [{'name': 'foo', 'schemas': None}]
def test_restore_data_source_dump_without_extract_process_restores_from_disk():
hook_config = [{'name': 'foo', 'schemas': None}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('/dump/path')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'pg_restore',
@ -1088,11 +1064,11 @@ def test_restore_database_dump_without_extract_process_restores_from_disk():
extra_environment={'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source={'name': 'foo'},
dry_run=False,
extract_process=None,
connection_params={
@ -1104,12 +1080,12 @@ def test_restore_database_dump_without_extract_process_restores_from_disk():
)
def test_restore_database_dump_with_schemas_restores_schemas():
databases_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}]
def test_restore_data_source_dump_with_schemas_restores_schemas():
hook_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path')
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return('/dump/path')
flexmock(module).should_receive('execute_command_with_processes').with_args(
(
'pg_restore',
@ -1144,11 +1120,11 @@ def test_restore_database_dump_with_schemas_restores_schemas():
extra_environment={'PGSSLMODE': 'disable'},
).once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='foo',
data_source=hook_config[0],
dry_run=False,
extract_process=None,
connection_params={

View File

@ -1,26 +1,25 @@
import logging
import pytest
from flexmock import flexmock
from borgmatic.hooks import sqlite as module
def test_dump_databases_logs_and_skips_if_dump_already_exists():
def test_dump_data_sources_logs_and_skips_if_dump_already_exists():
databases = [{'path': '/path/to/database', 'name': 'database'}]
flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'/path/to/dump/database'
)
flexmock(module.os.path).should_receive('exists').and_return(True)
flexmock(module.dump).should_receive('create_parent_directory_for_dump').never()
flexmock(module).should_receive('execute_command').never()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == []
def test_dump_databases_dumps_each_database():
def test_dump_data_sources_dumps_each_database():
databases = [
{'path': '/path/to/database1', 'name': 'database1'},
{'path': '/path/to/database2', 'name': 'database2'},
@ -28,7 +27,7 @@ def test_dump_databases_dumps_each_database():
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'/path/to/dump/database'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
@ -37,7 +36,7 @@ def test_dump_databases_dumps_each_database():
processes[1]
)
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dumping_database_with_non_existent_path_warns_and_dumps_database():
@ -48,14 +47,14 @@ def test_dumping_database_with_non_existent_path_warns_and_dumps_database():
flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump')
flexmock(module.logger).should_receive('warning').once()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'/path/to/dump/database'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_parent_directory_for_dump')
flexmock(module).should_receive('execute_command').and_return(processes[0])
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dumping_database_with_name_all_warns_and_dumps_all_databases():
@ -68,32 +67,32 @@ def test_dumping_database_with_name_all_warns_and_dumps_all_databases():
flexmock(module.logger).should_receive(
'warning'
).twice() # once for the name=all, once for the non-existent path
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'/path/to/dump/database'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_parent_directory_for_dump')
flexmock(module).should_receive('execute_command').and_return(processes[0])
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dump_databases_does_not_dump_if_dry_run():
def test_dump_data_sources_does_not_dump_if_dry_run():
databases = [{'path': '/path/to/database', 'name': 'database'}]
flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
flexmock(module.dump).should_receive('make_data_source_dump_filename').and_return(
'/path/to/dump/database'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_parent_directory_for_dump').never()
flexmock(module).should_receive('execute_command').never()
assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == []
assert module.dump_data_sources(databases, {}, 'test.yaml', dry_run=True) == []
def test_restore_database_dump_restores_database():
databases_config = [{'path': '/path/to/database', 'name': 'database'}, {'name': 'other'}]
def test_restore_data_source_dump_restores_database():
hook_config = [{'path': '/path/to/database', 'name': 'database'}, {'name': 'other'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').with_args(
@ -108,19 +107,19 @@ def test_restore_database_dump_restores_database():
flexmock(module.os).should_receive('remove').once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='database',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={'restore_path': None},
)
def test_restore_database_dump_with_connection_params_uses_connection_params_for_restore():
databases_config = [
def test_restore_data_source_dump_with_connection_params_uses_connection_params_for_restore():
hook_config = [
{'path': '/path/to/database', 'name': 'database', 'restore_path': 'config/path/to/database'}
]
extract_process = flexmock(stdout=flexmock())
@ -137,19 +136,19 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
flexmock(module.os).should_receive('remove').once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='database',
data_source={'name': 'database'},
dry_run=False,
extract_process=extract_process,
connection_params={'restore_path': 'cli/path/to/database'},
)
def test_restore_database_dump_without_connection_params_uses_restore_params_in_config_for_restore():
databases_config = [
def test_restore_data_source_dump_without_connection_params_uses_restore_params_in_config_for_restore():
hook_config = [
{'path': '/path/to/database', 'name': 'database', 'restore_path': 'config/path/to/database'}
]
extract_process = flexmock(stdout=flexmock())
@ -166,46 +165,30 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
flexmock(module.os).should_receive('remove').once()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='database',
data_source=hook_config[0],
dry_run=False,
extract_process=extract_process,
connection_params={'restore_path': None},
)
def test_restore_database_dump_does_not_restore_database_if_dry_run():
databases_config = [{'path': '/path/to/database', 'name': 'database'}]
def test_restore_data_source_dump_does_not_restore_database_if_dry_run():
hook_config = [{'path': '/path/to/database', 'name': 'database'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('execute_command_with_processes').never()
flexmock(module.os).should_receive('remove').never()
module.restore_database_dump(
databases_config,
module.restore_data_source_dump(
hook_config,
{},
'test.yaml',
database_name='database',
data_source={'name': 'database'},
dry_run=True,
extract_process=extract_process,
connection_params={'restore_path': None},
)
def test_restore_database_dump_raises_error_if_database_config_is_empty():
databases_config = []
extract_process = flexmock(stdout=flexmock())
with pytest.raises(ValueError):
module.restore_database_dump(
databases_config,
{},
'test.yaml',
database_name='database',
dry_run=False,
extract_process=extract_process,
connection_params={'restore_path': None},
)