Merge branch 'master' into pr-working-directory

fabianschilling 2022-01-05 09:30:27 +00:00
commit b39f08694d
18 changed files with 608 additions and 85 deletions


@ -1,54 +1,3 @@
---
kind: pipeline
name: python-3-6-alpine-3-9
services:
- name: postgresql
image: postgres:11.9-alpine
environment:
POSTGRES_PASSWORD: test
POSTGRES_DB: test
- name: mysql
image: mariadb:10.3
environment:
MYSQL_ROOT_PASSWORD: test
MYSQL_DATABASE: test
clone:
skip_verify: true
steps:
- name: build
image: alpine:3.9
pull: always
commands:
- scripts/run-full-tests
---
kind: pipeline
name: python-3-7-alpine-3-10
services:
- name: postgresql
image: postgres:11.9-alpine
environment:
POSTGRES_PASSWORD: test
POSTGRES_DB: test
- name: mysql
image: mariadb:10.3
environment:
MYSQL_ROOT_PASSWORD: test
MYSQL_DATABASE: test
clone:
skip_verify: true
steps:
- name: build
image: alpine:3.10
pull: always
commands:
- scripts/run-full-tests
---
kind: pipeline
name: python-3-8-alpine-3-13
@ -63,6 +12,11 @@ services:
environment:
MYSQL_ROOT_PASSWORD: test
MYSQL_DATABASE: test
- name: mongodb
image: mongo:5.0.5
environment:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: test
clone:
skip_verify: true

NEWS

@ -1,8 +1,11 @@
1.5.22.dev0
* #288: Database dump hooks for MongoDB.
* #470: Move mysqldump options to the beginning of the command due to MySQL bug 30994.
* #471: When a command-line configuration override produces a parse error, error cleanly instead of
showing a traceback.
* #476: Fix unicode error when restoring particular MySQL databases.
* Drop support for Python 3.6, which has been end-of-lifed.
* Add support for Python 3.10.
1.5.21
* #28: Optionally retry failing backups via "retries" and "retry_wait" configuration options.


@ -26,7 +26,6 @@ location:
repositories:
- 1234@usw-s001.rsync.net:backups.borg
- k8pDxu32@k8pDxu32.repo.borgbase.com:repo
- user1@scp2.cdn.lima-labs.com:repo
- /var/lib/backups/local.borg
retention:
@ -66,11 +65,11 @@ borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).
<a href="https://www.postgresql.org/"><img src="docs/static/postgresql.png" alt="PostgreSQL" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://www.mysql.com/"><img src="docs/static/mysql.png" alt="MySQL" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://mariadb.com/"><img src="docs/static/mariadb.png" alt="MariaDB" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://www.mongodb.com/"><img src="docs/static/mongodb.png" alt="MongoDB" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://healthchecks.io/"><img src="docs/static/healthchecks.png" alt="Healthchecks" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://cronitor.io/"><img src="docs/static/cronitor.png" alt="Cronitor" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://cronhub.io/"><img src="docs/static/cronhub.png" alt="Cronhub" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://www.pagerduty.com/"><img src="docs/static/pagerduty.png" alt="PagerDuty" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://www.rsync.net/cgi-bin/borg.cgi?campaign=borg&adgroup=borgmatic"><img src="docs/static/rsyncnet.png" alt="rsync.net" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
<a href="https://www.borgbase.com/?utm_source=borgmatic"><img src="docs/static/borgbase.png" alt="BorgBase" height="60px" style="margin-bottom:20px;"></a>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
@ -93,7 +92,6 @@ referral links, but without any tracking scripts or cookies.)
<ul>
<li class="referral"><a href="https://www.borgbase.com/?utm_source=borgmatic">BorgBase</a>: Borg hosting service with support for monitoring, 2FA, and append-only repos</li>
<li class="referral"><a href="https://storage.lima-labs.com/special-pricing-offer-for-borgmatic-users/">Lima-Labs</a>: Affordable, reliable cloud data storage accessable via SSH/SCP/FTP for Borg backups or any other bulk storage needs</li>
</ul>
Additionally, [rsync.net](https://www.rsync.net/products/borg.html) and


@ -781,6 +781,80 @@ properties:
mysqldump/mysql commands (from either MySQL or MariaDB). See
https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or
https://mariadb.com/kb/en/library/mysqldump/ for details.
mongodb_databases:
type: array
items:
type: object
required: ['name']
additionalProperties: false
properties:
name:
type: string
description: |
Database name (required if using this hook). Or
"all" to dump all databases on the host. Note
that using this database hook implicitly enables
both read_special and one_file_system (see
above) to support dump and restore streaming.
example: users
hostname:
type: string
description: |
Database hostname to connect to. Defaults to
connecting to localhost.
example: database.example.org
port:
type: integer
description: Port to connect to. Defaults to 27017.
example: 27018
username:
type: string
description: |
Username with which to connect to the database.
Skip it if no authentication is needed.
example: dbuser
password:
type: string
description: |
Password with which to connect to the database.
Skip it if no authentication is needed.
example: trustsome1
authentication_database:
type: string
description: |
Authentication database where the specified
username exists. If no authentication database
is specified, the database provided in "name"
is used. If "name" is "all", the "admin"
database is used.
example: admin
format:
type: string
enum: ['archive', 'directory']
description: |
Database dump output format. One of "archive"
or "directory". Defaults to "archive". See
mongodump documentation for details. Note that
format is ignored when the database name is
"all".
example: directory
options:
type: string
description: |
Additional mongodump options to pass
directly to the dump command, without performing
any validation on them. See mongodump
documentation for details.
example: --role=someone
description: |
List of one or more MongoDB databases to dump before
creating a backup, run once per configuration file. The
database dumps are added to your source directories at
runtime, backed up, and removed afterwards. Requires
mongodump/mongorestore commands. See
https://docs.mongodb.com/database-tools/mongodump/ and
https://docs.mongodb.com/database-tools/mongorestore/ for
details.
healthchecks:
type: string
description: |


@ -1,6 +1,6 @@
import logging
from borgmatic.hooks import cronhub, cronitor, healthchecks, mysql, pagerduty, postgresql
from borgmatic.hooks import cronhub, cronitor, healthchecks, mongodb, mysql, pagerduty, postgresql
logger = logging.getLogger(__name__)
@ -11,6 +11,7 @@ HOOK_NAME_TO_MODULE = {
'pagerduty': pagerduty,
'postgresql_databases': postgresql,
'mysql_databases': mysql,
'mongodb_databases': mongodb,
}
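Wiring up the new hook here amounts to importing the `mongodb` module and adding it to this mapping: borgmatic looks up hooks by their configuration section name and calls the same-named function on whichever module is registered. A simplified, hypothetical illustration of that dispatch-by-name idea (the real dispatcher in this module has its own signature and error handling):

```python
# Hypothetical sketch of dispatch-by-name; not the actual borgmatic.hooks.dispatch API.
from borgmatic.hooks import mongodb, mysql, postgresql

HOOK_NAME_TO_MODULE = {
    'postgresql_databases': postgresql,
    'mysql_databases': mysql,
    'mongodb_databases': mongodb,
}


def call_hook_function(function_name, hook_name, *args, **kwargs):
    '''Look up the module registered for hook_name and call its function_name with the given arguments.'''
    return getattr(HOOK_NAME_TO_MODULE[hook_name], function_name)(*args, **kwargs)


# For instance, dumping configured MongoDB databases before a backup might dispatch as:
# call_hook_function('dump_databases', 'mongodb_databases', databases, 'config.yaml', location_config, False)
```

Each database hook module exposes the same set of functions (`dump_databases`, `restore_database_dump`, and so on), which is what makes this table-driven dispatch possible.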


@ -6,7 +6,7 @@ from borgmatic.borg.create import DEFAULT_BORGMATIC_SOURCE_DIRECTORY
logger = logging.getLogger(__name__)
DATABASE_HOOK_NAMES = ('postgresql_databases', 'mysql_databases')
DATABASE_HOOK_NAMES = ('postgresql_databases', 'mysql_databases', 'mongodb_databases')
def make_database_dump_path(borgmatic_source_directory, database_hook_name):

borgmatic/hooks/mongodb.py Normal file

@ -0,0 +1,162 @@
import logging
from borgmatic.execute import execute_command, execute_command_with_processes
from borgmatic.hooks import dump
logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover
'''
Make the dump path from the given location configuration and the name of this hook.
'''
return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'mongodb_databases'
)
def dump_databases(databases, log_prefix, location_config, dry_run):
'''
Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given log
prefix in any log entries. Use the given location configuration dict to construct the
destination path.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
'''
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
logger.info('{}: Dumping MongoDB databases{}'.format(log_prefix, dry_run_label))
processes = []
for database in databases:
name = database['name']
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), name, database.get('hostname')
)
dump_format = database.get('format', 'archive')
logger.debug(
'{}: Dumping MongoDB database {} to {}{}'.format(
log_prefix, name, dump_filename, dry_run_label
)
)
if dry_run:
continue
if dump_format == 'directory':
dump.create_parent_directory_for_dump(dump_filename)
else:
dump.create_named_pipe_for_dump(dump_filename)
command = build_dump_command(database, dump_filename, dump_format)
processes.append(execute_command(command, shell=True, run_to_completion=False))
return processes
def build_dump_command(database, dump_filename, dump_format):
'''
Return the mongodump command from a single database configuration.
'''
all_databases = database['name'] == 'all'
command = ['mongodump', '--archive']
if dump_format == 'directory':
command.append(dump_filename)
if 'hostname' in database:
command.extend(('--host', database['hostname']))
if 'port' in database:
command.extend(('--port', str(database['port'])))
if 'username' in database:
command.extend(('--username', database['username']))
if 'password' in database:
command.extend(('--password', database['password']))
if 'authentication_database' in database:
command.extend(('--authenticationDatabase', database['authentication_database']))
if not all_databases:
command.extend(('--db', database['name']))
if 'options' in database:
command.extend(database['options'].split(' '))
if dump_format != 'directory':
command.extend(('>', dump_filename))
return command
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover
'''
Remove all database dump files for this hook regardless of the given databases. Use the log
prefix in any log entries. Use the given location configuration dict to construct the
destination path. If this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(make_dump_path(location_config), 'MongoDB', log_prefix, dry_run)
def make_database_dump_pattern(
databases, log_prefix, location_config, name=None
): # pragma: no cover
'''
Given a sequence of configuration dicts, a prefix to log with, a location configuration dict,
and a database name to match, return the corresponding glob pattern to match the database dump
in an archive.
'''
return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*')
def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process):
'''
Restore the given MongoDB database from an extract stream. The database is supplied as a
one-element sequence containing a dict describing the database, as per the configuration schema.
Use the given log prefix in any log entries. If this is a dry run, then don't actually restore
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce
output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''
if len(database_config) != 1:
raise ValueError('The database configuration value is invalid')
database = database_config[0]
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), database['name'], database.get('hostname')
)
restore_command = build_restore_command(extract_process, database, dump_filename)
logger.debug(
'{}: Restoring MongoDB database {}{}'.format(log_prefix, database['name'], dry_run_label)
)
if dry_run:
return
execute_command_with_processes(
restore_command,
[extract_process] if extract_process else [],
output_log_level=logging.DEBUG,
input_file=extract_process.stdout if extract_process else None,
borg_local_path=location_config.get('local_path', 'borg'),
)
def build_restore_command(extract_process, database, dump_filename):
'''
Return the mongorestore command from a single database configuration.
'''
command = ['mongorestore', '--archive']
if not extract_process:
command.append(dump_filename)
if database['name'] != 'all':
command.extend(('--drop', '--db', database['name']))
if 'hostname' in database:
command.extend(('--host', database['hostname']))
if 'port' in database:
command.extend(('--port', str(database['port'])))
if 'username' in database:
command.extend(('--username', database['username']))
if 'password' in database:
command.extend(('--password', database['password']))
if 'authentication_database' in database:
command.extend(('--authenticationDatabase', database['authentication_database']))
return command
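For a sense of what the two command builders above produce, here is a small usage sketch; the expected command lists mirror the unit tests added later in this commit, and the import path assumes this file lands at `borgmatic/hooks/mongodb.py`:

```python
# Illustrative usage of the command builders from the new MongoDB hook.
from borgmatic.hooks.mongodb import build_dump_command, build_restore_command

database = {'name': 'foo', 'hostname': 'database.example.org', 'port': 27018}

# "archive" format dumps stream through a shell redirection into a named pipe.
assert build_dump_command(database, 'databases/database.example.org/foo', 'archive') == [
    'mongodump', '--archive',
    '--host', 'database.example.org',
    '--port', '27018',
    '--db', 'foo',
    '>', 'databases/database.example.org/foo',
]

# Restoring from an extract stream (a non-None extract process) omits the dump path,
# since mongorestore then reads the archive from stdin instead.
assert build_restore_command(object(), database, 'databases/database.example.org/foo') == [
    'mongorestore', '--archive',
    '--drop', '--db', 'foo',
    '--host', 'database.example.org',
    '--port', '27018',
]
```

Note that the dump command is run with `shell=True` above precisely so that the trailing `'>', dump_filename` pair becomes a real shell redirection into the named pipe.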


@ -15,7 +15,8 @@ consistent snapshot that is more suited for backups.
Fortunately, borgmatic includes built-in support for creating database dumps
prior to running backups. For example, here is everything you need to dump and
backup a couple of local PostgreSQL databases and a MySQL/MariaDB database:
back up a couple of local PostgreSQL databases, a MySQL/MariaDB database, and a
MongoDB database:
```yaml
hooks:
@ -24,12 +25,15 @@ hooks:
- name: orders
mysql_databases:
- name: posts
mongodb_databases:
- name: messages
```
As part of each backup, borgmatic streams a database dump for each configured
database directly to Borg, so it's included in the backup without consuming
additional disk space. (The one exception is PostgreSQL's "directory" dump
format, which can't stream and therefore does consume temporary disk space.)
additional disk space. (The exceptions are the PostgreSQL/MongoDB "directory"
dump formats, which can't stream and therefore do consume temporary disk
space.)
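If it helps to picture that mechanism, here is a rough, self-contained sketch of the streaming idea (illustrative only, not borgmatic's actual code; it assumes `mongodump` is installed and can reach a local MongoDB server):

```python
# Conceptual sketch: stream a dump through a named pipe so it never lands on disk as a regular file.
import os
import subprocess
import tempfile

pipe_dir = tempfile.mkdtemp()
pipe_path = os.path.join(pipe_dir, 'posts')  # hypothetical dump path
os.mkfifo(pipe_path)

# Writer: mongodump streams an archive-format dump into the pipe via a shell redirection,
# much like the hook's generated 'mongodump --archive ... > <pipe>' command.
dumper = subprocess.Popen('mongodump --archive --db posts > {}'.format(pipe_path), shell=True)

# Reader: the backup process (Borg, in borgmatic's case) consumes the pipe as it is written.
with open(pipe_path, 'rb') as pipe:
    dumped_bytes = pipe.read()

dumper.wait()
os.remove(pipe_path)
os.rmdir(pipe_dir)
```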
To support this, borgmatic creates temporary named pipes in `~/.borgmatic` by
default. To customize this path, set the `borgmatic_source_directory` option
@ -59,6 +63,14 @@ hooks:
username: root
password: trustsome1
options: "--skip-comments"
mongodb_databases:
- name: messages
hostname: database3.example.org
port: 27018
username: dbuser
password: trustsome1
authentication_database: mongousers
options: "--ssl"
```
If you want to dump all databases on a host, use `all` for the database name:
@ -69,13 +81,15 @@ hooks:
- name: all
mysql_databases:
- name: all
mongodb_databases:
- name: all
```
Note that you may need to use a `username` of the `postgres` superuser for
this to work with PostgreSQL.
If you would like to backup databases only and not source directories, you can
specify an empty `source_directories` value because it is a mandatory field:
specify an empty `source_directories` value (as it is a mandatory field):
```yaml
location:
@ -97,7 +111,7 @@ bring back any missing configuration files in order to restore a database.
## Supported databases
As of now, borgmatic supports PostgreSQL and MySQL/MariaDB databases
As of now, borgmatic supports PostgreSQL, MySQL/MariaDB, and MongoDB databases
directly. But see below about general-purpose preparation and cleanup hooks as
a work-around with other database systems. Also, please [file a
ticket](https://torsion.org/borgmatic/#issues) for additional database systems
@ -196,8 +210,8 @@ that may not be exhaustive.
If you prefer to restore a database without the help of borgmatic, first
[extract](https://torsion.org/borgmatic/docs/how-to/extract-a-backup/) an
archive containing a database dump, and then manually restore the dump file
found within the extracted `~/.borgmatic/` path (e.g. with `pg_restore` or
`mysql` commands).
found within the extracted `~/.borgmatic/` path (e.g. with `pg_restore`,
`mysql`, or `mongorestore` commands).
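For example, a hedged sketch of that manual route for a MongoDB dump might look like the following (the dump path and database name are hypothetical; adjust them to your own archive layout):

```python
# Hypothetical manual restore of an "archive"-format MongoDB dump extracted from a backup.
import subprocess

# Assumed location of the extracted dump inside the restored ~/.borgmatic/ tree.
extracted_dump = 'root/.borgmatic/mongodb_databases/localhost/messages'

with open(extracted_dump, 'rb') as dump_file:
    # With no value, --archive makes mongorestore read the dump from stdin.
    subprocess.check_call(
        ['mongorestore', '--archive', '--drop', '--db', 'messages'], stdin=dump_file
    )
```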
## Preparation and cleanup hooks


@ -22,7 +22,6 @@ location:
repositories:
- 1234@usw-s001.rsync.net:backups.borg
- k8pDxu32@k8pDxu32.repo.borgbase.com:repo
- user1@scp2.cdn.lima-labs.com:repo
- /var/lib/backups/local.borg
```
@ -35,8 +34,7 @@ Here's a way of visualizing what borgmatic does with the above configuration:
1. Backup `/home` and `/etc` to `1234@usw-s001.rsync.net:backups.borg`
2. Backup `/home` and `/etc` to `k8pDxu32@k8pDxu32.repo.borgbase.com:repo`
3. Backup `/home` and `/etc` to `user1@scp2.cdn.lima-labs.com:repo`
4. Backup `/home` and `/etc` to `/var/lib/backups/local.borg`
3. Backup `/home` and `/etc` to `/var/lib/backups/local.borg`
This gives you redundancy of your data across repositories and even
potentially across providers.


@ -101,7 +101,6 @@ referral links, but without any tracking scripts or cookies.)
<ul>
<li class="referral"><a href="https://www.borgbase.com/?utm_source=borgmatic">BorgBase</a>: Borg hosting service with support for monitoring, 2FA, and append-only repos</li>
<li class="referral"><a href="https://storage.lima-labs.com/special-pricing-offer-for-borgmatic-users/">Lima-Labs</a>: Affordable, reliable cloud data storage accessable via SSH/SCP/FTP for Borg backups or any other bulk storage needs</li>
</ul>
Additionally, [rsync.net](https://www.rsync.net/products/borg.html) and

docs/static/mongodb.png vendored Normal file (binary, 12 KiB after this commit; not shown)

(another binary file, 7.3 KiB before this commit; not shown)


@ -10,7 +10,7 @@
set -e
apk add --no-cache python3 py3-pip borgbackup postgresql-client mariadb-client
apk add --no-cache python3 py3-pip borgbackup postgresql-client mariadb-client mongodb-tools
# If certain dependencies of black are available in this version of Alpine, install them.
apk add --no-cache py3-typed-ast py3-regex || true
python3 -m pip install --upgrade pip==21.3.1 setuptools==58.2.0


@ -14,10 +14,10 @@ py==1.10.0
pycodestyle==2.6.0
pyflakes==2.2.0
jsonschema==3.2.0
pytest==6.1.2
pytest-cov==2.10.1
pytest==6.2.5
pytest-cov==3.0.0
regex; python_version >= '3.8'
requests==2.25.0
ruamel.yaml>0.15.0,<0.18.0
toml==0.10.2; python_version >= '3.8'
typed-ast==1.4.2; python_version >= '3.8'
typed-ast; python_version >= '3.8'


@ -10,6 +10,11 @@ services:
environment:
MYSQL_ROOT_PASSWORD: test
MYSQL_DATABASE: test
mongodb:
image: mongo:5.0.5
environment:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: test
tests:
image: alpine:3.13
volumes:


@ -47,13 +47,22 @@ hooks:
hostname: mysql
username: root
password: test
mongodb_databases:
- name: test
hostname: mongodb
username: root
password: test
authentication_database: admin
- name: all
hostname: mongodb
username: root
password: test
'''.format(
config_path, repository_path, borgmatic_source_directory, postgresql_dump_format
)
config_file = open(config_path, 'w')
config_file.write(config)
config_file.close()
with open(config_path, 'w') as config_file:
config_file.write(config)
def test_database_dump_and_restore():
@ -69,15 +78,15 @@ def test_database_dump_and_restore():
write_configuration(config_path, repository_path, borgmatic_source_directory)
subprocess.check_call(
'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ')
['borgmatic', '-v', '2', '--config', config_path, 'init', '--encryption', 'repokey']
)
# Run borgmatic to generate a backup archive including a database dump.
subprocess.check_call('borgmatic create --config {} -v 2'.format(config_path).split(' '))
subprocess.check_call(['borgmatic', 'create', '--config', config_path, '-v', '2'])
# Get the created archive name.
output = subprocess.check_output(
'borgmatic --config {} list --json'.format(config_path).split(' ')
['borgmatic', '--config', config_path, 'list', '--json']
).decode(sys.stdout.encoding)
parsed_output = json.loads(output)
@ -87,9 +96,7 @@ def test_database_dump_and_restore():
# Restore the database from the archive.
subprocess.check_call(
'borgmatic --config {} restore --archive {}'.format(config_path, archive_name).split(
' '
)
['borgmatic', '--config', config_path, 'restore', '--archive', archive_name]
)
finally:
os.chdir(original_working_directory)
@ -114,15 +121,15 @@ def test_database_dump_and_restore_with_directory_format():
)
subprocess.check_call(
'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ')
['borgmatic', '-v', '2', '--config', config_path, 'init', '--encryption', 'repokey']
)
# Run borgmatic to generate a backup archive including a database dump.
subprocess.check_call('borgmatic create --config {} -v 2'.format(config_path).split(' '))
subprocess.check_call(['borgmatic', 'create', '--config', config_path, '-v', '2'])
# Restore the database from the archive.
subprocess.check_call(
'borgmatic --config {} restore --archive latest'.format(config_path).split(' ')
['borgmatic', '--config', config_path, 'restore', '--archive', 'latest']
)
finally:
os.chdir(original_working_directory)
@ -142,7 +149,7 @@ def test_database_dump_with_error_causes_borgmatic_to_exit():
write_configuration(config_path, repository_path, borgmatic_source_directory)
subprocess.check_call(
'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ')
['borgmatic', '-v', '2', '--config', config_path, 'init', '--encryption', 'repokey']
)
# Run borgmatic with a config override such that the database dump fails.


@ -0,0 +1,308 @@
import logging
import pytest
from flexmock import flexmock
from borgmatic.hooks import mongodb as module
def test_dump_databases_runs_mongodump_for_each_database():
databases = [{'name': 'foo'}, {'name': 'bar'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
for name, process in zip(('foo', 'bar'), processes):
flexmock(module).should_receive('execute_command').with_args(
['mongodump', '--archive', '--db', name, '>', 'databases/localhost/{}'.format(name)],
shell=True,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes
def test_dump_databases_with_dry_run_skips_mongodump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == []
def test_dump_databases_runs_mongodump_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/database.example.org/foo'
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
[
'mongodump',
'--archive',
'--host',
'database.example.org',
'--port',
'5433',
'--db',
'foo',
'>',
'databases/database.example.org/foo',
],
shell=True,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
def test_dump_databases_runs_mongodump_with_username_and_password():
databases = [
{
'name': 'foo',
'username': 'mongo',
'password': 'trustsome1',
'authentication_database': "admin",
}
]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
[
'mongodump',
'--archive',
'--username',
'mongo',
'--password',
'trustsome1',
'--authenticationDatabase',
'admin',
'--db',
'foo',
'>',
'databases/localhost/foo',
],
shell=True,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
def test_dump_databases_runs_mongodump_with_directory_format():
databases = [{'name': 'foo', 'format': 'directory'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.dump).should_receive('create_parent_directory_for_dump')
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').with_args(
['mongodump', '--archive', 'databases/localhost/foo', '--db', 'foo'],
shell=True,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
def test_dump_databases_runs_mongodump_with_options():
databases = [{'name': 'foo', 'options': '--stuff=such'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
['mongodump', '--archive', '--db', 'foo', '--stuff=such', '>', 'databases/localhost/foo'],
shell=True,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
def test_dump_databases_runs_mongodump_for_all_databases():
databases = [{'name': 'all'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
['mongodump', '--archive', '>', 'databases/localhost/all'],
shell=True,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
def test_restore_database_dump_runs_mongorestore():
database_config = [{'name': 'foo'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
['mongorestore', '--archive', '--drop', '--db', 'foo'],
processes=[extract_process],
output_log_level=logging.DEBUG,
input_file=extract_process.stdout,
borg_local_path='borg',
).once()
module.restore_database_dump(
database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process
)
def test_restore_database_dump_errors_on_multiple_database_config():
database_config = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').never()
flexmock(module).should_receive('execute_command').never()
with pytest.raises(ValueError):
module.restore_database_dump(
database_config, 'test.yaml', {}, dry_run=False, extract_process=flexmock()
)
def test_restore_database_dump_runs_mongorestore_with_hostname_and_port():
database_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
[
'mongorestore',
'--archive',
'--drop',
'--db',
'foo',
'--host',
'database.example.org',
'--port',
'5433',
],
processes=[extract_process],
output_log_level=logging.DEBUG,
input_file=extract_process.stdout,
borg_local_path='borg',
).once()
module.restore_database_dump(
database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process
)
def test_restore_database_dump_runs_mongorestore_with_username_and_password():
database_config = [
{
'name': 'foo',
'username': 'mongo',
'password': 'trustsome1',
'authentication_database': 'admin',
}
]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
[
'mongorestore',
'--archive',
'--drop',
'--db',
'foo',
'--username',
'mongo',
'--password',
'trustsome1',
'--authenticationDatabase',
'admin',
],
processes=[extract_process],
output_log_level=logging.DEBUG,
input_file=extract_process.stdout,
borg_local_path='borg',
).once()
module.restore_database_dump(
database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process
)
def test_restore_database_dump_runs_mongorestore_for_all_database_dump():
database_config = [{'name': 'all'}]
extract_process = flexmock(stdout=flexmock())
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').with_args(
['mongorestore', '--archive'],
processes=[extract_process],
output_log_level=logging.DEBUG,
input_file=extract_process.stdout,
borg_local_path='borg',
).once()
module.restore_database_dump(
database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process
)
def test_restore_database_dump_with_dry_run_skips_restore():
database_config = [{'name': 'foo'}]
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename')
flexmock(module).should_receive('execute_command_with_processes').never()
module.restore_database_dump(
database_config, 'test.yaml', {}, dry_run=True, extract_process=flexmock()
)
def test_restore_database_dump_without_extract_process_restores_from_disk():
database_config = [{'name': 'foo'}]
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path')
flexmock(module).should_receive('execute_command_with_processes').with_args(
['mongorestore', '--archive', '/dump/path', '--drop', '--db', 'foo'],
processes=[],
output_log_level=logging.DEBUG,
input_file=None,
borg_local_path='borg',
).once()
module.restore_database_dump(
database_config, 'test.yaml', {}, dry_run=False, extract_process=None
)


@ -1,5 +1,5 @@
[tox]
envlist = py36,py37,py38,py39
envlist = py37,py38,py39,py310
skip_missing_interpreters = True
skipsdist = True
minversion = 3.14.1