Compare commits

...

21 Commits

Author SHA1 Message Date
Dan Helfman dbf8301c19 Add "checkpoint_volume" configuration option to create checkpoints every specified number of bytes. 2023-02-27 10:47:17 -08:00
Dan Helfman 2a306bef12 Fix tests. 2023-02-26 23:34:17 -08:00
Dan Helfman 2a36a2a312 Add "--repository" flag to the "rcreate" action. Add "--progress" flag to the "transfer" action. 2023-02-26 23:22:23 -08:00
Dan Helfman d7a07f0428 Support status character changes in Borg 2.0.0b5 when filtering out special files that cause Borg to hang. 2023-02-26 22:36:13 -08:00
Dan Helfman da321e180d Fix the "create" action with the "--dry-run" flag querying for databases when a PostgreSQL/MySQL "all" database is configured. 2023-02-26 22:15:12 -08:00
Dan Helfman c6582e1171 Internally support new Borg 2.0.0b5 "--filter" status characters / item flags for the "create" action. 2023-02-26 17:17:25 -08:00
Dan Helfman 9b83afe491 With the "create" action, only one of "--list" ("--files") and "--progress" flags can be used. 2023-02-26 17:05:56 -08:00
Dan Helfman 2814ac3642 Update Borg 2.0 documentation links. 2023-02-26 16:44:43 -08:00
Dan Helfman 8a9d5d93f5 Add ntfy authentication to NEWS. 2023-02-25 14:23:42 -08:00
Dan Helfman 783a6d3b45 Add authentication to the ntfy hook (#621).
Reviewed-on: borgmatic-collective/borgmatic#644
2023-02-25 22:04:37 +00:00
Tom Hubrecht 95575c3450 Add auth test for the ntfy hook 2023-02-25 20:04:39 +01:00
Tom Hubrecht 9b071ff92f Make the auth logic more explicit and warnings if necessary 2023-02-25 20:04:39 +01:00
Tom Hubrecht d80e716822 Add authentication to the ntfy hook 2023-02-24 17:35:53 +01:00
Dan Helfman 418ebc8843 Add MySQL database hook "add_drop_database" configuration option to control whether dumped MySQL databases get dropped right before restore (#642). 2023-02-20 15:32:47 -08:00
Dan Helfman f5a448c7c2 Fix for potential data loss (data not getting backed up) when dumping large "directory" format PostgreSQL/MongoDB databases (#643). 2023-02-20 15:18:51 -08:00
Dan Helfman 37ac542b31 Merge pull request 'setup: Add link to MacPorts package' (#641) from neverpanic/borgmatic:cal-docs-macports-port into master
Reviewed-on: borgmatic-collective/borgmatic#641
2023-02-15 17:31:03 +00:00
Clemens Lang 8c7d7e3e41 setup: Add link to MacPorts package 2023-02-15 10:47:59 +01:00
Dan Helfman b811f125b2 Clarify "checks" configuration documentation for older versions of borgmatic (#639). 2023-02-12 21:42:43 -08:00
Dan Helfman 061f3e7917 Remove related documentation links. 2023-01-26 16:12:01 -08:00
Dan Helfman 6055918907 Upgrade documentation image dependencies. 2023-01-26 16:11:41 -08:00
Dan Helfman 4a90e090ad Clarify NEWS on database "all" dump feature applying to MySQL as well. 2023-01-26 15:28:17 -08:00
27 changed files with 484 additions and 107 deletions

29
NEWS
View File

@ -1,6 +1,31 @@
1.7.8.dev0
* #621: Add optional authentication to the ntfy monitoring hook.
* With the "create" action, only one of "--list" ("--files") and "--progress" flags can be used.
This lines up with the new behavior in Borg 2.0.0b5.
* Internally support new Borg 2.0.0b5 "--filter" status characters / item flags for the "create"
action.
* Fix the "create" action with the "--dry-run" flag querying for databases when a PostgreSQL/MySQL
"all" database is configured. Now, these queries are skipped due to the dry run.
* Add "--repository" flag to the "rcreate" action to optionally select one configured repository to
create.
* Add "--progress" flag to the "transfer" action, new in Borg 2.0.0b5.
* Add "checkpoint_volume" configuration option to create checkpoints every specified number of
bytes during a long-running backup, new in Borg 2.0.0b5.
1.7.7
* #642: Add MySQL database hook "add_drop_database" configuration option to control whether dumped
MySQL databases get dropped right before restore.
* #643: Fix for potential data loss (data not getting backed up) when dumping large "directory"
format PostgreSQL/MongoDB databases. Prior to the fix, these dumps would not finish writing to
disk before Borg consumed them. Now, the dumping process completes before Borg starts. This only
applies to "directory" format databases; other formats still stream to Borg without using
temporary disk space.
* Fix MongoDB "directory" format to work with mongodump/mongorestore without error. Prior to this
fix, only the "archive" format worked.
1.7.6
* #393, #438, #560: Optionally dump "all" PostgreSQL databases to separate files instead of one
combined dump file, allowing more convenient restores of individual databases. You can enable
* #393, #438, #560: Optionally dump "all" PostgreSQL/MySQL databases to separate files instead of
one combined dump file, allowing more convenient restores of individual databases. You can enable
this by specifying the database dump "format" option when the database is named "all".
* #602: Fix logs that interfere with JSON output by making warnings go to stderr instead of stdout.
* #622: Fix traceback when include merging configuration files on ARM64.

View File

@ -1,6 +1,7 @@
import logging
import borgmatic.borg.rcreate
import borgmatic.config.validate
logger = logging.getLogger(__name__)
@ -17,6 +18,11 @@ def run_rcreate(
'''
Run the "rcreate" action for the given repository.
'''
if rcreate_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, rcreate_arguments.repository
):
return
logger.info('{}: Creating repository'.format(repository))
borgmatic.borg.rcreate.create_repository(
global_arguments.dry_run,

View File

@ -275,7 +275,7 @@ def collect_special_file_paths(
paths = tuple(
path_line.split(' ', 1)[1]
for path_line in paths_output.split('\n')
if path_line and path_line.startswith('- ')
if path_line and path_line.startswith('- ') or path_line.startswith('+ ')
)
return tuple(
@ -337,6 +337,7 @@ def create_archive(
expand_home_directories(location_config.get('exclude_patterns'))
)
checkpoint_interval = storage_config.get('checkpoint_interval', None)
checkpoint_volume = storage_config.get('checkpoint_volume', None)
chunker_params = storage_config.get('chunker_params', None)
compression = storage_config.get('compression', None)
upload_rate_limit = storage_config.get('upload_rate_limit', None)
@ -381,6 +382,7 @@ def create_archive(
+ make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
+ make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
+ (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
+ (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ())
+ (('--chunker-params', chunker_params) if chunker_params else ())
+ (('--compression', compression) if compression else ())
+ upload_ratelimit_flags
@ -399,7 +401,7 @@ def create_archive(
+ (('--remote-path', remote_path) if remote_path else ())
+ (('--umask', str(umask)) if umask else ())
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
+ (('--list', '--filter', 'AMEx-') if list_files and not json and not progress else ())
+ (('--list', '--filter', 'AMEx+-') if list_files and not json and not progress else ())
+ (('--dry-run',) if dry_run else ())
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ flags.make_repository_archive_flags(repository, archive_name_format, local_borg_version)

View File

@ -2,7 +2,7 @@ import logging
import borgmatic.logger
from borgmatic.borg import environment, flags
from borgmatic.execute import execute_command
from borgmatic.execute import DO_NOT_CAPTURE, execute_command
logger = logging.getLogger(__name__)
@ -28,6 +28,7 @@ def transfer_archives(
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ flags.make_flags('remote-path', remote_path)
+ flags.make_flags('lock-wait', storage_config.get('lock_wait', None))
+ (('--progress',) if transfer_arguments.progress else ())
+ (
flags.make_flags(
'match-archives', transfer_arguments.match_archives or transfer_arguments.archive
@ -45,6 +46,7 @@ def transfer_archives(
return execute_command(
full_command,
output_log_level=logging.ANSWER,
output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None,
borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config),
)

View File

@ -247,6 +247,10 @@ def make_parsers():
metavar='KEY_REPOSITORY',
help='Path to an existing Borg repository whose key material should be reused (Borg 2.x+ only)',
)
rcreate_group.add_argument(
'--repository',
help='Path of the new repository to create (must be already specified in a borgmatic configuration file), defaults to the configured repository if there is only one',
)
rcreate_group.add_argument(
'--copy-crypt-key',
action='store_true',
@ -292,6 +296,12 @@ def make_parsers():
'--upgrader',
help='Upgrader type used to convert the transfered data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion',
)
transfer_group.add_argument(
'--progress',
default=False,
action='store_true',
help='Display progress as each archive is transferred',
)
transfer_group.add_argument(
'-a',
'--match-archives',
@ -833,6 +843,11 @@ def parse_arguments(*unparsed_arguments):
'The --excludes flag has been replaced with exclude_patterns in configuration.'
)
if 'create' in arguments and arguments['create'].list_files and arguments['create'].progress:
raise ValueError(
'With the create action, only one of --list (--files) and --progress flags can be used.'
)
if (
('list' in arguments and 'rinfo' in arguments and arguments['list'].json)
or ('list' in arguments and 'info' in arguments and arguments['list'].json)

View File

@ -240,6 +240,16 @@ properties:
for details. Defaults to checkpoints every 1800 seconds (30
minutes).
example: 1800
checkpoint_volume:
type: integer
description: |
Number of backed up bytes between each checkpoint during a
long-running backup. Only supported with Borg 2+. See
https://borgbackup.readthedocs.io/en/stable/faq.html
for details. Defaults to only time-based checkpointing (see
"checkpoint_interval") instead of volume-based
checkpointing.
example: 1048576
chunker_params:
type: string
description: |
@ -892,6 +902,13 @@ properties:
file of that format, allowing more convenient
restores of individual databases.
example: directory
add_drop_database:
type: boolean
description: |
Use the "--add-drop-database" flag with
mysqldump, causing the database to be dropped
right before restore. Defaults to true.
example: false
options:
type: string
description: |
@ -1022,6 +1039,16 @@ properties:
description: |
The address of your self-hosted ntfy.sh instance.
example: https://ntfy.your-domain.com
username:
type: string
description: |
The username used for authentication.
example: testuser
password:
type: string
description: |
The password used for authentication.
example: fakepassword
start:
type: object
properties:

View File

@ -45,13 +45,14 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
if dry_run:
continue
command = build_dump_command(database, dump_filename, dump_format)
if dump_format == 'directory':
dump.create_parent_directory_for_dump(dump_filename)
execute_command(command, shell=True)
else:
dump.create_named_pipe_for_dump(dump_filename)
command = build_dump_command(database, dump_filename, dump_format)
processes.append(execute_command(command, shell=True, run_to_completion=False))
processes.append(execute_command(command, shell=True, run_to_completion=False))
return processes
@ -61,9 +62,9 @@ def build_dump_command(database, dump_filename, dump_format):
Return the mongodump command from a single database configuration.
'''
all_databases = database['name'] == 'all'
command = ['mongodump', '--archive']
command = ['mongodump']
if dump_format == 'directory':
command.append(dump_filename)
command.extend(('--out', dump_filename))
if 'hostname' in database:
command.extend(('--host', database['hostname']))
if 'port' in database:
@ -79,7 +80,7 @@ def build_dump_command(database, dump_filename, dump_format):
if 'options' in database:
command.extend(database['options'].split(' '))
if dump_format != 'directory':
command.extend(('>', dump_filename))
command.extend(('--archive', '>', dump_filename))
return command
@ -145,9 +146,11 @@ def build_restore_command(extract_process, database, dump_filename):
'''
Return the mongorestore command from a single database configuration.
'''
command = ['mongorestore', '--archive']
if not extract_process:
command.append(dump_filename)
command = ['mongorestore']
if extract_process:
command.append('--archive')
else:
command.extend(('--dir', dump_filename))
if database['name'] != 'all':
command.extend(('--drop', '--db', database['name']))
if 'hostname' in database:

View File

@ -24,7 +24,7 @@ def make_dump_path(location_config): # pragma: no cover
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
def database_names_to_dump(database, extra_environment, log_prefix, dry_run_label):
def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
'''
Given a requested database config, return the corresponding sequence of database names to dump.
In the case of "all", query for the names of databases on the configured host and return them,
@ -32,6 +32,8 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run_labe
'''
if database['name'] != 'all':
return (database['name'],)
if dry_run:
return ()
show_command = (
('mysql',)
@ -43,9 +45,7 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run_labe
+ ('--skip-column-names', '--batch')
+ ('--execute', 'show schemas')
)
logger.debug(
'{}: Querying for "all" MySQL databases to dump{}'.format(log_prefix, dry_run_label)
)
logger.debug(f'{log_prefix}: Querying for "all" MySQL databases to dump')
show_output = execute_command_and_capture_output(
show_command, extra_environment=extra_environment
)
@ -81,7 +81,7 @@ def execute_dump_command(
dump_command = (
('mysqldump',)
+ (tuple(database['options'].split(' ')) if 'options' in database else ())
+ ('--add-drop-database',)
+ (('--add-drop-database',) if database.get('add_drop_database', True) else ())
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
@ -125,9 +125,13 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
dump_path = make_dump_path(location_config)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run_label
database, extra_environment, log_prefix, dry_run
)
if not dump_database_names:
if dry_run:
continue
raise ValueError('Cannot find any MySQL databases to dump.')
if database['name'] == 'all' and database.get('format'):

View File

@ -56,14 +56,30 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
'X-Tags': state_config.get('tags'),
}
username = hook_config.get('username')
password = hook_config.get('password')
auth = None
if (username and password) is not None:
auth = requests.auth.HTTPBasicAuth(username, password)
logger.info(f'{config_filename}: Using basic auth with user {username} for ntfy')
elif username is not None:
logger.warning(
f'{config_filename}: Password missing for ntfy authentication, defaulting to no auth'
)
elif password is not None:
logger.warning(
f'{config_filename}: Username missing for ntfy authentication, defaulting to no auth'
)
if not dry_run:
logging.getLogger('urllib3').setLevel(logging.ERROR)
try:
response = requests.post(f'{base_url}/{topic}', headers=headers)
response = requests.post(f'{base_url}/{topic}', headers=headers, auth=auth)
if not response.ok:
response.raise_for_status()
except requests.exceptions.RequestException as error:
logger.warning(f'{config_filename}: Ntfy error: {error}')
logger.warning(f'{config_filename}: ntfy error: {error}')
def destroy_monitor(

View File

@ -43,7 +43,7 @@ def make_extra_environment(database):
EXCLUDED_DATABASE_NAMES = ('template0', 'template1')
def database_names_to_dump(database, extra_environment, log_prefix, dry_run_label):
def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
'''
Given a requested database config, return the corresponding sequence of database names to dump.
In the case of "all" when a database format is given, query for the names of databases on the
@ -56,6 +56,8 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run_labe
return (requested_name,)
if not database.get('format'):
return ('all',)
if dry_run:
return ()
list_command = (
('psql', '--list', '--no-password', '--csv', '--tuples-only')
@ -64,9 +66,7 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run_labe
+ (('--username', database['username']) if 'username' in database else ())
+ (tuple(database['list_options'].split(' ')) if 'list_options' in database else ())
)
logger.debug(
'{}: Querying for "all" PostgreSQL databases to dump{}'.format(log_prefix, dry_run_label)
)
logger.debug(f'{log_prefix}: Querying for "all" PostgreSQL databases to dump')
list_output = execute_command_and_capture_output(
list_command, extra_environment=extra_environment
)
@ -99,10 +99,13 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
extra_environment = make_extra_environment(database)
dump_path = make_dump_path(location_config)
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run_label
database, extra_environment, log_prefix, dry_run
)
if not dump_database_names:
if dry_run:
continue
raise ValueError('Cannot find any PostgreSQL databases to dump.')
for database_name in dump_database_names:
@ -141,17 +144,19 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
if dump_format == 'directory':
dump.create_parent_directory_for_dump(dump_filename)
execute_command(
command, shell=True, extra_environment=extra_environment,
)
else:
dump.create_named_pipe_for_dump(dump_filename)
processes.append(
execute_command(
command,
shell=True,
extra_environment=extra_environment,
run_to_completion=False,
processes.append(
execute_command(
command,
shell=True,
extra_environment=extra_environment,
run_to_completion=False,
)
)
)
return processes

View File

@ -1,4 +1,4 @@
FROM alpine:3.16.0 as borgmatic
FROM alpine:3.17.1 as borgmatic
COPY . /app
RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib
@ -8,7 +8,7 @@ RUN borgmatic --help > /command-line.txt \
echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \
&& borgmatic "$action" --help >> /command-line.txt; done
FROM node:18.4.0-alpine as html
FROM node:19.5.0-alpine as html
ARG ENVIRONMENT=production
@ -27,7 +27,7 @@ COPY . /source
RUN NODE_ENV=${ENVIRONMENT} npx eleventy --input=/source/docs --output=/output/docs \
&& mv /output/docs/index.html /output/index.html
FROM nginx:1.22.0-alpine
FROM nginx:1.22.1-alpine
COPY --from=html /output /usr/share/nginx/html
COPY --from=borgmatic /etc/borgmatic/config.yaml /usr/share/nginx/html/docs/reference/config.yaml

View File

@ -240,7 +240,7 @@ To restore all databases:
borgmatic restore --archive host-2023-... --database all
```
Or just omit the `--database` flag entirely:
Or omit the `--database` flag entirely:
```bash

View File

@ -60,7 +60,15 @@ consistency:
- name: repository
```
(Prior to borgmatic 1.6.2, `checks` was a plain list of strings without the `name:` part.)
<span class="minilink minilink-addedin">Prior to version 1.6.2</span> `checks`
was a plain list of strings without the `name:` part. For example:
```yaml
consistency:
checks:
- repository
```
Here are the available checks from fastest to slowest:
@ -125,7 +133,16 @@ consistency:
- name: disabled
```
Or, if you have multiple repositories in your borgmatic configuration file,
<span class="minilink minilink-addedin">Prior to version 1.6.2</span> `checks`
was a plain list of strings without the `name:` part. For instance:
```yaml
consistency:
checks:
- disabled
```
If you have multiple repositories in your borgmatic configuration file,
you can keep running consistency checks, but only against a subset of the
repositories:

View File

@ -93,6 +93,7 @@ installing borgmatic:
* [OpenBSD](http://ports.su/sysutils/borgmatic)
* [openSUSE](https://software.opensuse.org/package/borgmatic)
* [macOS (via Homebrew)](https://formulae.brew.sh/formula/borgmatic)
* [macOS (via MacPorts)](https://ports.macports.org/port/borgmatic/)
* [Ansible role](https://github.com/borgbase/ansible-role-borgbackup)
* [virtualenv](https://virtualenv.pypa.io/en/stable/)

View File

@ -169,12 +169,21 @@ The `--source-repository` flag is necessary to reuse key material from your
Borg 1 repository so that the subsequent data transfer can work.
The `--encryption` value above selects the same chunk ID algorithm (`blake2`)
used in Borg 1, thereby making deduplication work across transferred archives
and new archives. Note that `repokey-blake2-chacha20-poly1305` may be faster
than `repokey-blake2-aes-ocb` on certain platforms like ARM64. Read about
[Borg encryption
modes](https://borgbackup.readthedocs.io/en/2.0.0b4/usage/rcreate.html#encryption-mode-tldr)
for the menu of available encryption modes.
commonly used in Borg 1, thereby making deduplication work across transferred
archives and new archives.
If you get an error about "You must keep the same ID hash" from Borg, that
means the encryption value you specified doesn't correspond to your source
repository's chunk ID algorithm. In that case, try not using `blake2`:
```bash
borgmatic rcreate --verbosity 1 --encryption repokey-aes-ocb \
--source-repository original.borg --repository upgraded.borg
```
Read about [Borg encryption
modes](https://borgbackup.readthedocs.io/en/2.0.0b5/usage/rcreate.html#encryption-mode-tldr)
for more details.
To transfer data from your original Borg 1 repository to your newly created
Borg 2 repository:
@ -196,7 +205,7 @@ confirmation of success—or tells you if something hasn't been transferred yet.
Note that by omitting the `--upgrader` flag, you can also do archive transfers
between related Borg 2 repositories without upgrading, even down to individual
archives. For more on that functionality, see the [Borg transfer
documentation](https://borgbackup.readthedocs.io/en/2.0.0b4/usage/transfer.html).
documentation](https://borgbackup.readthedocs.io/en/2.0.0b5/usage/transfer.html).
That's it! Now you can use your new Borg 2 repository as normal with
borgmatic. If you've got multiple repositories, repeat the above process for

View File

@ -13,9 +13,3 @@ each action sub-command:
```
{% include borgmatic/command-line.txt %}
```
## Related documentation
* [Set up backups with borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/)
* [borgmatic configuration reference](https://torsion.org/borgmatic/docs/reference/configuration/)

View File

@ -15,9 +15,3 @@ Here is a full sample borgmatic configuration file including all available optio
Note that you can also [download this configuration
file](https://torsion.org/borgmatic/docs/reference/config.yaml) for use locally.
## Related documentation
* [Set up backups with borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/)
* [borgmatic command-line reference](https://torsion.org/borgmatic/docs/reference/command-line/)

View File

@ -1,6 +1,6 @@
from setuptools import find_packages, setup
VERSION = '1.7.6'
VERSION = '1.7.8.dev0'
setup(

View File

@ -14,6 +14,7 @@ def write_configuration(
repository_path,
borgmatic_source_directory,
postgresql_dump_format='custom',
mongodb_dump_format='archive',
):
'''
Write out borgmatic configuration into a file at the config path. Set the options so as to work
@ -67,6 +68,7 @@ hooks:
username: root
password: test
authentication_database: admin
format: {mongodb_dump_format}
- name: all
hostname: mongodb
username: root
@ -136,6 +138,7 @@ def test_database_dump_and_restore_with_directory_format():
repository_path,
borgmatic_source_directory,
postgresql_dump_format='directory',
mongodb_dump_format='directory',
)
subprocess.check_call(

View File

@ -422,6 +422,13 @@ def test_parse_arguments_with_list_flag_but_no_relevant_action_raises_value_erro
module.parse_arguments('--list', 'rcreate')
def test_parse_arguments_disallows_list_with_progress_for_create_action():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
with pytest.raises(ValueError):
module.parse_arguments('create', '--list', '--progress')
def test_parse_arguments_allows_json_with_list_or_info():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])

View File

@ -5,10 +5,39 @@ from borgmatic.actions import rcreate as module
def test_run_rcreate_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.rcreate).should_receive('create_repository')
arguments = flexmock(
encryption_mode=flexmock(),
source_repository=flexmock(),
repository=flexmock(),
copy_crypt_key=flexmock(),
append_only=flexmock(),
storage_quota=flexmock(),
make_parent_dirs=flexmock(),
)
module.run_rcreate(
repository='repo',
storage={},
local_borg_version=None,
rcreate_arguments=arguments,
global_arguments=flexmock(dry_run=False),
local_path=None,
remote_path=None,
)
def test_run_rcreate_bails_if_repository_does_not_match():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(
False
)
flexmock(module.borgmatic.borg.rcreate).should_receive('create_repository').never()
arguments = flexmock(
encryption_mode=flexmock(),
source_repository=flexmock(),
repository=flexmock(),
copy_crypt_key=flexmock(),
append_only=flexmock(),
storage_quota=flexmock(),

View File

@ -358,7 +358,7 @@ def test_any_parent_directories_treats_unrelated_paths_as_non_match():
def test_collect_special_file_paths_parses_special_files_from_borg_dry_run_file_list():
flexmock(module).should_receive('execute_command_and_capture_output').and_return(
'Processing files ...\n- /foo\n- /bar\n- /baz'
'Processing files ...\n- /foo\n+ /bar\n- /baz'
)
flexmock(module).should_receive('special_file').and_return(True)
flexmock(module).should_receive('any_parent_directories').and_return(False)
@ -374,7 +374,7 @@ def test_collect_special_file_paths_parses_special_files_from_borg_dry_run_file_
def test_collect_special_file_paths_excludes_requested_directories():
flexmock(module).should_receive('execute_command_and_capture_output').and_return(
'- /foo\n- /bar\n- /baz'
'+ /foo\n- /bar\n- /baz'
)
flexmock(module).should_receive('special_file').and_return(True)
flexmock(module).should_receive('any_parent_directories').and_return(False).and_return(
@ -392,7 +392,7 @@ def test_collect_special_file_paths_excludes_requested_directories():
def test_collect_special_file_paths_excludes_non_special_files():
flexmock(module).should_receive('execute_command_and_capture_output').and_return(
'- /foo\n- /bar\n- /baz'
'+ /foo\n+ /bar\n+ /baz'
)
flexmock(module).should_receive('special_file').and_return(True).and_return(False).and_return(
True
@ -874,6 +874,47 @@ def test_create_archive_with_checkpoint_interval_calls_borg_with_checkpoint_inte
)
def test_create_archive_with_checkpoint_volume_calls_borg_with_checkpoint_volume_parameters():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
flexmock(module).should_receive('map_directories_to_devices').and_return({})
flexmock(module).should_receive('expand_directories').and_return(())
flexmock(module).should_receive('pattern_root_directories').and_return([])
flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError)
flexmock(module).should_receive('expand_home_directories').and_return(())
flexmock(module).should_receive('write_pattern_file').and_return(None)
flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module).should_receive('ensure_files_readable')
flexmock(module).should_receive('make_pattern_flags').and_return(())
flexmock(module).should_receive('make_exclude_flags').and_return(())
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
(f'repo::{DEFAULT_ARCHIVE_NAME}',)
)
flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--checkpoint-volume', '1024') + REPO_ARCHIVE_WITH_PATHS,
output_log_level=logging.INFO,
output_file=None,
borg_local_path='borg',
working_directory=None,
extra_environment=None,
)
module.create_archive(
dry_run=False,
repository='repo',
location_config={
'source_directories': ['foo', 'bar'],
'repositories': ['repo'],
'exclude_patterns': None,
},
storage_config={'checkpoint_volume': 1024},
local_borg_version='1.2.3',
)
def test_create_archive_with_chunker_params_calls_borg_with_chunker_params_parameters():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
@ -1612,7 +1653,7 @@ def test_create_archive_with_files_calls_borg_with_list_parameter_and_answer_out
)
flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args(
('borg', 'create', '--list', '--filter', 'AMEx-') + REPO_ARCHIVE_WITH_PATHS,
('borg', 'create', '--list', '--filter', 'AMEx+-') + REPO_ARCHIVE_WITH_PATHS,
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',

View File

@ -18,6 +18,7 @@ def test_transfer_archives_calls_borg_with_flags():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -27,7 +28,9 @@ def test_transfer_archives_calls_borg_with_flags():
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives=None, source_repository=None),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None
),
)
@ -44,6 +47,7 @@ def test_transfer_archives_with_dry_run_calls_borg_with_dry_run_flag():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--repo', 'repo', '--dry-run'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -53,7 +57,9 @@ def test_transfer_archives_with_dry_run_calls_borg_with_dry_run_flag():
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives=None, source_repository=None),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None
),
)
@ -67,6 +73,7 @@ def test_transfer_archives_with_log_info_calls_borg_with_info_flag():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--info', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -76,7 +83,9 @@ def test_transfer_archives_with_log_info_calls_borg_with_info_flag():
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives=None, source_repository=None),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None
),
)
@ -90,6 +99,7 @@ def test_transfer_archives_with_log_debug_calls_borg_with_debug_flag():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--debug', '--show-rc', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -100,7 +110,9 @@ def test_transfer_archives_with_log_debug_calls_borg_with_debug_flag():
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives=None, source_repository=None),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None
),
)
@ -117,6 +129,7 @@ def test_transfer_archives_with_archive_calls_borg_with_match_archives_flag():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--match-archives', 'archive', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -126,7 +139,9 @@ def test_transfer_archives_with_archive_calls_borg_with_match_archives_flag():
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive='archive', match_archives=None, source_repository=None),
transfer_arguments=flexmock(
archive='archive', progress=None, match_archives=None, source_repository=None
),
)
@ -143,6 +158,7 @@ def test_transfer_archives_with_match_archives_calls_borg_with_match_archives_fl
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--match-archives', 'sh:foo*', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -152,7 +168,9 @@ def test_transfer_archives_with_match_archives_calls_borg_with_match_archives_fl
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives='sh:foo*', source_repository=None),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives='sh:foo*', source_repository=None
),
)
@ -166,6 +184,7 @@ def test_transfer_archives_with_local_path_calls_borg_via_local_path():
flexmock(module).should_receive('execute_command').with_args(
('borg2', 'transfer', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg2',
extra_environment=None,
)
@ -175,7 +194,9 @@ def test_transfer_archives_with_local_path_calls_borg_via_local_path():
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives=None, source_repository=None),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None
),
local_path='borg2',
)
@ -193,6 +214,7 @@ def test_transfer_archives_with_remote_path_calls_borg_with_remote_path_flags():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--remote-path', 'borg2', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -202,7 +224,9 @@ def test_transfer_archives_with_remote_path_calls_borg_with_remote_path_flags():
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives=None, source_repository=None),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None
),
remote_path='borg2',
)
@ -221,6 +245,7 @@ def test_transfer_archives_with_lock_wait_calls_borg_with_lock_wait_flags():
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--lock-wait', '5', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -230,7 +255,35 @@ def test_transfer_archives_with_lock_wait_calls_borg_with_lock_wait_flags():
repository='repo',
storage_config=storage_config,
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives=None, source_repository=None),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None
),
)
def test_transfer_archives_with_progress_calls_borg_with_progress_flag():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
flexmock(module.flags).should_receive('make_flags').and_return(())
flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--progress', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=module.DO_NOT_CAPTURE,
borg_local_path='borg',
extra_environment=None,
)
module.transfer_archives(
dry_run=False,
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(
archive=None, progress=True, match_archives=None, source_repository=None
),
)
@ -248,6 +301,7 @@ def test_transfer_archives_passes_through_arguments_to_borg(argument_name):
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', flag_name, 'value', '--repo', 'repo'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -258,7 +312,11 @@ def test_transfer_archives_passes_through_arguments_to_borg(argument_name):
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(
archive=None, match_archives=None, source_repository=None, **{argument_name: 'value'}
archive=None,
progress=None,
match_archives=None,
source_repository=None,
**{argument_name: 'value'},
),
)
@ -275,6 +333,7 @@ def test_transfer_archives_with_source_repository_calls_borg_with_other_repo_fla
flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--repo', 'repo', '--other-repo', 'other'),
output_log_level=module.borgmatic.logger.ANSWER,
output_file=None,
borg_local_path='borg',
extra_environment=None,
)
@ -284,5 +343,7 @@ def test_transfer_archives_with_source_repository_calls_borg_with_other_repo_fla
repository='repo',
storage_config={},
local_borg_version='2.3.4',
transfer_arguments=flexmock(archive=None, match_archives=None, source_repository='other'),
transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository='other'
),
)

View File

@ -17,7 +17,7 @@ def test_dump_databases_runs_mongodump_for_each_database():
for name, process in zip(('foo', 'bar'), processes):
flexmock(module).should_receive('execute_command').with_args(
['mongodump', '--archive', '--db', name, '>', 'databases/localhost/{}'.format(name)],
['mongodump', '--db', name, '--archive', '>', 'databases/localhost/{}'.format(name)],
shell=True,
run_to_completion=False,
).and_return(process).once()
@ -49,13 +49,13 @@ def test_dump_databases_runs_mongodump_with_hostname_and_port():
flexmock(module).should_receive('execute_command').with_args(
[
'mongodump',
'--archive',
'--host',
'database.example.org',
'--port',
'5433',
'--db',
'foo',
'--archive',
'>',
'databases/database.example.org/foo',
],
@ -85,7 +85,6 @@ def test_dump_databases_runs_mongodump_with_username_and_password():
flexmock(module).should_receive('execute_command').with_args(
[
'mongodump',
'--archive',
'--username',
'mongo',
'--password',
@ -94,6 +93,7 @@ def test_dump_databases_runs_mongodump_with_username_and_password():
'admin',
'--db',
'foo',
'--archive',
'>',
'databases/localhost/foo',
],
@ -106,7 +106,6 @@ def test_dump_databases_runs_mongodump_with_username_and_password():
def test_dump_databases_runs_mongodump_with_directory_format():
databases = [{'name': 'foo', 'format': 'directory'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
@ -115,12 +114,10 @@ def test_dump_databases_runs_mongodump_with_directory_format():
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').with_args(
['mongodump', '--archive', 'databases/localhost/foo', '--db', 'foo'],
shell=True,
run_to_completion=False,
).and_return(process).once()
['mongodump', '--out', 'databases/localhost/foo', '--db', 'foo'], shell=True,
).and_return(flexmock()).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == []
def test_dump_databases_runs_mongodump_with_options():
@ -133,7 +130,7 @@ def test_dump_databases_runs_mongodump_with_options():
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
['mongodump', '--archive', '--db', 'foo', '--stuff=such', '>', 'databases/localhost/foo'],
['mongodump', '--db', 'foo', '--stuff=such', '--archive', '>', 'databases/localhost/foo'],
shell=True,
run_to_completion=False,
).and_return(process).once()
@ -305,12 +302,12 @@ def test_restore_database_dump_with_dry_run_skips_restore():
def test_restore_database_dump_without_extract_process_restores_from_disk():
database_config = [{'name': 'foo'}]
database_config = [{'name': 'foo', 'format': 'directory'}]
flexmock(module).should_receive('make_dump_path')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path')
flexmock(module).should_receive('execute_command_with_processes').with_args(
['mongorestore', '--archive', '/dump/path', '--drop', '--db', 'foo'],
['mongorestore', '--dir', '/dump/path', '--drop', '--db', 'foo'],
processes=[],
output_log_level=logging.DEBUG,
input_file=None,

View File

@ -9,26 +9,36 @@ from borgmatic.hooks import mysql as module
def test_database_names_to_dump_passes_through_name():
extra_environment = flexmock()
log_prefix = ''
dry_run_label = ''
names = module.database_names_to_dump(
{'name': 'foo'}, extra_environment, log_prefix, dry_run_label
{'name': 'foo'}, extra_environment, log_prefix, dry_run=False
)
assert names == ('foo',)
def test_database_names_to_dump_bails_for_dry_run():
extra_environment = flexmock()
log_prefix = ''
flexmock(module).should_receive('execute_command_and_capture_output').never()
names = module.database_names_to_dump(
{'name': 'all'}, extra_environment, log_prefix, dry_run=True
)
assert names == ()
def test_database_names_to_dump_queries_mysql_for_database_names():
extra_environment = flexmock()
log_prefix = ''
dry_run_label = ''
flexmock(module).should_receive('execute_command_and_capture_output').with_args(
('mysql', '--skip-column-names', '--batch', '--execute', 'show schemas'),
extra_environment=extra_environment,
).and_return('foo\nbar\nmysql\n').once()
names = module.database_names_to_dump(
{'name': 'all'}, extra_environment, log_prefix, dry_run_label
{'name': 'all'}, extra_environment, log_prefix, dry_run=False
)
assert names == ('foo', 'bar')
@ -159,6 +169,33 @@ def test_execute_dump_command_runs_mysqldump():
)
def test_execute_dump_command_runs_mysqldump_without_add_drop_database():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
('mysqldump', '--databases', 'foo', '>', 'dump',),
shell=True,
extra_environment=None,
run_to_completion=False,
).and_return(process).once()
assert (
module.execute_dump_command(
database={'name': 'foo', 'add_drop_database': False},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
dry_run=False,
dry_run_label='',
)
== process
)
def test_execute_dump_command_runs_mysqldump_with_hostname_and_port():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
@ -296,7 +333,6 @@ def test_execute_dump_command_with_dry_run_skips_mysqldump():
def test_dump_databases_errors_for_missing_all_databases():
databases = [{'name': 'all'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/all'
@ -304,7 +340,18 @@ def test_dump_databases_errors_for_missing_all_databases():
flexmock(module).should_receive('database_names_to_dump').and_return(())
with pytest.raises(ValueError):
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run():
databases = [{'name': 'all'}]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module).should_receive('database_names_to_dump').and_return(())
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == []
def test_restore_database_dump_runs_mysql_to_restore():

View File

@ -38,6 +38,7 @@ def test_ping_monitor_minimal_config_hits_hosted_ntfy_on_fail():
flexmock(module.requests).should_receive('post').with_args(
f'{default_base_url}/{topic}',
headers=return_default_message_headers(module.monitor.State.FAIL),
auth=None,
).and_return(flexmock(ok=True)).once()
module.ping_monitor(
@ -45,6 +46,51 @@ def test_ping_monitor_minimal_config_hits_hosted_ntfy_on_fail():
)
def test_ping_monitor_with_auth_hits_hosted_ntfy_on_fail():
hook_config = {
'topic': topic,
'username': 'testuser',
'password': 'fakepassword',
}
flexmock(module.requests).should_receive('post').with_args(
f'{default_base_url}/{topic}',
headers=return_default_message_headers(module.monitor.State.FAIL),
auth=module.requests.auth.HTTPBasicAuth('testuser', 'fakepassword'),
).and_return(flexmock(ok=True)).once()
module.ping_monitor(
hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False
)
def test_ping_monitor_auth_with_no_username_warning():
hook_config = {'topic': topic, 'password': 'fakepassword'}
flexmock(module.requests).should_receive('post').with_args(
f'{default_base_url}/{topic}',
headers=return_default_message_headers(module.monitor.State.FAIL),
auth=None,
).and_return(flexmock(ok=True)).once()
flexmock(module.logger).should_receive('warning').once()
module.ping_monitor(
hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False
)
def test_ping_monitor_auth_with_no_password_warning():
hook_config = {'topic': topic, 'username': 'testuser'}
flexmock(module.requests).should_receive('post').with_args(
f'{default_base_url}/{topic}',
headers=return_default_message_headers(module.monitor.State.FAIL),
auth=None,
).and_return(flexmock(ok=True)).once()
flexmock(module.logger).should_receive('warning').once()
module.ping_monitor(
hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False
)
def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_start():
hook_config = {'topic': topic}
flexmock(module.requests).should_receive('post').never()
@ -76,6 +122,7 @@ def test_ping_monitor_minimal_config_hits_selfhosted_ntfy_on_fail():
flexmock(module.requests).should_receive('post').with_args(
f'{custom_base_url}/{topic}',
headers=return_default_message_headers(module.monitor.State.FAIL),
auth=None,
).and_return(flexmock(ok=True)).once()
module.ping_monitor(
@ -95,7 +142,7 @@ def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_fail_dry_run():
def test_ping_monitor_custom_message_hits_hosted_ntfy_on_fail():
hook_config = {'topic': topic, 'fail': custom_message_config}
flexmock(module.requests).should_receive('post').with_args(
f'{default_base_url}/{topic}', headers=custom_message_headers,
f'{default_base_url}/{topic}', headers=custom_message_headers, auth=None
).and_return(flexmock(ok=True)).once()
module.ping_monitor(
@ -108,6 +155,7 @@ def test_ping_monitor_custom_state_hits_hosted_ntfy_on_start():
flexmock(module.requests).should_receive('post').with_args(
f'{default_base_url}/{topic}',
headers=return_default_message_headers(module.monitor.State.START),
auth=None,
).and_return(flexmock(ok=True)).once()
module.ping_monitor(
@ -124,6 +172,7 @@ def test_ping_monitor_with_connection_error_logs_warning():
flexmock(module.requests).should_receive('post').with_args(
f'{default_base_url}/{topic}',
headers=return_default_message_headers(module.monitor.State.FAIL),
auth=None,
).and_raise(module.requests.exceptions.ConnectionError)
flexmock(module.logger).should_receive('warning').once()
@ -145,6 +194,7 @@ def test_ping_monitor_with_other_error_logs_warning():
flexmock(module.requests).should_receive('post').with_args(
f'{default_base_url}/{topic}',
headers=return_default_message_headers(module.monitor.State.FAIL),
auth=None,
).and_return(response)
flexmock(module.logger).should_receive('warning').once()

View File

@ -9,19 +9,32 @@ from borgmatic.hooks import postgresql as module
def test_database_names_to_dump_passes_through_individual_database_name():
database = {'name': 'foo'}
assert module.database_names_to_dump(database, flexmock(), flexmock(), flexmock()) == ('foo',)
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
'foo',
)
def test_database_names_to_dump_passes_through_individual_database_name_with_format():
database = {'name': 'foo', 'format': 'custom'}
assert module.database_names_to_dump(database, flexmock(), flexmock(), flexmock()) == ('foo',)
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
'foo',
)
def test_database_names_to_dump_passes_through_all_without_format():
database = {'name': 'all'}
assert module.database_names_to_dump(database, flexmock(), flexmock(), flexmock()) == ('all',)
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
'all',
)
def test_database_names_to_dump_with_all_and_format_and_dry_run_bails():
database = {'name': 'all', 'format': 'custom'}
flexmock(module).should_receive('execute_command_and_capture_output').never()
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=True) == ()
def test_database_names_to_dump_with_all_and_format_lists_databases():
@ -30,7 +43,7 @@ def test_database_names_to_dump_with_all_and_format_lists_databases():
'foo,test,\nbar,test,"stuff and such"'
)
assert module.database_names_to_dump(database, flexmock(), flexmock(), flexmock()) == (
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
'foo',
'bar',
)
@ -53,7 +66,7 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_hostnam
extra_environment=object,
).and_return('foo,test,\nbar,test,"stuff and such"')
assert module.database_names_to_dump(database, flexmock(), flexmock(), flexmock()) == (
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
'foo',
'bar',
)
@ -66,7 +79,7 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_usernam
extra_environment=object,
).and_return('foo,test,\nbar,test,"stuff and such"')
assert module.database_names_to_dump(database, flexmock(), flexmock(), flexmock()) == (
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
'foo',
'bar',
)
@ -79,7 +92,7 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_options
extra_environment=object,
).and_return('foo,test,\nbar,test,"stuff and such"')
assert module.database_names_to_dump(database, flexmock(), flexmock(), flexmock()) == (
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
'foo',
'bar',
)
@ -91,7 +104,9 @@ def test_database_names_to_dump_with_all_and_format_excludes_particular_database
'foo,test,\ntemplate0,test,blah'
)
assert module.database_names_to_dump(database, flexmock(), flexmock(), flexmock()) == ('foo',)
assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
'foo',
)
def test_dump_databases_runs_pg_dump_for_each_database():
@ -139,6 +154,15 @@ def test_dump_databases_raises_when_no_database_names_to_dump():
module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_dump_databases_does_not_raise_when_no_database_names_to_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(())
module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == []
def test_dump_databases_with_duplicate_dump_skips_pg_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
@ -270,7 +294,6 @@ def test_make_extra_environment_maps_options_to_environment():
def test_dump_databases_runs_pg_dump_with_directory_format():
databases = [{'name': 'foo', 'format': 'directory'}]
process = flexmock()
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
@ -295,10 +318,9 @@ def test_dump_databases_runs_pg_dump_with_directory_format():
),
shell=True,
extra_environment={'PGSSLMODE': 'disable'},
run_to_completion=False,
).and_return(process).once()
).and_return(flexmock()).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == []
def test_dump_databases_runs_pg_dump_with_options():