Compare commits


64 Commits

SHA1 Message Date
77c3161c77 Fix canonical home link in README. 2023-04-28 08:36:03 -07:00
22b84a2fea Switch to Docker Compose for dev-docs script, so podman-docker is no longer needed for Podman users. 2023-04-22 10:07:40 -07:00
5962fd473e Another try. Backing out psql error changes (#678). 2023-04-21 10:34:50 -07:00
7e64f415ba Attempt to fix failing end-to-end database test that only fails in CI. 2023-04-21 10:03:29 -07:00
ae12ccd8e6 And fixing again... 2023-04-21 09:31:37 -07:00
3cefeaa229 Fix end-to-end test command-line syntax. 2023-04-21 09:30:08 -07:00
71b75800cd Get more verbose in the end-to-end test restore. 2023-04-20 23:32:57 -07:00
9ca31530a0 Add missing test for check_all_source_directories_exist() raising. 2023-04-20 23:15:22 -07:00
b555fcb956 Add "source_directories_must_exist" expansion fix to NEWS (#682). 2023-04-20 23:08:21 -07:00
5829196b70 Expand source directories when checking for existence (#682).
Reviewed-on: borgmatic-collective/borgmatic#683
2023-04-21 06:05:59 +00:00
a14870ce48 Expand source directories when checking for existence (#682). 2023-04-21 05:52:04 +00:00
ee5c25f3bd Add additional tests for PostgreSQL hook fixes (#678). 2023-04-20 21:44:42 -07:00
da0f5a34f2 Fix multiple bugs in PostgreSQL hook (#678).
Reviewed-on: borgmatic-collective/borgmatic#677
2023-04-21 04:05:22 +00:00
065be1d9d4 More inclusive language. 2023-04-20 14:28:04 -07:00
f2f6fb537a !!! 2023-04-20 14:19:34 -07:00
7ff994a964 🤦 2023-04-20 13:56:12 -07:00
08edecacae WTF?! 2023-04-20 13:55:37 -07:00
1e03046d9a *Seriously?* 2023-04-20 13:50:26 -07:00
c9bf52ee45 Sigh again. 2023-04-20 13:46:49 -07:00
f947525fca ? 2023-04-20 13:45:26 -07:00
7f7b89d79c Trying a different approach: Ditching Podman-in-Podman. 2023-04-20 12:03:51 -07:00
499e42df35 😭 2023-04-20 11:58:06 -07:00
4302a07c9b WTF. 2023-04-20 11:53:52 -07:00
1721c05d2e Yet more. 2023-04-20 11:52:23 -07:00
8a31c27078 To see what sticks. 2023-04-20 11:50:25 -07:00
d6e1cef356 Throwing stuff at the wall. 2023-04-20 11:49:43 -07:00
f82bf619ff More. 2023-04-20 11:41:35 -07:00
02eeca1fc2 Hmm. 2023-04-20 11:36:30 -07:00
4e78cf1b95 ಠ_ಠ 2023-04-20 11:33:15 -07:00
9e9a7c50e5 😊🔫 2023-04-20 11:30:30 -07:00
51bc53e5ca Whee. 2023-04-20 11:24:59 -07:00
b85538c54c Double sigh. 2023-04-20 11:11:49 -07:00
bb5028e484 Sigh. 2023-04-20 11:11:08 -07:00
53ee0fcfad Another attempt at Podman-in-Podman incantations. 2023-04-20 11:06:15 -07:00
5f8c79dd16 Attempt to get Podman-in-Podman builds working. 2023-04-20 10:50:44 -07:00
0a6f5452f4 Fix broken Podman image name. 2023-04-19 23:16:15 -07:00
269fac074b Attempt to use Podman-in-Podman for building docs instead of Docker-in-Podman. 2023-04-19 23:14:51 -07:00
3b21ce4ce8 Rename "master" development branch to "main" to use more inclusive language (#684). 2023-04-19 21:43:08 -07:00
8bb7631f50 Fix missing mock in unit test. 2023-04-19 21:22:51 -07:00
9f5769f87b Make docs/schema a little more container agnostic / less Docker specific. 2023-04-16 15:41:17 -07:00
991e08f16d Add Unraid borgmatic installation link to docs. 2023-04-15 09:13:13 -07:00
25506b8d2c Backing out upgrade of end-to-end test packages, because apparently we can't have nice things. 2023-04-14 23:47:51 -07:00
28e62d824b Upgrade end-to-end test packages. 2023-04-14 23:28:07 -07:00
7ee37a890e Fix broken end-to-end tests by no longer using an editable package there, a work-around for https://github.com/pypa/packaging-problems/issues/609 2023-04-14 23:22:07 -07:00
8cb5a42a9e Drop deprecated pkg_resources in favor of importlib.metadata and packaging. 2023-04-14 21:21:25 -07:00
5dbb71709c Upgrade test requirements and code style requirements. Auto-reformat code accordingly. 2023-04-14 19:35:24 -07:00
1c67db5d62 Add documentation for "borgmatic restore --schema" (#375). 2023-04-14 16:40:58 -07:00
96d4a8ee45 Add "borgmatic restore --schema" flag to NEWS (#375). 2023-04-14 16:33:06 -07:00
81e167959b feat: restore specific schemas (#375).
Merge pull request #67 from diivi/feat/restore-specific-schemas
2023-04-14 16:26:25 -07:00
f273e82d74 add tests 2023-04-15 02:57:51 +05:30
17f122bfe5 Use psql instead of pg_restore when format is "plain"
pg_restore: error: input file appears to be a text format dump. Please use psql.
2023-04-14 17:38:19 +02:00
f0f43174c6 Swap if-else in restore_database_dump in postgresql hook for cleanliness 2023-04-14 17:38:19 +02:00
dfccc1b94a Exit on error when restoring all PostgreSQL databases
"--set ON_ERROR_STOP=on" is equivalent to "--exit-on-error" in
pg_restore.
2023-04-14 17:38:18 +02:00
195024e505 Fix psql_command and pg_restore_command to accept command with arguments
These commands are executed without `shell=True`, so the subprocess
module treats e.g. "docker exec my_pg_container psql" as a single command
(resulting in Errno 2 "No such file or directory") instead of a command
with arguments.
2023-04-14 17:37:38 +02:00
19a00371f5 Run "psql" with "--no-psqlrc"
Some settings in user's .psqlrc, e.g. "linestyle unicode", may break the
CSV output. "--no-psqlrc" tells psql to not read startup file.

This is not necessary for the analyze_command and restore_command (with
all_databases), but it's generally recommended when running psql from a
script.
2023-04-14 17:37:37 +02:00
874fba7672 Fix PostgreSQL hook not using "psql_command" for list when dumping "all" 2023-04-14 15:13:49 +02:00
50b0a9ce38 Remove newline at end of file. 2023-04-13 19:13:50 -07:00
8802f6888e Fix "TypeError: 'module' object is not callable" in test_commands.py' (#676).
Reviewed-on: borgmatic-collective/borgmatic#676
2023-04-14 02:12:58 +00:00
ebe5c5e839 Fix "TypeError: 'module' object is not callable" in test_commands.py 2023-04-14 01:01:31 +00:00
613f6c602c Bump version for release. 2023-04-13 15:12:19 -07:00
4a94c2c9bf Selectively omit list values when including configuration files (#672). 2023-04-13 14:39:36 -07:00
2fea429d78 collection restore for mongodb 2023-04-12 09:34:19 +05:30
264cebd2b1 complete psql multi schema backup 2023-04-11 23:19:49 +05:30
9bc2322f9a feat: restore specific schemas 2023-04-06 02:10:36 +05:30
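
Commit 195024e505 above explains the failure mode it fixes: without `shell=True`, the subprocess module treats a multi-word setting such as "docker exec my_pg_container psql" as one executable name and fails with Errno 2. A minimal sketch of that difference, not borgmatic's actual hook code (the container name is just the one used in the configuration examples):

```python
# Minimal sketch of the bug class described in commit 195024e505 and the
# shlex-based fix. The configured command below is hypothetical.
import shlex
import subprocess

configured_command = 'docker exec my_pg_container psql'

# Without splitting, subprocess looks for a single executable literally named
# "docker exec my_pg_container psql" and raises FileNotFoundError (Errno 2):
# subprocess.run([configured_command, '--list'])

# Splitting the configured string into arguments first lets the wrapper run:
subprocess.run(shlex.split(configured_command) + ['--list', '--no-password'])
```
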
73 changed files with 1454 additions and 315 deletions

@@ -1,19 +1,20 @@
 ---
 kind: pipeline
 name: python-3-8-alpine-3-13
 
 services:
   - name: postgresql
-    image: postgres:13.1-alpine
+    image: docker.io/postgres:13.1-alpine
     environment:
       POSTGRES_PASSWORD: test
       POSTGRES_DB: test
   - name: mysql
-    image: mariadb:10.5
+    image: docker.io/mariadb:10.5
     environment:
       MYSQL_ROOT_PASSWORD: test
       MYSQL_DATABASE: test
   - name: mongodb
-    image: mongo:5.0.5
+    image: docker.io/mongo:5.0.5
     environment:
       MONGO_INITDB_ROOT_USERNAME: root
       MONGO_INITDB_ROOT_PASSWORD: test
@@ -23,7 +24,7 @@ clone:
 steps:
   - name: build
-    image: alpine:3.13
+    image: docker.io/alpine:3.13
     environment:
       TEST_CONTAINER: true
     pull: always
@@ -32,27 +33,32 @@ steps:
 ---
 kind: pipeline
 name: documentation
+type: exec
+platform:
+  os: linux
+  arch: amd64
 
 clone:
   skip_verify: true
 
 steps:
   - name: build
-    image: plugins/docker
-    settings:
-      username:
-        from_secret: docker_username
-      password:
-        from_secret: docker_password
-      registry: projects.torsion.org
-      repo: projects.torsion.org/borgmatic-collective/borgmatic
-      tags: docs
-      dockerfile: docs/Dockerfile
+    environment:
+      USERNAME:
+        from_secret: docker_username
+      PASSWORD:
+        from_secret: docker_password
+      IMAGE_NAME: projects.torsion.org/borgmatic-collective/borgmatic:docs
+    commands:
+      - podman login --username "$USERNAME" --password "$PASSWORD" projects.torsion.org
+      - podman build --tag "$IMAGE_NAME" --file docs/Dockerfile --storage-opt "overlay.mount_program=/usr/bin/fuse-overlayfs" .
+      - podman push "$IMAGE_NAME"
 
 trigger:
   repo:
     - borgmatic-collective/borgmatic
   branch:
-    - master
+    - main
   event:
     - push

NEWS

@@ -1,4 +1,17 @@
-1.7.12.dev0
+1.7.13.dev0
+ * #375: Restore particular PostgreSQL schemas from a database dump via "borgmatic restore --schema"
+   flag. See the documentation for more information:
+   https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#restore-particular-schemas
+ * #678: Fix error from PostgreSQL when dumping a database with a "format" of "plain".
+ * #678: Fix PostgreSQL hook to support "psql_command" and "pg_restore_command" options containing
+   commands with arguments.
+ * #678: Fix calls to psql in PostgreSQL hook to ignore "~/.psqlrc", whose settings can break
+   database dumping.
+ * #682: Fix "source_directories_must_exist" option to expand globs and tildes in source directories.
+ * #684: Rename "master" development branch to "main" to use more inclusive language. You'll need to
+   update your development checkouts accordingly.
 
 1.7.12
  * #413: Add "log_file" context to command hooks so your scripts can consume the borgmatic log file.
   See the documentation for more information:
   https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/
@@ -10,6 +23,9 @@
  * #672: Selectively shallow merge certain mappings or sequences when including configuration files.
   See the documentation for more information:
   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#shallow-merge
+ * #672: Selectively omit list values when including configuration files. See the documentation for
+   more information:
+   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#list-merge
  * #673: View the results of configuration file merging via "validate-borgmatic-config --show" flag.
   See the documentation for more information:
   https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#debugging-includes
@@ -359,7 +375,7 @@
  * #398: Clarify canonical home of borgmatic in documentation.
  * #406: Clarify that spaces in path names should not be backslashed in path names.
  * #423: Fix error handling to error loudly when Borg gets killed due to running out of memory!
- * Fix build so as not to attempt to build and push documentation for a non-master branch.
+ * Fix build so as not to attempt to build and push documentation for a non-main branch.
  * "Fix" build failure with Alpine Edge by switching from Edge to Alpine 3.13.
  * Move #borgmatic IRC channel from Freenode to Libera Chat due to Freenode takeover drama.
   IRC connection info: https://torsion.org/borgmatic/#issues

@@ -11,7 +11,7 @@ borgmatic is simple, configuration-driven backup software for servers and
 workstations. Protect your files with client-side encryption. Backup your
 databases too. Monitor it all with integrated third-party services.
 
-The canonical home of borgmatic is at <a href="https://torsion.org/borgmatic">https://torsion.org/borgmatic</a>.
+The canonical home of borgmatic is at <a href="https://torsion.org/borgmatic">https://torsion.org/borgmatic/</a>
 
 Here's an example configuration file:
@@ -165,5 +165,5 @@ Also, please check out the [borgmatic development
 how-to](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/) for
 info on cloning source code, running tests, etc.
 
-<a href="https://build.torsion.org/borgmatic-collective/borgmatic" alt="build status">![Build Status](https://build.torsion.org/api/badges/borgmatic-collective/borgmatic/status.svg?ref=refs/heads/master)</a>
+<a href="https://build.torsion.org/borgmatic-collective/borgmatic" alt="build status">![Build Status](https://build.torsion.org/api/badges/borgmatic-collective/borgmatic/status.svg?ref=refs/heads/main)</a>

@@ -7,8 +7,8 @@ permalink: security-policy/index.html
 While we want to hear about security vulnerabilities in all versions of
 borgmatic, security fixes are only made to the most recently released version.
-It's simply not practical for our small volunteer effort to maintain multiple
-release branches and put out separate security patches for each.
+It's not practical for our small volunteer effort to maintain multiple release
+branches and put out separate security patches for each.
 
 ## Reporting a vulnerability

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 def run_borg(
-    repository, storage, local_borg_version, borg_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    borg_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "borg" action for the given repository.

@@ -7,7 +7,12 @@ logger = logging.getLogger(__name__)
 def run_break_lock(
-    repository, storage, local_borg_version, break_lock_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    break_lock_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "break-lock" action for the given repository.

@@ -9,7 +9,12 @@ logger = logging.getLogger(__name__)
 def run_info(
-    repository, storage, local_borg_version, info_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    info_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "info" action for the given repository and archive.

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 def run_list(
-    repository, storage, local_borg_version, list_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    list_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "list" action for the given repository and archive.

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 def run_mount(
-    repository, storage, local_borg_version, mount_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    mount_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "mount" action for the given repository.

@@ -114,7 +114,13 @@ def restore_single_database(
 def collect_archive_database_names(
-    repository, archive, location, storage, local_borg_version, local_path, remote_path,
+    repository,
+    archive,
+    location,
+    storage,
+    local_borg_version,
+    local_path,
+    remote_path,
 ):
     '''
     Given a local or remote repository path, a resolved archive name, a location configuration dict,
@@ -180,7 +186,7 @@ def find_databases_to_restore(requested_database_names, archive_database_names):
     if 'all' in restore_names[UNSPECIFIED_HOOK]:
         restore_names[UNSPECIFIED_HOOK].remove('all')
-        for (hook_name, database_names) in archive_database_names.items():
+        for hook_name, database_names in archive_database_names.items():
            restore_names.setdefault(hook_name, []).extend(database_names)
    # If a database is to be restored as part of "all", then remove it from restore names so
@@ -313,7 +319,7 @@ def run_restore(
                 remote_path,
                 archive_name,
                 found_hook_name or hook_name,
-                found_database,
+                dict(found_database, **{'schemas': restore_arguments.schemas}),
             )
     # For any database that weren't found via exact matches in the hooks configuration, try to
@@ -342,7 +348,7 @@ def run_restore(
                 remote_path,
                 archive_name,
                 found_hook_name or hook_name,
-                database,
+                dict(database, **{'schemas': restore_arguments.schemas}),
             )
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 def run_rinfo(
-    repository, storage, local_borg_version, rinfo_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    rinfo_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "rinfo" action for the given repository.

@@ -8,7 +8,12 @@ logger = logging.getLogger(__name__)
 def run_rlist(
-    repository, storage, local_borg_version, rlist_arguments, local_path, remote_path,
+    repository,
+    storage,
+    local_borg_version,
+    rlist_arguments,
+    local_path,
+    remote_path,
 ):
     '''
     Run the "rlist" action for the given repository.

@@ -7,7 +7,11 @@ logger = logging.getLogger(__name__)
 def break_lock(
-    repository_path, storage_config, local_borg_version, local_path='borg', remote_path=None,
+    repository_path,
+    storage_config,
+    local_borg_version,
+    local_path='borg',
+    remote_path=None,
 ):
     '''
     Given a local or remote repository path, a storage configuration dict, the local Borg version,

@@ -314,7 +314,7 @@ def check_all_source_directories_exist(source_directories):
     missing_directories = [
         source_directory
         for source_directory in source_directories
-        if not os.path.exists(source_directory)
+        if not all([os.path.exists(directory) for directory in expand_directory(source_directory)])
     ]
     if missing_directories:
         raise ValueError(f"Source directories do not exist: {', '.join(missing_directories)}")
@@ -509,7 +509,9 @@ def create_archive(
         )
     elif output_log_level is None:
         return execute_command_and_capture_output(
-            create_command, working_directory=working_directory, extra_environment=borg_environment,
+            create_command,
+            working_directory=working_directory,
+            extra_environment=borg_environment,
         )
     else:
         execute_command(
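
The existence-check change above (#682) runs each configured source directory through expand_directory() before testing whether it exists. That helper isn't shown in this diff, so the following is only a rough stand-in for the behavior, assuming tilde and glob expansion similar to what borgmatic does elsewhere:

```python
# Rough stand-in (not borgmatic's expand_directory()): expand tildes and globs
# in a configured source directory, then require every match to exist.
import glob
import os


def source_directory_exists(source_directory):
    expanded = os.path.expanduser(source_directory)
    matches = glob.glob(expanded) or [expanded]

    return all(os.path.exists(path) for path in matches)
```
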

@@ -45,7 +45,11 @@ def export_tar_archive(
         + (('--dry-run',) if dry_run else ())
         + (('--tar-filter', tar_filter) if tar_filter else ())
         + (('--strip-components', str(strip_components)) if strip_components else ())
-        + flags.make_repository_archive_flags(repository_path, archive, local_borg_version,)
+        + flags.make_repository_archive_flags(
+            repository_path,
+            archive,
+            local_borg_version,
+        )
         + (destination_path,)
         + (tuple(paths) if paths else ())
     )

@@ -108,7 +108,11 @@ def extract_archive(
         + (('--strip-components', str(strip_components)) if strip_components else ())
         + (('--progress',) if progress else ())
         + (('--stdout',) if extract_to_stdout else ())
-        + flags.make_repository_archive_flags(repository, archive, local_borg_version,)
+        + flags.make_repository_archive_flags(
+            repository,
+            archive,
+            local_borg_version,
+        )
         + (tuple(paths) if paths else ())
     )

@@ -1,6 +1,6 @@
 from enum import Enum
 
-from pkg_resources import parse_version
+from packaging.version import parse
 
 
 class Feature(Enum):
@@ -18,17 +18,17 @@ class Feature(Enum):
 FEATURE_TO_MINIMUM_BORG_VERSION = {
-    Feature.COMPACT: parse_version('1.2.0a2'),  # borg compact
-    Feature.ATIME: parse_version('1.2.0a7'),  # borg create --atime
-    Feature.NOFLAGS: parse_version('1.2.0a8'),  # borg create --noflags
-    Feature.NUMERIC_IDS: parse_version('1.2.0b3'),  # borg create/extract/mount --numeric-ids
-    Feature.UPLOAD_RATELIMIT: parse_version('1.2.0b3'),  # borg create --upload-ratelimit
-    Feature.SEPARATE_REPOSITORY_ARCHIVE: parse_version('2.0.0a2'),  # --repo with separate archive
-    Feature.RCREATE: parse_version('2.0.0a2'),  # borg rcreate
-    Feature.RLIST: parse_version('2.0.0a2'),  # borg rlist
-    Feature.RINFO: parse_version('2.0.0a2'),  # borg rinfo
-    Feature.MATCH_ARCHIVES: parse_version('2.0.0b3'),  # borg --match-archives
-    Feature.EXCLUDED_FILES_MINUS: parse_version('2.0.0b5'),  # --list --filter uses "-" for excludes
+    Feature.COMPACT: parse('1.2.0a2'),  # borg compact
+    Feature.ATIME: parse('1.2.0a7'),  # borg create --atime
+    Feature.NOFLAGS: parse('1.2.0a8'),  # borg create --noflags
+    Feature.NUMERIC_IDS: parse('1.2.0b3'),  # borg create/extract/mount --numeric-ids
+    Feature.UPLOAD_RATELIMIT: parse('1.2.0b3'),  # borg create --upload-ratelimit
+    Feature.SEPARATE_REPOSITORY_ARCHIVE: parse('2.0.0a2'),  # --repo with separate archive
+    Feature.RCREATE: parse('2.0.0a2'),  # borg rcreate
+    Feature.RLIST: parse('2.0.0a2'),  # borg rlist
+    Feature.RINFO: parse('2.0.0a2'),  # borg rinfo
+    Feature.MATCH_ARCHIVES: parse('2.0.0b3'),  # borg --match-archives
+    Feature.EXCLUDED_FILES_MINUS: parse('2.0.0b5'),  # --list --filter uses "-" for excludes
 }
@@ -37,4 +37,4 @@ def available(feature, borg_version):
     Given a Borg Feature constant and a Borg version string, return whether that feature is
     available in that version of Borg.
     '''
-    return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse_version(borg_version)
+    return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse(borg_version)
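
Since the feature table above now relies on packaging.version.parse() instead of the deprecated pkg_resources.parse_version(), here's a quick sanity check (assuming the packaging library is installed) that Borg's alpha/beta pre-release versions still compare the way the feature gate expects:

```python
# Sanity check of the packaging-based comparison used above: pre-releases sort
# before their corresponding final releases, so feature gating keeps working.
from packaging.version import parse

assert parse('1.2.0a2') <= parse('1.2.0')
assert parse('2.0.0b3') <= parse('2.0.0b5') <= parse('2.0.0')
```
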

@@ -62,7 +62,8 @@ def display_archives_info(
     if info_arguments.json:
         return execute_command_and_capture_output(
-            full_command, extra_environment=environment.make_environment(storage_config),
+            full_command,
+            extra_environment=environment.make_environment(storage_config),
         )
     else:
         execute_command(

@@ -50,7 +50,8 @@ def display_repository_info(
     if rinfo_arguments.json:
         return execute_command_and_capture_output(
-            full_command, extra_environment=extra_environment,
+            full_command,
+            extra_environment=extra_environment,
         )
     else:
         execute_command(

@@ -17,8 +17,8 @@ def resolve_archive_name(
 ):
     '''
     Given a local or remote repository path, an archive name, a storage config dict, a local Borg
-    path, and a remote Borg path, simply return the archive name. But if the archive name is
-    "latest", then instead introspect the repository for the latest archive and return its name.
+    path, and a remote Borg path, return the archive name. But if the archive name is "latest",
+    then instead introspect the repository for the latest archive and return its name.
 
     Raise ValueError if "latest" is given but there are no archives in the repository.
     '''
@@ -40,7 +40,8 @@ def resolve_archive_name(
     )
     output = execute_command_and_capture_output(
-        full_command, extra_environment=environment.make_environment(storage_config),
+        full_command,
+        extra_environment=environment.make_environment(storage_config),
     )
     try:
         latest_archive = output.strip().splitlines()[-1]

@@ -19,7 +19,8 @@ def local_borg_version(storage_config, local_path='borg'):
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
     )
     output = execute_command_and_capture_output(
-        full_command, extra_environment=environment.make_environment(storage_config),
+        full_command,
+        extra_environment=environment.make_environment(storage_config),
     )
     try:

@@ -178,7 +178,9 @@ def make_parsers():
         help='Log verbose progress to monitoring integrations that support logging (from only errors to very verbose: -1, 0, 1, or 2)',
     )
     global_group.add_argument(
-        '--log-file', type=str, help='Write log messages to this file instead of syslog',
+        '--log-file',
+        type=str,
+        help='Write log messages to this file instead of syslog',
     )
     global_group.add_argument(
         '--log-file-format',
@@ -258,10 +260,13 @@ def make_parsers():
         help='Copy the crypt key used for authenticated encryption from the source repository, defaults to a new random key (Borg 2.x+ only)',
     )
     rcreate_group.add_argument(
-        '--append-only', action='store_true', help='Create an append-only repository',
+        '--append-only',
+        action='store_true',
+        help='Create an append-only repository',
     )
     rcreate_group.add_argument(
-        '--storage-quota', help='Create a repository with a fixed storage quota',
+        '--storage-quota',
+        help='Create a repository with a fixed storage quota',
     )
     rcreate_group.add_argument(
         '--make-parent-dirs',
@@ -629,6 +634,13 @@ def make_parsers():
         dest='databases',
         help="Names of databases to restore from archive, defaults to all databases. Note that any databases to restore must be defined in borgmatic's configuration",
     )
+    restore_group.add_argument(
+        '--schema',
+        metavar='NAME',
+        nargs='+',
+        dest='schemas',
+        help='Names of schemas to restore from the database, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases',
+    )
     restore_group.add_argument(
         '-h', '--help', action='help', help='Show this help message and exit'
     )
@@ -642,7 +654,8 @@ def make_parsers():
     )
     rlist_group = rlist_parser.add_argument_group('rlist arguments')
     rlist_group.add_argument(
-        '--repository', help='Path of repository to list, defaults to the configured repositories',
+        '--repository',
+        help='Path of repository to list, defaults to the configured repositories',
     )
     rlist_group.add_argument(
         '--short', default=False, action='store_true', help='Output only archive names'

@@ -8,7 +8,11 @@ from queue import Queue
 from subprocess import CalledProcessError
 
 import colorama
-import pkg_resources
+
+try:
+    import importlib_metadata
+except ModuleNotFoundError:  # pragma: nocover
+    import importlib.metadata as importlib_metadata
 
 import borgmatic.actions.borg
 import borgmatic.actions.break_lock
@@ -103,7 +107,9 @@ def run_configuration(config_filename, config, arguments):
     if not encountered_error:
         repo_queue = Queue()
         for repo in location['repositories']:
-            repo_queue.put((repo, 0),)
+            repo_queue.put(
+                (repo, 0),
+            )
         while not repo_queue.empty():
             repository, retry_num = repo_queue.get()
@@ -128,7 +134,9 @@ def run_configuration(config_filename, config, arguments):
                 )
             except (OSError, CalledProcessError, ValueError) as error:
                 if retry_num < retries:
-                    repo_queue.put((repository, retry_num + 1),)
+                    repo_queue.put(
+                        (repository, retry_num + 1),
+                    )
                     tuple(  # Consume the generator so as to trigger logging.
                         log_error_records(
                             f'{repository["path"]}: Error running actions for repository',
@@ -279,7 +287,7 @@ def run_actions(
         **hook_context,
     )
-    for (action_name, action_arguments) in arguments.items():
+    for action_name, action_arguments in arguments.items():
         if action_name == 'rcreate':
             borgmatic.actions.rcreate.run_rcreate(
                 repository,
@@ -408,19 +416,39 @@ def run_actions(
             )
         elif action_name == 'rlist':
             yield from borgmatic.actions.rlist.run_rlist(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
             )
         elif action_name == 'list':
             yield from borgmatic.actions.list.run_list(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
            )
         elif action_name == 'rinfo':
             yield from borgmatic.actions.rinfo.run_rinfo(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
            )
         elif action_name == 'info':
             yield from borgmatic.actions.info.run_info(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
            )
         elif action_name == 'break-lock':
             borgmatic.actions.break_lock.run_break_lock(
@@ -433,7 +461,12 @@ def run_actions(
            )
         elif action_name == 'borg':
             borgmatic.actions.borg.run_borg(
-                repository, storage, local_borg_version, action_arguments, local_path, remote_path,
+                repository,
+                storage,
+                local_borg_version,
+                action_arguments,
+                local_path,
+                remote_path,
            )
     command.execute_hook(
@@ -626,7 +659,8 @@ def collect_configuration_run_summary_logs(configs, arguments):
         logger.info(f"Unmounting mount point {arguments['umount'].mount_point}")
         try:
             borg_umount.unmount_archive(
-                mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs),
+                mount_point=arguments['umount'].mount_point,
+                local_path=get_local_path(configs),
             )
         except (CalledProcessError, OSError) as error:
             yield from log_error_records('Error unmounting mount point', error)
@@ -676,7 +710,7 @@ def main():  # pragma: no cover
     global_arguments = arguments['global']
     if global_arguments.version:
-        print(pkg_resources.require('borgmatic')[0].version)
+        print(importlib_metadata.version('borgmatic'))
         sys.exit(0)
     if global_arguments.bash_completion:
         print(borgmatic.commands.completion.bash_completion())

@@ -260,7 +260,7 @@ def merge_source_configuration_into_destination(destination_config, source_config
             )
             continue
-        # This is some sort of scalar. Simply set it into the destination.
+        # This is some sort of scalar. Set it into the destination.
         destination_config[field_name] = source_config[field_name]
     return destination_config

@@ -38,9 +38,9 @@ def include_configuration(loader, filename_node, include_directory):
     return load_configuration(include_filename)
 
-def retain_node_error(loader, node):
+def raise_retain_node_error(loader, node):
     '''
-    Given a ruamel.yaml.loader.Loader and a YAML node, raise an error.
+    Given a ruamel.yaml.loader.Loader and a YAML node, raise an error about "!retain" usage.
 
     Raise ValueError if a mapping or sequence node is given, as that indicates that "!retain" was
     used in a configuration file without a merge. In configuration files with a merge, mapping and
@@ -56,6 +56,19 @@ def retain_node_error(loader, node):
     raise ValueError('The !retain tag may only be used on a YAML mapping or sequence.')
 
+def raise_omit_node_error(loader, node):
+    '''
+    Given a ruamel.yaml.loader.Loader and a YAML node, raise an error about "!omit" usage.
+
+    Raise ValueError unconditionally, as an "!omit" node here indicates it was used in a
+    configuration file without a merge. In configuration files with a merge, nodes with "!omit"
+    tags are handled by deep_merge_nodes() below.
+    '''
+    raise ValueError(
+        'The !omit tag may only be used on a scalar (e.g., string) list element within a configuration file containing a merged !include tag.'
+    )
+
 class Include_constructor(ruamel.yaml.SafeConstructor):
     '''
     A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including
@@ -68,7 +81,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
             '!include',
             functools.partial(include_configuration, include_directory=include_directory),
         )
-        self.add_constructor('!retain', retain_node_error)
+        self.add_constructor('!retain', raise_retain_node_error)
+        self.add_constructor('!omit', raise_omit_node_error)
 
     def flatten_mapping(self, node):
         '''
@@ -107,6 +121,7 @@ def load_configuration(filename):
     Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
     if there are too many recursive includes.
     '''
     # Use an embedded derived class for the include constructor so as to capture the filename
     # value. (functools.partial doesn't work for this use case because yaml.Constructor has to be
     # an actual class.)
@@ -134,6 +149,16 @@ def load_configuration(filename):
     return config
 
+def filter_omitted_nodes(nodes):
+    '''
+    Given a list of nodes, return a filtered list omitting any nodes with an "!omit" tag or with a
+    value matching such nodes.
+    '''
+    omitted_values = tuple(node.value for node in nodes if node.tag == '!omit')
+
+    return [node for node in nodes if node.value not in omitted_values]
+
 DELETED_NODE = object()
@@ -247,7 +272,7 @@ def deep_merge_nodes(nodes):
                         b_key,
                         ruamel.yaml.nodes.SequenceNode(
                             tag=b_value.tag,
-                            value=a_value.value + b_value.value,
+                            value=filter_omitted_nodes(a_value.value + b_value.value),
                             start_mark=b_value.start_mark,
                             end_mark=b_value.end_mark,
                             flow_style=b_value.flow_style,

@@ -81,7 +81,10 @@ def normalize(config_filename, config):
                     repository_path.partition('file://')[-1]
                 )
                 config['location']['repositories'].append(
-                    dict(repository_dict, path=updated_repository_path,)
+                    dict(
+                        repository_dict,
+                        path=updated_repository_path,
+                    )
                 )
             elif repository_path.startswith('ssh://'):
                 config['location']['repositories'].append(repository_dict)
@@ -97,7 +100,10 @@ def normalize(config_filename, config):
                     )
                 )
                 config['location']['repositories'].append(
-                    dict(repository_dict, path=rewritten_repository_path,)
+                    dict(
+                        repository_dict,
+                        path=rewritten_repository_path,
+                    )
                 )
             else:
                 config['location']['repositories'].append(repository_dict)

@@ -57,7 +57,12 @@ def parse_overrides(raw_overrides):
     for raw_override in raw_overrides:
         try:
             raw_keys, value = raw_override.split('=', 1)
-            parsed_overrides.append((tuple(raw_keys.split('.')), convert_value_type(value),))
+            parsed_overrides.append(
+                (
+                    tuple(raw_keys.split('.')),
+                    convert_value_type(value),
+                )
+            )
         except ValueError:
             raise ValueError(
                 f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE"
@@ -75,5 +80,5 @@ def apply_overrides(config, raw_overrides):
     '''
     overrides = parse_overrides(raw_overrides)
-    for (keys, value) in overrides:
+    for keys, value in overrides:
         set_values(config, keys, value)

@@ -60,7 +60,7 @@ properties:
                 or port. If systemd service is used, then add local
                 repository paths in the systemd service file to the
                 ReadWritePaths list. Prior to borgmatic 1.7.10, repositories
-                was just a list of plain path strings.
+                was a list of plain path strings.
             example:
                 - path: ssh://user@backupserver/./sourcehostname.borg
                   label: backupserver
@@ -836,25 +836,25 @@ properties:
                 Command to use instead of "pg_dump" or
                 "pg_dumpall". This can be used to run a specific
                 pg_dump version (e.g., one inside a running
-                docker container). Defaults to "pg_dump" for
-                single database dump or "pg_dumpall" to dump
-                all databases.
+                container). Defaults to "pg_dump" for single
+                database dump or "pg_dumpall" to dump all
+                databases.
             example: docker exec my_pg_container pg_dump
         pg_restore_command:
             type: string
             description: |
                 Command to use instead of "pg_restore". This
                 can be used to run a specific pg_restore
-                version (e.g., one inside a running docker
-                container). Defaults to "pg_restore".
+                version (e.g., one inside a running container).
+                Defaults to "pg_restore".
             example: docker exec my_pg_container pg_restore
         psql_command:
            type: string
            description: |
                 Command to use instead of "psql". This can be
                 used to run a specific psql version (e.g.,
-                one inside a running docker container).
-                Defaults to "psql".
+                one inside a running container). Defaults to
+                "psql".
            example: docker exec my_pg_container psql
        options:
            type: string
@@ -1216,7 +1216,7 @@ properties:
            type: string
            description: |
                Healthchecks ping URL or UUID to notify when a
-               backup begins, ends, errors or just to send logs.
+               backup begins, ends, errors, or to send only logs.
            example: https://hc-ping.com/your-uuid-here
        verify_tls:
            type: boolean

@@ -1,9 +1,13 @@
 import os
 
 import jsonschema
-import pkg_resources
 import ruamel.yaml
 
+try:
+    import importlib_metadata
+except ModuleNotFoundError:  # pragma: nocover
+    import importlib.metadata as importlib_metadata
+
 from borgmatic.config import environment, load, normalize, override
@@ -11,8 +15,17 @@ def schema_filename():
     '''
     Path to the installed YAML configuration schema file, used to validate and parse the
     configuration.
+
+    Raise FileNotFoundError when the schema path does not exist.
     '''
-    return pkg_resources.resource_filename('borgmatic', 'config/schema.yaml')
+    try:
+        return next(
+            str(path.locate())
+            for path in importlib_metadata.files('borgmatic')
+            if path.match('config/schema.yaml')
+        )
+    except StopIteration:
+        raise FileNotFoundError('Configuration file schema could not be found')
 
 def format_json_error_path_element(path_element):

@@ -236,7 +236,11 @@ def execute_command(
 def execute_command_and_capture_output(
-    full_command, capture_stderr=False, shell=False, extra_environment=None, working_directory=None,
+    full_command,
+    capture_stderr=False,
+    shell=False,
+    extra_environment=None,
+    working_directory=None,
 ):
     '''
     Execute the given command (a sequence of command/argument strings), capturing and returning its

@@ -161,4 +161,7 @@ def build_restore_command(extract_process, database, dump_filename):
         command.extend(('--authenticationDatabase', database['authentication_database']))
     if 'restore_options' in database:
         command.extend(database['restore_options'].split(' '))
+    if database['schemas']:
+        for schema in database['schemas']:
+            command.extend(('--nsInclude', schema))
 
     return command
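
The new branch above maps each requested schema to a mongorestore `--nsInclude` flag. As a pure-Python illustration of how that loop extends an already-built restore command (the base command and namespace values here are hypothetical, not taken from the hook):

```python
# Illustration only: extending a hypothetical mongorestore command the way the
# schemas loop above does. --nsInclude takes "database.collection" patterns.
command = ['mongorestore', '--archive', '--drop', '--db', 'users']

for schema in ['users.tenant1', 'users.tenant2']:
    command.extend(('--nsInclude', schema))

# command is now:
# ['mongorestore', '--archive', '--drop', '--db', 'users',
#  '--nsInclude', 'users.tenant1', '--nsInclude', 'users.tenant2']
```
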

@@ -100,7 +100,9 @@ def execute_dump_command(
     dump.create_named_pipe_for_dump(dump_filename)
 
     return execute_command(
-        dump_command, extra_environment=extra_environment, run_to_completion=False,
+        dump_command,
+        extra_environment=extra_environment,
+        run_to_completion=False,
     )

@@ -1,6 +1,8 @@
 import csv
+import itertools
 import logging
 import os
+import shlex
 
 from borgmatic.execute import (
     execute_command,
@@ -59,8 +61,10 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
     if dry_run:
         return ()
 
+    psql_command = shlex.split(database.get('psql_command') or 'psql')
     list_command = (
-        ('psql', '--list', '--no-password', '--csv', '--tuples-only')
+        tuple(psql_command)
+        + ('--list', '--no-password', '--no-psqlrc', '--csv', '--tuples-only')
         + (('--host', database['hostname']) if 'hostname' in database else ())
         + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--username', database['username']) if 'username' in database else ())
@@ -122,7 +126,12 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
             continue
 
         command = (
-            (dump_command, '--no-password', '--clean', '--if-exists',)
+            (
+                dump_command,
+                '--no-password',
+                '--clean',
+                '--if-exists',
+            )
             + (('--host', database['hostname']) if 'hostname' in database else ())
             + (('--port', str(database['port'])) if 'port' in database else ())
            + (('--username', database['username']) if 'username' in database else ())
@@ -145,7 +154,9 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
         if dump_format == 'directory':
             dump.create_parent_directory_for_dump(dump_filename)
             execute_command(
-                command, shell=True, extra_environment=extra_environment,
+                command,
+                shell=True,
+                extra_environment=extra_environment,
             )
         else:
             dump.create_named_pipe_for_dump(dump_filename)
@@ -202,9 +213,10 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     dump_filename = dump.make_database_dump_filename(
         make_dump_path(location_config), database['name'], database.get('hostname')
     )
-    psql_command = database.get('psql_command') or 'psql'
+    psql_command = shlex.split(database.get('psql_command') or 'psql')
     analyze_command = (
-        (psql_command, '--no-password', '--quiet')
+        tuple(psql_command)
+        + ('--no-password', '--no-psqlrc', '--quiet')
         + (('--host', database['hostname']) if 'hostname' in database else ())
         + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--username', database['username']) if 'username' in database else ())
@@ -212,20 +224,25 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
         + (tuple(database['analyze_options'].split(' ')) if 'analyze_options' in database else ())
         + ('--command', 'ANALYZE')
     )
-    pg_restore_command = database.get('pg_restore_command') or 'pg_restore'
+    use_psql_command = all_databases or database.get('format') == 'plain'
+    pg_restore_command = shlex.split(database.get('pg_restore_command') or 'pg_restore')
     restore_command = (
-        (psql_command if all_databases else pg_restore_command, '--no-password')
-        + (
-            ('--if-exists', '--exit-on-error', '--clean', '--dbname', database['name'])
-            if not all_databases
-            else ()
-        )
+        tuple(psql_command if use_psql_command else pg_restore_command)
+        + ('--no-password',)
+        + (('--no-psqlrc',) if use_psql_command else ('--if-exists', '--exit-on-error', '--clean'))
+        + (('--dbname', database['name']) if not all_databases else ())
         + (('--host', database['hostname']) if 'hostname' in database else ())
         + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--username', database['username']) if 'username' in database else ())
        + (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ())
        + (() if extract_process else (dump_filename,))
+        + tuple(
+            itertools.chain.from_iterable(('--schema', schema) for schema in database['schemas'])
+            if database['schemas']
+            else ()
+        )
     )
 
     extra_environment = make_extra_environment(database)
     logger.debug(f"{log_prefix}: Restoring PostgreSQL database {database['name']}{dry_run_label}")
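
Tying the restore changes above back to the commit messages: when a dump was made with a "format" of "plain" (or when restoring the combined "all" dump), the hook now builds a psql invocation instead of a pg_restore one, since pg_restore rejects plain-text dumps, and it passes `--no-psqlrc` so user startup files can't interfere. Roughly, for a hypothetical "users" database restored from an extract stream (so no dump filename is appended), the two branches come out like this:

```python
# Approximate commands produced by the logic above for a hypothetical "users"
# database; values are illustrative, not borgmatic's literal output.

# "format: plain" (or the combined "all" dump) now goes through psql:
plain_restore_command = ('psql', '--no-password', '--no-psqlrc', '--dbname', 'users')

# Custom-format dumps still go through pg_restore with its cleanup/error flags:
custom_restore_command = (
    'pg_restore', '--no-password', '--if-exists', '--exit-on-error', '--clean',
    '--dbname', 'users',
)
```
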

docs/docker-compose.yaml (new file)

@@ -0,0 +1,22 @@
+version: '3'
+services:
+  docs:
+    image: borgmatic-docs
+    container_name: docs
+    ports:
+      - 8080:80
+    build:
+      dockerfile: docs/Dockerfile
+      context: ..
+      args:
+        ENVIRONMENT: dev
+  message:
+    image: alpine
+    container_name: message
+    command:
+      - sh
+      - -c
+      - |
+        echo "You can view dev docs at http://localhost:8080"
+    depends_on:
+      - docs

@@ -138,9 +138,9 @@ hooks:
 ### Containers
 
-If your database is running within a Docker container and borgmatic is too, no
-problem—simply configure borgmatic to connect to the container's name on its
-exposed port. For instance:
+If your database is running within a container and borgmatic is too, no
+problem—configure borgmatic to connect to the container's name on its exposed
+port. For instance:
 
 ```yaml
 hooks:
@@ -154,10 +154,10 @@ hooks:
 But what if borgmatic is running on the host? You can still connect to a
 database container if its ports are properly exposed to the host. For
-instance, when running the database container with Docker, you can specify
-`--publish 127.0.0.1:5433:5432` so that it exposes the container's port 5432
-to port 5433 on the host (only reachable on localhost, in this case). Or the
-same thing with Docker Compose:
+instance, when running the database container, you can specify `--publish
+127.0.0.1:5433:5432` so that it exposes the container's port 5432 to port 5433
+on the host (only reachable on localhost, in this case). Or the same thing
+with Docker Compose:
 
 ```yaml
 services:
@@ -179,7 +179,7 @@ hooks:
       password: trustsome1
 ```
 
-Of course, alter the ports in these examples to suit your particular database
+You can alter the ports in these examples to suit your particular database
 system.
@@ -324,6 +324,17 @@ includes any combined dump file named "all" and any other individual database
 dumps found in the archive.
 
+### Restore particular schemas
+
+<span class="minilink minilink-addedin">New in version 1.7.13</span> With
+PostgreSQL and MongoDB, you can limit the restore to a single schema found
+within the database dump:
+
+```bash
+borgmatic restore --archive latest --database users --schema tentant1
+```
+
 ### Limitations
 
 There are a few important limitations with borgmatic's current database
@@ -386,9 +397,9 @@ dumps with any database system.
 With PostgreSQL and MySQL/MariaDB, if you're getting authentication errors
 when borgmatic tries to connect to your database, a natural reaction is to
 increase your borgmatic verbosity with `--verbosity 2` and go looking in the
-logs. You'll notice however that your database password does not show up in
-the logs. This is likely not the cause of the authentication problem unless
-you mistyped your password, however; borgmatic passes your password to the
+logs. You'll notice though that your database password does not show up in the
+logs. This is likely not the cause of the authentication problem unless you
+mistyped your password, however; borgmatic passes your password to the
 database via an environment variable that does not appear in the logs.
 
 The cause of an authentication error is often on the database side—in the

View File

@@ -7,7 +7,7 @@ eleventyNavigation:
---
## Source code
To get set up to hack on borgmatic, first clone it via HTTPS or SSH:
```bash
git clone https://projects.torsion.org/borgmatic-collective/borgmatic.git
@@ -87,19 +87,20 @@ tox -e codespell
borgmatic additionally includes some end-to-end tests that integration test
with Borg and supported databases for a few representative scenarios. These
tests don't run by default when running `tox`, because they're relatively slow
and depend on containers for runtime dependencies. These tests do run on the
continuous integration (CI) server, and running them on your developer machine
is the closest thing to CI-test parity.
If you would like to run the full test suite, first install Docker (or Podman;
see below) and [Docker Compose](https://docs.docker.com/compose/install/).
Then run:
```bash
scripts/run-end-to-end-dev-tests
```
This script assumes you have permission to run `docker`. If you don't, then
you may need to run with `sudo`.
#### Podman
@@ -112,13 +113,13 @@ borgmatic's end-to-end tests optionally support using
Setting up Podman is outside the scope of this documentation, but here are
some key points to double-check:
* Install Podman and your desired networking support.
* Configure `/etc/subuid` and `/etc/subgid` to map users/groups for the
  non-root user who will run tests.
* Create a non-root Podman socket for that user:
```bash
systemctl --user enable --now podman.socket
```
Then you'll be able to run end-to-end tests as per normal, and the test script
@@ -161,11 +162,12 @@ To build and view a copy of the documentation with your local changes, run the
following from the root of borgmatic's source code:
```bash
scripts/dev-docs
```
This requires Docker (or Podman; see below) to be installed on your system.
This script assumes you have permission to run `docker`. If you don't, then
you may need to run with `sudo`.
After you run the script, you can point your web browser at
http://localhost:8080 to view the documentation with your changes.
@@ -183,5 +185,5 @@ borgmatic's developer build for documentation optionally supports using
[Podman](https://podman.io/) instead of Docker.
Setting up Podman is outside the scope of this documentation. But once you
install and configure Podman, then `scripts/dev-docs` should automatically use
Podman instead of Docker.
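For reference, the socket detection these scripts perform is straightforward; here's a rough Python equivalent of the shell logic in `scripts/dev-docs` later in this diff (illustrative only): if a rootless Podman socket exists for the current user, Docker-compatible clients get pointed at it via `DOCKER_HOST`.

```python
import os

# Rough equivalent of the detection scripts/dev-docs does in shell: prefer a
# rootless Podman socket when one exists, so "docker" clients talk to Podman.
socket_path = f'/run/user/{os.getuid()}/podman/podman.sock'

if os.path.exists(socket_path):
    os.environ['DOCKER_HOST'] = f'unix://{socket_path}'
```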


@@ -169,7 +169,7 @@ brackets. For instance, the default log format is: `[{asctime}] {levelname}:
{message}`. This means each log message is recorded as the log time (in square
brackets), a logging level name, a colon, and the actual log message.
So if you only want each log message to get logged *without* a timestamp or a
logging level name:
```bash


@@ -86,8 +86,8 @@ uses the `archive_name_format` option to automatically limit which archives
get used for actions operating on multiple archives. This prevents, for
instance, duplicate archives from showing up in `rlist` or `info` results—even
if the same repository appears in multiple borgmatic configuration files. To
take advantage of this feature, use a different `archive_name_format` in each
configuration file.
Under the hood, borgmatic accomplishes this by substituting globs for certain
ephemeral data placeholders in your `archive_name_format`—and using the result
@@ -108,8 +108,8 @@ archives used for actions like `rlist`, `info`, `prune`, `check`, etc.
The end result is that when borgmatic runs the actions for a particular
application-specific configuration file, it only operates on the archives
created for that application. But this doesn't apply to actions like `compact`
that operate on an entire repository.
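To illustrate the glob substitution mentioned above, here's a simplified sketch (not borgmatic's actual implementation) in which ephemeral placeholders, assumed here to be `{now}`, `{utcnow}`, and `{pid}`, collapse into wildcards so matching ignores the parts that change on every run:

```python
import re

# Simplified sketch: replace ephemeral placeholders with "*" so the resulting
# pattern matches any archive the format would have produced. Borgmatic's real
# logic lives in its flags module and handles more cases than this.
def format_to_match_pattern(archive_name_format):
    return re.sub(r'\{(now|utcnow|pid)([:}][^}]*)?\}', '*', archive_name_format)

assert format_to_match_pattern('{hostname}-docs-{now}') == '{hostname}-docs-*'
```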
If this behavior isn't quite smart enough for your needs, you can use the
`match_archives` option to override the pattern that borgmatic uses for
@@ -272,9 +272,65 @@ Once this include gets merged in, the resulting configuration would have a
When there's an option collision between the local file and the merged
include, the local file's option takes precedence.
#### List merge
<span class="minilink minilink-addedin">New in version 1.6.1</span> Colliding
list values are appended together.
<span class="minilink minilink-addedin">New in version 1.7.12</span> If there
is a list value from an include that you *don't* want in your local
configuration file, you can omit it with an `!omit` tag. For instance:
```yaml
<<: !include /etc/borgmatic/common.yaml
location:
    source_directories:
        - !omit /home
        - /var
```
And `common.yaml` like this:
```yaml
location:
    source_directories:
        - /home
        - /etc
```
Once this include gets merged in, the resulting configuration will have a
`source_directories` value of `/etc` and `/var`—with `/home` omitted.
This feature currently only works on scalar (e.g. string or number) list items
and will not work elsewhere in a configuration file. Be sure to put the
`!omit` tag *before* the list item (after the dash). Putting `!omit` after the
list item will not work, as it gets interpreted as part of the string. Here's
an example of some things not to do:
```yaml
<<: !include /etc/borgmatic/common.yaml
location:
    source_directories:
        # Do not do this! It will not work. "!omit" belongs before "/home".
        - /home !omit

    # Do not do this either! "!omit" only works on scalar list items.
    repositories: !omit
        # Also do not do this for the same reason! This is a list item, but it's
        # not a scalar.
        - !omit path: repo.borg
```
Additionally, the `!omit` tag only works in a configuration file that also
performs a merge include with `<<: !include`. It doesn't make sense within,
for instance, an included configuration file itself (unless it in turn
performs its own merge include). That's because `!omit` only applies to the
file doing the include; it doesn't work in reverse or propagate through
includes.
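Conceptually, the merge treats `!omit` entries in the local file as a filter over the included list. Here's a minimal sketch of that idea using plain Python values (borgmatic actually operates on `ruamel.yaml` nodes, as the tests later in this diff show):

```python
# Minimal sketch of the !omit list-merge semantics described above, using plain
# values instead of the ruamel.yaml nodes borgmatic really works with.
def merge_lists(included_values, local_tagged_values):
    omitted = {value for tag, value in local_tagged_values if tag == '!omit'}
    kept_local = [value for tag, value in local_tagged_values if tag != '!omit']
    return [value for value in included_values if value not in omitted] + kept_local

# Mirrors the example above: /home from the include gets omitted.
assert merge_lists(['/home', '/etc'], [('!omit', '/home'), (None, '/var')]) == ['/etc', '/var']
```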
### Shallow merge
@@ -296,7 +352,7 @@ on the `retention` mapping:
location:
    repositories:
        - path: repo.borg

retention: !retain
    keep_daily: 5
@@ -307,7 +363,7 @@ And `common.yaml` like this:
```yaml
location:
    repositories:
        - path: common.borg

retention:
    keep_hourly: 24


@@ -82,8 +82,8 @@ on a relatively dedicated system, then a global install can work out fine.
Besides the approaches described above, there are several other options for
installing borgmatic:
* [container image with scheduled backups](https://hub.docker.com/r/b3vis/borgmatic/) (+ Docker Compose files)
* [container image with multi-arch and Docker CLI support](https://hub.docker.com/r/modem7/borgmatic-docker/)
* [Debian](https://tracker.debian.org/pkg/borgmatic)
* [Ubuntu](https://launchpad.net/ubuntu/+source/borgmatic)
* [Fedora official](https://bodhi.fedoraproject.org/updates/?search=borgmatic)
@@ -96,7 +96,7 @@ installing borgmatic:
* [macOS (via MacPorts)](https://ports.macports.org/port/borgmatic/)
* [NixOS](https://search.nixos.org/packages?show=borgmatic&sort=relevance&type=packages&query=borgmatic)
* [Ansible role](https://github.com/borgbase/ansible-role-borgbackup)
* [Unraid](https://unraid.net/community/apps?q=borgmatic#r)
## Hosting providers
@@ -279,7 +279,7 @@ that, you can configure a separate job runner to invoke it periodically.
### cron
If you're using cron, download the [sample cron
file](https://projects.torsion.org/borgmatic-collective/borgmatic/src/main/sample/cron/borgmatic).
Then, from the directory where you downloaded it:
```bash
@@ -303,9 +303,9 @@ you may already have borgmatic systemd service and timer files. If so, you may
be able to skip some of the steps below.)
First, download the [sample systemd service
file](https://projects.torsion.org/borgmatic-collective/borgmatic/raw/branch/main/sample/systemd/borgmatic.service)
and the [sample systemd timer
file](https://projects.torsion.org/borgmatic-collective/borgmatic/raw/branch/main/sample/systemd/borgmatic.timer).
Then, from the directory where you downloaded them:


@ -2,8 +2,10 @@
set -e

USER_PODMAN_SOCKET_PATH=/run/user/$UID/podman/podman.sock

if [ -e "$USER_PODMAN_SOCKET_PATH" ]; then
    export DOCKER_HOST="unix://$USER_PODMAN_SOCKET_PATH"
fi

docker-compose --file docs/docker-compose.yaml up --build --force-recreate


@@ -4,22 +4,23 @@ description_file=README.md
[tool:pytest]
testpaths = tests
addopts = --cov-report term-missing:skip-covered --cov=borgmatic --ignore=tests/end-to-end
filterwarnings =
    ignore:Deprecated call to `pkg_resources.declare_namespace\('ruamel'\)`.*:DeprecationWarning

[flake8]
max-line-length = 100
extend-ignore = E203,E501,W503
exclude = *.*/*
multiline-quotes = '''
docstring-quotes = '''

[tool:isort]
profile=black
known_first_party = borgmatic
line_length = 100
skip = .tox

[codespell]
skip = .git,.tox,build

[pycodestyle]
ignore = E203
max_line_length = 100


@@ -1,6 +1,6 @@
from setuptools import find_packages, setup

VERSION = '1.7.13.dev0'

setup(
@@ -32,6 +32,7 @@ setup(
    install_requires=(
        'colorama>=0.4.1,<0.5',
        'jsonschema',
        'packaging',
        'requests',
        'ruamel.yaml>0.15.0,<0.18.0',
        'setuptools',


@@ -1,27 +1,33 @@
appdirs==1.4.4; python_version >= '3.8'
attrs==22.2.0; python_version >= '3.8'
black==23.3.0; python_version >= '3.8'
chardet==5.1.0
click==8.1.3; python_version >= '3.8'
codespell==2.2.4
colorama==0.4.6
coverage==7.2.3
flake8==6.0.0
flake8-quotes==3.3.2
flake8-use-fstring==1.4
flake8-variables-names==0.0.5
flexmock==0.11.3
idna==3.4
importlib_metadata==6.3.0; python_version < '3.8'
isort==5.12.0
mccabe==0.7.0
packaging==23.1
pluggy==1.0.0
pathspec==0.11.1; python_version >= '3.8'
py==1.11.0
pycodestyle==2.10.0
pyflakes==3.0.1
jsonschema==4.17.3
pytest==7.3.0
pytest-cov==4.0.0
regex; python_version >= '3.8'
requests==2.28.2
ruamel.yaml>0.15.0,<0.18.0
toml==0.10.2; python_version >= '3.8'
typed-ast; python_version >= '3.8'
typing-extensions==4.5.0; python_version < '3.8'
zipp==3.15.0; python_version < '3.8'


@@ -23,6 +23,7 @@ services:
      - "../..:/app:ro"
    tmpfs:
      - "/app/borgmatic.egg-info"
      - "/app/build"
    tty: true
    working_dir: /app
    entrypoint: /app/scripts/run-full-tests


@@ -118,7 +118,7 @@ def test_database_dump_and_restore():
        # Restore the database from the archive.
        subprocess.check_call(
            ['borgmatic', '-v', '2', '--config', config_path, 'restore', '--archive', archive_name]
        )
    finally:
        os.chdir(original_working_directory)


@@ -1,6 +1,6 @@
import copy

from flexmock import flexmock

import borgmatic.borg.info
import borgmatic.borg.list


@@ -211,7 +211,7 @@ def test_load_configuration_with_retain_tag_but_without_merge_include_raises():
builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
with pytest.raises(ValueError):
module.load_configuration('config.yaml')
def test_load_configuration_with_retain_tag_on_scalar_raises():
@@ -239,7 +239,156 @@ def test_load_configuration_with_retain_tag_on_scalar_raises():
builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
with pytest.raises(ValueError):
module.load_configuration('config.yaml')
def test_load_configuration_with_omit_tag_merges_include_and_omits_requested_values():
builtins = flexmock(sys.modules['builtins'])
flexmock(module.os).should_receive('getcwd').and_return('/tmp')
flexmock(module.os.path).should_receive('isabs').and_return(False)
flexmock(module.os.path).should_receive('exists').and_return(True)
include_file = io.StringIO(
'''
stuff:
- a
- b
- c
'''
)
include_file.name = 'include.yaml'
builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
config_file = io.StringIO(
'''
stuff:
- x
- !omit b
- y
<<: !include include.yaml
'''
)
config_file.name = 'config.yaml'
builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
assert module.load_configuration('config.yaml') == {'stuff': ['a', 'c', 'x', 'y']}
def test_load_configuration_with_omit_tag_on_unknown_value_merges_include_and_does_not_raise():
builtins = flexmock(sys.modules['builtins'])
flexmock(module.os).should_receive('getcwd').and_return('/tmp')
flexmock(module.os.path).should_receive('isabs').and_return(False)
flexmock(module.os.path).should_receive('exists').and_return(True)
include_file = io.StringIO(
'''
stuff:
- a
- b
- c
'''
)
include_file.name = 'include.yaml'
builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
config_file = io.StringIO(
'''
stuff:
- x
- !omit q
- y
<<: !include include.yaml
'''
)
config_file.name = 'config.yaml'
builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
assert module.load_configuration('config.yaml') == {'stuff': ['a', 'b', 'c', 'x', 'y']}
def test_load_configuration_with_omit_tag_on_non_list_item_raises():
builtins = flexmock(sys.modules['builtins'])
flexmock(module.os).should_receive('getcwd').and_return('/tmp')
flexmock(module.os.path).should_receive('isabs').and_return(False)
flexmock(module.os.path).should_receive('exists').and_return(True)
include_file = io.StringIO(
'''
stuff:
- a
- b
- c
'''
)
include_file.name = 'include.yaml'
builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
config_file = io.StringIO(
'''
stuff: !omit
- x
- y
<<: !include include.yaml
'''
)
config_file.name = 'config.yaml'
builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
with pytest.raises(ValueError):
module.load_configuration('config.yaml')
def test_load_configuration_with_omit_tag_on_non_scalar_list_item_raises():
builtins = flexmock(sys.modules['builtins'])
flexmock(module.os).should_receive('getcwd').and_return('/tmp')
flexmock(module.os.path).should_receive('isabs').and_return(False)
flexmock(module.os.path).should_receive('exists').and_return(True)
include_file = io.StringIO(
'''
stuff:
- foo: bar
baz: quux
'''
)
include_file.name = 'include.yaml'
builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
config_file = io.StringIO(
'''
stuff:
- !omit foo: bar
baz: quux
<<: !include include.yaml
'''
)
config_file.name = 'config.yaml'
builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
with pytest.raises(ValueError):
module.load_configuration('config.yaml')
def test_load_configuration_with_omit_tag_but_without_merge_raises():
builtins = flexmock(sys.modules['builtins'])
flexmock(module.os).should_receive('getcwd').and_return('/tmp')
flexmock(module.os.path).should_receive('isabs').and_return(False)
flexmock(module.os.path).should_receive('exists').and_return(True)
include_file = io.StringIO(
'''
stuff:
- a
- !omit b
- c
'''
)
include_file.name = 'include.yaml'
builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file)
config_file = io.StringIO(
'''
stuff:
- x
- y
<<: !include include.yaml
'''
)
config_file.name = 'config.yaml'
builtins.should_receive('open').with_args('config.yaml').and_return(config_file)
with pytest.raises(ValueError):
module.load_configuration('config.yaml')
def test_load_configuration_does_not_merge_include_list():
@@ -277,13 +426,33 @@ def test_load_configuration_does_not_merge_include_list():
module.ruamel.yaml.nodes.ScalarNode,
),
)
def test_raise_retain_node_error_raises(node_class):
with pytest.raises(ValueError):
module.raise_retain_node_error(
loader=flexmock(), node=node_class(tag=flexmock(), value=flexmock())
)
def test_raise_omit_node_error_raises():
with pytest.raises(ValueError):
module.raise_omit_node_error(loader=flexmock(), node=flexmock())
def test_filter_omitted_nodes():
nodes = [
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='a'),
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='b'),
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='a'),
module.ruamel.yaml.nodes.ScalarNode(tag='!omit', value='b'),
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='c'),
]
result = module.filter_omitted_nodes(nodes)
assert [item.value for item in result] == ['a', 'c', 'a', 'c']
def test_deep_merge_nodes_replaces_colliding_scalar_values():
node_values = [
(
@@ -483,7 +652,15 @@ def test_deep_merge_nodes_appends_colliding_sequence_values():
tag='tag:yaml.org,2002:str', value='before_backup'
),
module.ruamel.yaml.nodes.SequenceNode(
tag='tag:yaml.org,2002:seq',
value=[
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 1'
),
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 2'
),
],
),
),
],
@@ -499,7 +676,15 @@ def test_deep_merge_nodes_appends_colliding_sequence_values():
tag='tag:yaml.org,2002:str', value='before_backup'
),
module.ruamel.yaml.nodes.SequenceNode(
tag='tag:yaml.org,2002:seq',
value=[
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 3'
),
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 4'
),
],
),
),
],
@@ -514,10 +699,10 @@ def test_deep_merge_nodes_appends_colliding_sequence_values():
options = section_value.value
assert len(options) == 1
assert options[0][0].value == 'before_backup'
assert [item.value for item in options[0][1].value] == ['echo 1', 'echo 2', 'echo 3', 'echo 4']
def test_deep_merge_nodes_only_keeps_mapping_values_tagged_with_retain():
node_values = [
(
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'),
@@ -568,7 +753,7 @@ def test_deep_merge_nodes_keeps_mapping_values_tagged_with_retain():
assert options[0][1].value == '5'
def test_deep_merge_nodes_only_keeps_sequence_values_tagged_with_retain():
node_values = [
(
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
@@ -580,7 +765,15 @@ def test_deep_merge_nodes_keeps_sequence_values_tagged_with_retain():
tag='tag:yaml.org,2002:str', value='before_backup'
),
module.ruamel.yaml.nodes.SequenceNode(
tag='tag:yaml.org,2002:seq',
value=[
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 1'
),
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 2'
),
],
),
),
],
@@ -596,7 +789,15 @@ def test_deep_merge_nodes_keeps_sequence_values_tagged_with_retain():
tag='tag:yaml.org,2002:str', value='before_backup'
),
module.ruamel.yaml.nodes.SequenceNode(
tag='!retain',
value=[
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 3'
),
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 4'
),
],
),
),
],
@@ -612,4 +813,64 @@ def test_deep_merge_nodes_keeps_sequence_values_tagged_with_retain():
assert len(options) == 1
assert options[0][0].value == 'before_backup'
assert options[0][1].tag == 'tag:yaml.org,2002:seq'
assert [item.value for item in options[0][1].value] == ['echo 3', 'echo 4']
def test_deep_merge_nodes_skips_sequence_values_tagged_with_omit():
node_values = [
(
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
module.ruamel.yaml.nodes.MappingNode(
tag='tag:yaml.org,2002:map',
value=[
(
module.ruamel.yaml.nodes.ScalarNode(
tag='tag:yaml.org,2002:str', value='before_backup'
),
module.ruamel.yaml.nodes.SequenceNode(
tag='tag:yaml.org,2002:seq',
value=[
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 1'
),
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 2'
),
],
),
),
],
),
),
(
module.ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'),
module.ruamel.yaml.nodes.MappingNode(
tag='tag:yaml.org,2002:map',
value=[
(
module.ruamel.yaml.nodes.ScalarNode(
tag='tag:yaml.org,2002:str', value='before_backup'
),
module.ruamel.yaml.nodes.SequenceNode(
tag='tag:yaml.org,2002:seq',
value=[
module.ruamel.yaml.ScalarNode(tag='!omit', value='echo 2'),
module.ruamel.yaml.ScalarNode(
tag='tag:yaml.org,2002:str', value='echo 3'
),
],
),
),
],
),
),
]
result = module.deep_merge_nodes(node_values)
assert len(result) == 1
(section_key, section_value) = result[0]
assert section_key.value == 'hooks'
options = section_value.value
assert len(options) == 1
assert options[0][0].value == 'before_backup'
assert [item.value for item in options[0][1].value] == ['echo 1', 'echo 3']


@@ -12,7 +12,11 @@ def test_run_check_calls_hooks_for_configured_repository():
flexmock(module.borgmatic.borg.check).should_receive('check_archives').once()
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
check_arguments = flexmock(
repository=None,
progress=flexmock(),
repair=flexmock(),
only=flexmock(),
force=flexmock(),
)
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False)


@ -148,7 +148,8 @@ def test_find_databases_to_restore_without_requested_names_finds_all_archive_dat
archive_database_names = {'postresql_databases': ['foo', 'bar']} archive_database_names = {'postresql_databases': ['foo', 'bar']}
restore_names = module.find_databases_to_restore( restore_names = module.find_databases_to_restore(
requested_database_names=[], archive_database_names=archive_database_names, requested_database_names=[],
archive_database_names=archive_database_names,
) )
assert restore_names == archive_database_names assert restore_names == archive_database_names
@ -158,7 +159,8 @@ def test_find_databases_to_restore_with_all_in_requested_names_finds_all_archive
archive_database_names = {'postresql_databases': ['foo', 'bar']} archive_database_names = {'postresql_databases': ['foo', 'bar']}
restore_names = module.find_databases_to_restore( restore_names = module.find_databases_to_restore(
requested_database_names=['all'], archive_database_names=archive_database_names, requested_database_names=['all'],
archive_database_names=archive_database_names,
) )
assert restore_names == archive_database_names assert restore_names == archive_database_names
@ -194,7 +196,9 @@ def test_ensure_databases_found_with_all_databases_found_does_not_raise():
def test_ensure_databases_found_with_no_databases_raises(): def test_ensure_databases_found_with_no_databases_raises():
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.ensure_databases_found( module.ensure_databases_found(
restore_names={'postgresql_databases': []}, remaining_restore_names={}, found_names=[], restore_names={'postgresql_databases': []},
remaining_restore_names={},
found_names=[],
) )
@ -233,7 +237,7 @@ def test_run_restore_restores_each_database():
remote_path=object, remote_path=object,
archive_name=object, archive_name=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database={'name': 'foo'}, database={'name': 'foo', 'schemas': None},
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
@ -246,7 +250,7 @@ def test_run_restore_restores_each_database():
remote_path=object, remote_path=object,
archive_name=object, archive_name=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database={'name': 'bar'}, database={'name': 'bar', 'schemas': None},
).once() ).once()
flexmock(module).should_receive('ensure_databases_found') flexmock(module).should_receive('ensure_databases_found')
@ -256,7 +260,9 @@ def test_run_restore_restores_each_database():
storage=flexmock(), storage=flexmock(),
hooks=flexmock(), hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), restore_arguments=flexmock(
repository='repo', archive='archive', databases=flexmock(), schemas=None
),
global_arguments=flexmock(dry_run=False), global_arguments=flexmock(dry_run=False),
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
@ -327,7 +333,7 @@ def test_run_restore_restores_database_configured_with_all_name():
remote_path=object, remote_path=object,
archive_name=object, archive_name=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database={'name': 'foo'}, database={'name': 'foo', 'schemas': None},
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
@ -340,7 +346,7 @@ def test_run_restore_restores_database_configured_with_all_name():
remote_path=object, remote_path=object,
archive_name=object, archive_name=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database={'name': 'bar'}, database={'name': 'bar', 'schemas': None},
).once() ).once()
flexmock(module).should_receive('ensure_databases_found') flexmock(module).should_receive('ensure_databases_found')
@ -350,7 +356,9 @@ def test_run_restore_restores_database_configured_with_all_name():
storage=flexmock(), storage=flexmock(),
hooks=flexmock(), hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), restore_arguments=flexmock(
repository='repo', archive='archive', databases=flexmock(), schemas=None
),
global_arguments=flexmock(dry_run=False), global_arguments=flexmock(dry_run=False),
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
@ -399,7 +407,7 @@ def test_run_restore_skips_missing_database():
remote_path=object, remote_path=object,
archive_name=object, archive_name=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database={'name': 'foo'}, database={'name': 'foo', 'schemas': None},
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
@ -412,7 +420,7 @@ def test_run_restore_skips_missing_database():
remote_path=object, remote_path=object,
archive_name=object, archive_name=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database={'name': 'bar'}, database={'name': 'bar', 'schemas': None},
).never() ).never()
flexmock(module).should_receive('ensure_databases_found') flexmock(module).should_receive('ensure_databases_found')
@ -422,7 +430,9 @@ def test_run_restore_skips_missing_database():
storage=flexmock(), storage=flexmock(),
hooks=flexmock(), hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), restore_arguments=flexmock(
repository='repo', archive='archive', databases=flexmock(), schemas=None
),
global_arguments=flexmock(dry_run=False), global_arguments=flexmock(dry_run=False),
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
@ -465,7 +475,7 @@ def test_run_restore_restores_databases_from_different_hooks():
remote_path=object, remote_path=object,
archive_name=object, archive_name=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database={'name': 'foo'}, database={'name': 'foo', 'schemas': None},
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
@ -478,7 +488,7 @@ def test_run_restore_restores_databases_from_different_hooks():
remote_path=object, remote_path=object,
archive_name=object, archive_name=object,
hook_name='mysql_databases', hook_name='mysql_databases',
database={'name': 'bar'}, database={'name': 'bar', 'schemas': None},
).once() ).once()
flexmock(module).should_receive('ensure_databases_found') flexmock(module).should_receive('ensure_databases_found')
@ -488,7 +498,9 @@ def test_run_restore_restores_databases_from_different_hooks():
storage=flexmock(), storage=flexmock(),
hooks=flexmock(), hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), restore_arguments=flexmock(
repository='repo', archive='archive', databases=flexmock(), schemas=None
),
global_arguments=flexmock(dry_run=False), global_arguments=flexmock(dry_run=False),
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),


@ -222,7 +222,10 @@ def test_run_arbitrary_borg_without_borg_specific_parameters_does_not_raise():
) )
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', storage_config={}, local_borg_version='1.2.3', options=[], repository_path='repo',
storage_config={},
local_borg_version='1.2.3',
options=[],
) )


@ -10,7 +10,9 @@ from ..test_verbosity import insert_logging_mock
def insert_execute_command_mock(command): def insert_execute_command_mock(command):
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args( flexmock(module).should_receive('execute_command').with_args(
command, borg_local_path='borg', extra_environment=None, command,
borg_local_path='borg',
extra_environment=None,
).once() ).once()
@ -19,7 +21,9 @@ def test_break_lock_calls_borg_with_required_flags():
insert_execute_command_mock(('borg', 'break-lock', 'repo')) insert_execute_command_mock(('borg', 'break-lock', 'repo'))
module.break_lock( module.break_lock(
repository_path='repo', storage_config={}, local_borg_version='1.2.3', repository_path='repo',
storage_config={},
local_borg_version='1.2.3',
) )
@ -28,7 +32,10 @@ def test_break_lock_calls_borg_with_remote_path_flags():
insert_execute_command_mock(('borg', 'break-lock', '--remote-path', 'borg1', 'repo')) insert_execute_command_mock(('borg', 'break-lock', '--remote-path', 'borg1', 'repo'))
module.break_lock( module.break_lock(
repository_path='repo', storage_config={}, local_borg_version='1.2.3', remote_path='borg1', repository_path='repo',
storage_config={},
local_borg_version='1.2.3',
remote_path='borg1',
) )
@ -37,7 +44,9 @@ def test_break_lock_calls_borg_with_umask_flags():
insert_execute_command_mock(('borg', 'break-lock', '--umask', '0770', 'repo')) insert_execute_command_mock(('borg', 'break-lock', '--umask', '0770', 'repo'))
module.break_lock( module.break_lock(
repository_path='repo', storage_config={'umask': '0770'}, local_borg_version='1.2.3', repository_path='repo',
storage_config={'umask': '0770'},
local_borg_version='1.2.3',
) )
@ -46,7 +55,9 @@ def test_break_lock_calls_borg_with_lock_wait_flags():
insert_execute_command_mock(('borg', 'break-lock', '--lock-wait', '5', 'repo')) insert_execute_command_mock(('borg', 'break-lock', '--lock-wait', '5', 'repo'))
module.break_lock( module.break_lock(
repository_path='repo', storage_config={'lock_wait': '5'}, local_borg_version='1.2.3', repository_path='repo',
storage_config={'lock_wait': '5'},
local_borg_version='1.2.3',
) )
@ -56,7 +67,9 @@ def test_break_lock_with_log_info_calls_borg_with_info_parameter():
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
module.break_lock( module.break_lock(
repository_path='repo', storage_config={}, local_borg_version='1.2.3', repository_path='repo',
storage_config={},
local_borg_version='1.2.3',
) )
@ -66,5 +79,7 @@ def test_break_lock_with_log_debug_calls_borg_with_debug_flags():
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
module.break_lock( module.break_lock(
repository_path='repo', storage_config={}, local_borg_version='1.2.3', repository_path='repo',
storage_config={},
local_borg_version='1.2.3',
) )


@ -79,7 +79,12 @@ def test_parse_frequency_parses_into_timedeltas(frequency, expected_result):
@pytest.mark.parametrize( @pytest.mark.parametrize(
'frequency', ('sometime', 'x days', '3 decades',), 'frequency',
(
'sometime',
'x days',
'3 decades',
),
) )
def test_parse_frequency_raises_on_parse_error(frequency): def test_parse_frequency_raises_on_parse_error(frequency):
with pytest.raises(ValueError): with pytest.raises(ValueError):
@ -211,7 +216,10 @@ def test_make_check_flags_with_data_check_returns_flag_and_implies_archives():
flags = module.make_check_flags('1.2.3', {}, ('data',)) flags = module.make_check_flags('1.2.3', {}, ('data',))
assert flags == ('--archives-only', '--verify-data',) assert flags == (
'--archives-only',
'--verify-data',
)
def test_make_check_flags_with_extract_omits_extract_flag(): def test_make_check_flags_with_extract_omits_extract_flag():
@ -227,7 +235,14 @@ def test_make_check_flags_with_repository_and_data_checks_does_not_return_reposi
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
flags = module.make_check_flags('1.2.3', {}, ('repository', 'data',)) flags = module.make_check_flags(
'1.2.3',
{},
(
'repository',
'data',
),
)
assert flags == ('--verify-data',) assert flags == ('--verify-data',)
@ -236,7 +251,12 @@ def test_make_check_flags_with_default_checks_and_prefix_returns_default_flags()
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
flags = module.make_check_flags('1.2.3', {}, ('repository', 'archives'), prefix='foo',) flags = module.make_check_flags(
'1.2.3',
{},
('repository', 'archives'),
prefix='foo',
)
assert flags == ('--match-archives', 'sh:foo*') assert flags == ('--match-archives', 'sh:foo*')
@ -246,7 +266,10 @@ def test_make_check_flags_with_all_checks_and_prefix_returns_default_flags():
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
flags = module.make_check_flags( flags = module.make_check_flags(
'1.2.3', {}, ('repository', 'archives', 'extract'), prefix='foo', '1.2.3',
{},
('repository', 'archives', 'extract'),
prefix='foo',
) )
assert flags == ('--match-archives', 'sh:foo*') assert flags == ('--match-archives', 'sh:foo*')
@ -257,7 +280,10 @@ def test_make_check_flags_with_all_checks_and_prefix_without_borg_features_retur
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
flags = module.make_check_flags( flags = module.make_check_flags(
'1.2.3', {}, ('repository', 'archives', 'extract'), prefix='foo', '1.2.3',
{},
('repository', 'archives', 'extract'),
prefix='foo',
) )
assert flags == ('--glob-archives', 'foo*') assert flags == ('--glob-archives', 'foo*')
@ -447,7 +473,11 @@ def test_check_archives_calls_borg_with_parameters(checks):
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
flexmock(module).should_receive('make_check_flags').with_args( flexmock(module).should_receive('make_check_flags').with_args(
'1.2.3', {}, checks, check_last, prefix=None, '1.2.3',
{},
checks,
check_last,
prefix=None,
).and_return(()) ).and_return(())
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
insert_execute_command_mock(('borg', 'check', 'repo')) insert_execute_command_mock(('borg', 'check', 'repo'))
@ -601,7 +631,11 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
flexmock(module).should_receive('make_check_flags').with_args( flexmock(module).should_receive('make_check_flags').with_args(
'1.2.3', {}, checks, check_last, prefix=None, '1.2.3',
{},
checks,
check_last,
prefix=None,
).and_return(()) ).and_return(())
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
insert_execute_command_mock(('borg1', 'check', 'repo')) insert_execute_command_mock(('borg1', 'check', 'repo'))
@ -628,7 +662,11 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
flexmock(module).should_receive('make_check_flags').with_args( flexmock(module).should_receive('make_check_flags').with_args(
'1.2.3', {}, checks, check_last, prefix=None, '1.2.3',
{},
checks,
check_last,
prefix=None,
).and_return(()) ).and_return(())
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
insert_execute_command_mock(('borg', 'check', '--remote-path', 'borg1', 'repo')) insert_execute_command_mock(('borg', 'check', '--remote-path', 'borg1', 'repo'))
@ -656,7 +694,11 @@ def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
flexmock(module).should_receive('make_check_flags').with_args( flexmock(module).should_receive('make_check_flags').with_args(
'1.2.3', storage_config, checks, check_last, None, '1.2.3',
storage_config,
checks,
check_last,
None,
).and_return(()) ).and_return(())
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
insert_execute_command_mock(('borg', 'check', '--lock-wait', '5', 'repo')) insert_execute_command_mock(('borg', 'check', '--lock-wait', '5', 'repo'))


@ -1053,7 +1053,8 @@ def test_create_archive_with_compression_calls_borg_with_compression_parameters(
@pytest.mark.parametrize( @pytest.mark.parametrize(
'feature_available,option_flag', ((True, '--upload-ratelimit'), (False, '--remote-ratelimit')), 'feature_available,option_flag',
((True, '--upload-ratelimit'), (False, '--remote-ratelimit')),
) )
def test_create_archive_with_upload_rate_limit_calls_borg_with_upload_ratelimit_parameters( def test_create_archive_with_upload_rate_limit_calls_borg_with_upload_ratelimit_parameters(
feature_available, option_flag feature_available, option_flag
@ -1188,7 +1189,8 @@ def test_create_archive_with_one_file_system_calls_borg_with_one_file_system_par
@pytest.mark.parametrize( @pytest.mark.parametrize(
'feature_available,option_flag', ((True, '--numeric-ids'), (False, '--numeric-owner')), 'feature_available,option_flag',
((True, '--numeric-ids'), (False, '--numeric-owner')),
) )
def test_create_archive_with_numeric_ids_calls_borg_with_numeric_ids_parameter( def test_create_archive_with_numeric_ids_calls_borg_with_numeric_ids_parameter(
feature_available, option_flag feature_available, option_flag
@ -1290,7 +1292,12 @@ def test_create_archive_with_read_special_calls_borg_with_read_special_parameter
@pytest.mark.parametrize( @pytest.mark.parametrize(
'option_name,option_value', 'option_name,option_value',
(('ctime', True), ('ctime', False), ('birthtime', True), ('birthtime', False),), (
('ctime', True),
('ctime', False),
('birthtime', True),
('birthtime', False),
),
) )
def test_create_archive_with_basic_option_calls_borg_with_corresponding_parameter( def test_create_archive_with_basic_option_calls_borg_with_corresponding_parameter(
option_name, option_value option_name, option_value
@ -1766,7 +1773,12 @@ def test_create_archive_with_progress_and_log_info_calls_borg_with_progress_para
) )
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args( flexmock(module).should_receive('execute_command').with_args(
('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--info', '--progress',), ('borg', 'create')
+ REPO_ARCHIVE_WITH_PATHS
+ (
'--info',
'--progress',
),
output_log_level=logging.INFO, output_log_level=logging.INFO,
output_file=module.DO_NOT_CAPTURE, output_file=module.DO_NOT_CAPTURE,
borg_local_path='borg', borg_local_path='borg',
@ -2538,7 +2550,7 @@ def test_create_archive_with_non_existent_directory_and_source_directories_must_
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module).should_receive('check_all_source_directories_exist').and_raise(ValueError)
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.create_archive( module.create_archive(
@ -2553,3 +2565,26 @@ def test_create_archive_with_non_existent_directory_and_source_directories_must_
storage_config={}, storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
) )
def test_check_all_source_directories_exist_with_glob_and_tilde_directories():
flexmock(module).should_receive('expand_directory').with_args('foo*').and_return(
('foo', 'food')
)
flexmock(module).should_receive('expand_directory').with_args('~/bar').and_return(
('/root/bar',)
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.os.path).should_receive('exists').with_args('foo').and_return(True)
flexmock(module.os.path).should_receive('exists').with_args('food').and_return(True)
flexmock(module.os.path).should_receive('exists').with_args('/root/bar').and_return(True)
module.check_all_source_directories_exist(['foo*', '~/bar'])
def test_check_all_source_directories_exist_with_non_existent_directory_raises():
flexmock(module).should_receive('expand_directory').with_args('foo').and_return(('foo',))
flexmock(module.os.path).should_receive('exists').and_return(False)
with pytest.raises(ValueError):
module.check_all_source_directories_exist(['foo'])


@ -11,7 +11,9 @@ from ..test_verbosity import insert_logging_mock
def insert_execute_command_mock(command, working_directory=None): def insert_execute_command_mock(command, working_directory=None):
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args( flexmock(module).should_receive('execute_command').with_args(
command, working_directory=working_directory, extra_environment=None, command,
working_directory=working_directory,
extra_environment=None,
).once() ).once()
@ -152,7 +154,11 @@ def test_extract_archive_calls_borg_with_remote_path_parameters():
@pytest.mark.parametrize( @pytest.mark.parametrize(
'feature_available,option_flag', ((True, '--numeric-ids'), (False, '--numeric-owner'),), 'feature_available,option_flag',
(
(True, '--numeric-ids'),
(False, '--numeric-owner'),
),
) )
def test_extract_archive_calls_borg_with_numeric_ids_parameter(feature_available, option_flag): def test_extract_archive_calls_borg_with_numeric_ids_parameter(feature_available, option_flag):
flexmock(module.os.path).should_receive('abspath').and_return('repo') flexmock(module.os.path).should_receive('abspath').and_return('repo')
@ -441,7 +447,9 @@ def test_extract_archive_skips_abspath_for_remote_repository():
flexmock(module.os.path).should_receive('abspath').never() flexmock(module.os.path).should_receive('abspath').never()
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args( flexmock(module).should_receive('execute_command').with_args(
('borg', 'extract', 'server:repo::archive'), working_directory=None, extra_environment=None, ('borg', 'extract', 'server:repo::archive'),
working_directory=None,
extra_environment=None,
).once() ).once()
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(


@ -70,7 +70,11 @@ def test_make_repository_archive_flags_with_borg_features_separates_repository_a
assert module.make_repository_archive_flags( assert module.make_repository_archive_flags(
repository_path='repo', archive='archive', local_borg_version='1.2.3' repository_path='repo', archive='archive', local_borg_version='1.2.3'
) == ('--repo', 'repo', 'archive',) ) == (
'--repo',
'repo',
'archive',
)
def test_make_repository_archive_flags_with_borg_features_joins_repository_and_archive(): def test_make_repository_archive_flags_with_borg_features_joins_repository_and_archive():
@ -86,9 +90,24 @@ def test_make_repository_archive_flags_with_borg_features_joins_repository_and_a
( (
(None, None, True, ()), (None, None, True, ()),
(None, '', True, ()), (None, '', True, ()),
('re:foo-.*', '{hostname}-{now}', True, ('--match-archives', 're:foo-.*'),), # noqa: FS003 (
('sh:foo-*', '{hostname}-{now}', False, ('--glob-archives', 'foo-*'),), # noqa: FS003 're:foo-.*',
('foo-*', '{hostname}-{now}', False, ('--glob-archives', 'foo-*'),), # noqa: FS003 '{hostname}-{now}',
True,
('--match-archives', 're:foo-.*'),
), # noqa: FS003
(
'sh:foo-*',
'{hostname}-{now}',
False,
('--glob-archives', 'foo-*'),
), # noqa: FS003
(
'foo-*',
'{hostname}-{now}',
False,
('--glob-archives', 'foo-*'),
), # noqa: FS003
( (
None, None,
'{hostname}-docs-{now}', # noqa: FS003 '{hostname}-docs-{now}', # noqa: FS003


@ -69,7 +69,8 @@ def test_display_archives_info_with_log_info_and_json_suppresses_most_borg_outpu
flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command_and_capture_output').with_args( flexmock(module).should_receive('execute_command_and_capture_output').with_args(
('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, ('borg', 'info', '--json', '--repo', 'repo'),
extra_environment=None,
).and_return('[]') ).and_return('[]')
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
@ -120,7 +121,8 @@ def test_display_archives_info_with_log_debug_and_json_suppresses_most_borg_outp
flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command_and_capture_output').with_args( flexmock(module).should_receive('execute_command_and_capture_output').with_args(
('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, ('borg', 'info', '--json', '--repo', 'repo'),
extra_environment=None,
).and_return('[]') ).and_return('[]')
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
@ -145,7 +147,8 @@ def test_display_archives_info_with_json_calls_borg_with_json_parameter():
flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command_and_capture_output').with_args( flexmock(module).should_receive('execute_command_and_capture_output').with_args(
('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, ('borg', 'info', '--json', '--repo', 'repo'),
extra_environment=None,
).and_return('[]') ).and_return('[]')
json_output = module.display_archives_info( json_output = module.display_archives_info(


@@ -387,7 +387,8 @@ def test_list_archive_calls_borg_multiple_times_with_find_paths():
     flexmock(module.feature).should_receive('available').and_return(False)
     flexmock(module.rlist).should_receive('make_rlist_command').and_return(('borg', 'list', 'repo'))
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'list', 'repo'), extra_environment=None,
+        ('borg', 'list', 'repo'),
+        extra_environment=None,
     ).and_return('archive1\narchive2').once()
     flexmock(module).should_receive('make_list_command').and_return(
         ('borg', 'list', 'repo::archive1')
@@ -518,9 +519,18 @@ def test_list_archive_with_borg_features_without_archive_delegates_to_list_repos
 @pytest.mark.parametrize(
-    'archive_filter_flag', ('prefix', 'match_archives', 'sort_by', 'first', 'last',),
+    'archive_filter_flag',
+    (
+        'prefix',
+        'match_archives',
+        'sort_by',
+        'first',
+        'last',
+    ),
 )
-def test_list_archive_with_archive_ignores_archive_filter_flag(archive_filter_flag,):
+def test_list_archive_with_archive_ignores_archive_filter_flag(
+    archive_filter_flag,
+):
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.logger).answer = lambda message: None
@@ -566,7 +576,14 @@ def test_list_archive_with_archive_ignores_archive_filter_flag(archive_filter_fl
 @pytest.mark.parametrize(
-    'archive_filter_flag', ('prefix', 'match_archives', 'sort_by', 'first', 'last',),
+    'archive_filter_flag',
+    (
+        'prefix',
+        'match_archives',
+        'sort_by',
+        'first',
+        'last',
+    ),
 )
 def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes_it_to_rlist(
     archive_filter_flag,
@@ -597,7 +614,8 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes
     ).and_return(('borg', 'rlist', '--repo', 'repo'))
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'rlist', '--repo', 'repo'), extra_environment=None,
+        ('borg', 'rlist', '--repo', 'repo'),
+        extra_environment=None,
     ).and_return('archive1\narchive2').once()
     flexmock(module).should_receive('make_list_command').with_args(


@@ -10,7 +10,9 @@ from ..test_verbosity import insert_logging_mock
 def insert_execute_command_mock(command):
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
-        command, borg_local_path='borg', extra_environment=None,
+        command,
+        borg_local_path='borg',
+        extra_environment=None,
     ).once()
@@ -33,7 +35,12 @@ def test_mount_archive_calls_borg_with_required_flags():
 def test_mount_archive_with_borg_features_calls_borg_with_repository_and_match_archives_flags():
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     insert_execute_command_mock(
         ('borg', 'mount', '--repo', 'repo', '--match-archives', 'archive', '/mnt')
     )


@@ -36,7 +36,12 @@ def test_create_repository_calls_borg_with_flags():
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -51,7 +56,12 @@ def test_create_repository_with_dry_run_skips_borg_call():
     insert_rinfo_command_not_found_mock()
     flexmock(module).should_receive('execute_command').never()
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=True,
@@ -65,7 +75,12 @@ def test_create_repository_with_dry_run_skips_borg_call():
 def test_create_repository_raises_for_borg_rcreate_error():
     insert_rinfo_command_not_found_mock()
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').and_raise(
         module.subprocess.CalledProcessError(2, 'borg rcreate')
@@ -84,7 +99,12 @@ def test_create_repository_raises_for_borg_rcreate_error():
 def test_create_repository_skips_creation_when_repository_already_exists():
     insert_rinfo_command_found_mock()
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -114,7 +134,12 @@ def test_create_repository_with_source_repository_calls_borg_with_other_repo_fla
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--other-repo', 'other.borg', '--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -130,7 +155,12 @@ def test_create_repository_with_copy_crypt_key_calls_borg_with_copy_crypt_key_fl
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--copy-crypt-key', '--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -146,7 +176,12 @@ def test_create_repository_with_append_only_calls_borg_with_append_only_flag():
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--append-only', '--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -162,7 +197,12 @@ def test_create_repository_with_storage_quota_calls_borg_with_storage_quota_flag
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--storage-quota', '5G', '--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -178,7 +218,12 @@ def test_create_repository_with_make_parent_dirs_calls_borg_with_make_parent_dir
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--make-parent-dirs', '--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -195,7 +240,12 @@ def test_create_repository_with_log_info_calls_borg_with_info_flag():
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--info', '--repo', 'repo'))
     insert_logging_mock(logging.INFO)
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -211,7 +261,12 @@ def test_create_repository_with_log_debug_calls_borg_with_debug_flag():
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--debug', '--repo', 'repo'))
     insert_logging_mock(logging.DEBUG)
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -226,7 +281,12 @@ def test_create_repository_with_local_path_calls_borg_via_local_path():
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(('borg1',) + RCREATE_COMMAND[1:] + ('--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -242,7 +302,12 @@ def test_create_repository_with_remote_path_calls_borg_with_remote_path_flag():
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--remote-path', 'borg1', '--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,
@@ -258,7 +323,12 @@ def test_create_repository_with_extra_borg_options_calls_borg_with_extra_options
     insert_rinfo_command_not_found_mock()
     insert_rcreate_command_mock(RCREATE_COMMAND + ('--extra', '--options', '--repo', 'repo'))
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     module.create_repository(
         dry_run=False,


@@ -11,7 +11,12 @@ def test_display_repository_info_calls_borg_with_parameters():
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'rinfo', '--repo', 'repo'),
@@ -53,7 +58,12 @@ def test_display_repository_info_with_log_info_calls_borg_with_info_parameter():
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'rinfo', '--info', '--repo', 'repo'),
@@ -74,10 +84,16 @@ def test_display_repository_info_with_log_info_and_json_suppresses_most_borg_out
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None,
+        ('borg', 'rinfo', '--json', '--repo', 'repo'),
+        extra_environment=None,
     ).and_return('[]')
     insert_logging_mock(logging.INFO)
@@ -95,7 +111,12 @@ def test_display_repository_info_with_log_debug_calls_borg_with_debug_parameter(
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'rinfo', '--debug', '--show-rc', '--repo', 'repo'),
@@ -117,10 +138,16 @@ def test_display_repository_info_with_log_debug_and_json_suppresses_most_borg_ou
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None,
+        ('borg', 'rinfo', '--json', '--repo', 'repo'),
+        extra_environment=None,
     ).and_return('[]')
     insert_logging_mock(logging.DEBUG)
@@ -138,10 +165,16 @@ def test_display_repository_info_with_json_calls_borg_with_json_parameter():
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None,
+        ('borg', 'rinfo', '--json', '--repo', 'repo'),
+        extra_environment=None,
     ).and_return('[]')
     json_output = module.display_repository_info(
@@ -158,7 +191,12 @@ def test_display_repository_info_with_local_path_calls_borg_via_local_path():
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg1', 'rinfo', '--repo', 'repo'),
@@ -180,7 +218,12 @@ def test_display_repository_info_with_remote_path_calls_borg_with_remote_path_pa
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'rinfo', '--remote-path', 'borg1', '--repo', 'repo'),
@@ -203,7 +246,12 @@ def test_display_repository_info_with_lock_wait_calls_borg_with_lock_wait_parame
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
     storage_config = {'lock_wait': 5}
     flexmock(module.feature).should_receive('available').and_return(True)
-    flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',))
+    flexmock(module.flags).should_receive('make_repository_flags').and_return(
+        (
+            '--repo',
+            'repo',
+        )
+    )
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command').with_args(
         ('borg', 'rinfo', '--lock-wait', '5', '--repo', 'repo'),


@@ -29,7 +29,8 @@ def test_resolve_archive_name_calls_borg_with_parameters():
     expected_archive = 'archive-name'
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None,
+        ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS,
+        extra_environment=None,
     ).and_return(expected_archive + '\n')
     assert (
@@ -42,7 +43,8 @@ def test_resolve_archive_name_with_log_info_calls_borg_without_info_parameter():
     expected_archive = 'archive-name'
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None,
+        ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS,
+        extra_environment=None,
     ).and_return(expected_archive + '\n')
     insert_logging_mock(logging.INFO)
@@ -56,7 +58,8 @@ def test_resolve_archive_name_with_log_debug_calls_borg_without_debug_parameter(
     expected_archive = 'archive-name'
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None,
+        ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS,
+        extra_environment=None,
     ).and_return(expected_archive + '\n')
     insert_logging_mock(logging.DEBUG)
@@ -70,7 +73,8 @@ def test_resolve_archive_name_with_local_path_calls_borg_via_local_path():
     expected_archive = 'archive-name'
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg1', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None,
+        ('borg1', 'list') + BORG_LIST_LATEST_ARGUMENTS,
+        extra_environment=None,
     ).and_return(expected_archive + '\n')
     assert (
@@ -100,7 +104,8 @@ def test_resolve_archive_name_with_remote_path_calls_borg_with_remote_path_param
 def test_resolve_archive_name_without_archives_raises():
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None,
+        ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS,
+        extra_environment=None,
     ).and_return('')
     with pytest.raises(ValueError):
@@ -374,7 +379,15 @@ def test_make_rlist_command_includes_short():
 @pytest.mark.parametrize(
     'argument_name',
-    ('sort_by', 'first', 'last', 'exclude', 'exclude_from', 'pattern', 'patterns_from',),
+    (
+        'sort_by',
+        'first',
+        'last',
+        'exclude',
+        'exclude_from',
+        'pattern',
+        'patterns_from',
+    ),
 )
 def test_make_rlist_command_includes_additional_flags(argument_name):
     flexmock(module.flags).should_receive('make_flags').and_return(())
@@ -411,7 +424,9 @@ def test_make_rlist_command_with_match_archives_calls_borg_with_match_archives_p
         None, None, '1.2.3'
     ).and_return(())
     flexmock(module.flags).should_receive('make_match_archives_flags').with_args(
-        'foo-*', None, '1.2.3',
+        'foo-*',
+        None,
+        '1.2.3',
     ).and_return(('--match-archives', 'foo-*'))
     flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
     flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))


@@ -15,7 +15,8 @@ def insert_execute_command_and_capture_output_mock(
 ):
     flexmock(module.environment).should_receive('make_environment')
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        command, extra_environment=None,
+        command,
+        extra_environment=None,
     ).once().and_return(version_output)


@@ -229,7 +229,8 @@ def test_run_configuration_retries_hard_error():
     ).and_return([flexmock()])
     error_logs = [flexmock()]
     flexmock(module).should_receive('log_error_records').with_args(
-        'foo: Error running actions for repository', OSError,
+        'foo: Error running actions for repository',
+        OSError,
     ).and_return(error_logs)
     config = {'location': {'repositories': [{'path': 'foo'}]}, 'storage': {'retries': 1}}
     arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}


@@ -21,13 +21,21 @@ from borgmatic.config import normalize as module
             {'location': {'source_directories': ['foo', 'bar']}},
             False,
         ),
-        ({'location': None}, {'location': None}, False,),
+        (
+            {'location': None},
+            {'location': None},
+            False,
+        ),
         (
             {'storage': {'compression': 'yes_please'}},
             {'storage': {'compression': 'yes_please'}},
             False,
         ),
-        ({'storage': None}, {'storage': None}, False,),
+        (
+            {'storage': None},
+            {'storage': None},
+            False,
+        ),
         (
             {'hooks': {'healthchecks': 'https://example.com'}},
             {'hooks': {'healthchecks': {'ping_url': 'https://example.com'}}},
@@ -48,10 +56,9 @@ from borgmatic.config import normalize as module
             {'hooks': {'cronhub': {'ping_url': 'https://example.com'}}},
             False,
         ),
-        ({'hooks': None}, {'hooks': None}, False,),
         (
-            {'consistency': {'checks': ['archives']}},
-            {'consistency': {'checks': [{'name': 'archives'}]}},
+            {'hooks': None},
+            {'hooks': None},
             False,
         ),
         (
@@ -59,9 +66,26 @@ from borgmatic.config import normalize as module
             {'consistency': {'checks': [{'name': 'archives'}]}},
             False,
         ),
-        ({'consistency': None}, {'consistency': None}, False,),
-        ({'location': {'numeric_owner': False}}, {'location': {'numeric_ids': False}}, False,),
-        ({'location': {'bsd_flags': False}}, {'location': {'flags': False}}, False,),
+        (
+            {'consistency': {'checks': ['archives']}},
+            {'consistency': {'checks': [{'name': 'archives'}]}},
+            False,
+        ),
+        (
+            {'consistency': None},
+            {'consistency': None},
+            False,
+        ),
+        (
+            {'location': {'numeric_owner': False}},
+            {'location': {'numeric_ids': False}},
+            False,
+        ),
+        (
+            {'location': {'bsd_flags': False}},
+            {'location': {'flags': False}},
+            False,
+        ),
         (
             {'storage': {'remote_rate_limit': False}},
             {'storage': {'upload_rate_limit': False}},


@@ -4,6 +4,28 @@ from flexmock import flexmock
 from borgmatic.config import validate as module
+def test_schema_filename_finds_schema_path():
+    schema_path = '/var/borgmatic/config/schema.yaml'
+    flexmock(module.importlib_metadata).should_receive('files').and_return(
+        flexmock(match=lambda path: False, locate=lambda: None),
+        flexmock(match=lambda path: True, locate=lambda: schema_path),
+        flexmock(match=lambda path: False, locate=lambda: None),
+    )
+    assert module.schema_filename() == schema_path
+def test_schema_filename_with_missing_schema_path_raises():
+    flexmock(module.importlib_metadata).should_receive('files').and_return(
+        flexmock(match=lambda path: False, locate=lambda: None),
+        flexmock(match=lambda path: False, locate=lambda: None),
+    )
+    with pytest.raises(FileNotFoundError):
+        assert module.schema_filename()
 def test_format_json_error_path_element_formats_array_index():
     module.format_json_error_path_element(3) == '[3]'
@@ -138,7 +160,6 @@ def test_guard_configuration_contains_repository_does_not_raise_when_repository_
 def test_guard_configuration_contains_repository_does_not_raise_when_repository_label_in_config():
     module.guard_configuration_contains_repository(
         repository='repo',
         configurations={
@@ -190,13 +211,15 @@ def test_guard_single_repository_selected_raises_when_multiple_repositories_conf
 def test_guard_single_repository_selected_does_not_raise_when_single_repository_configured_and_none_selected():
     module.guard_single_repository_selected(
-        repository=None, configurations={'config.yaml': {'location': {'repositories': ['repo']}}},
+        repository=None,
+        configurations={'config.yaml': {'location': {'repositories': ['repo']}}},
     )
 def test_guard_single_repository_selected_does_not_raise_when_no_repositories_configured_and_one_selected():
     module.guard_single_repository_selected(
-        repository='repo', configurations={'config.yaml': {'location': {'repositories': []}}},
+        repository='repo',
+        configurations={'config.yaml': {'location': {'repositories': []}}},
     )
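The two schema_filename() tests added above exercise a lookup built on importlib.metadata instead of the deprecated pkg_resources: files() entries are mocked with match() and locate(), and a missing entry raises FileNotFoundError. As a rough illustration only, and not necessarily borgmatic's actual implementation, a lookup satisfying those tests could look like this, assuming the schema ships as package data at config/schema.yaml:

import importlib.metadata as importlib_metadata


def schema_filename():
    # Return the located path of the bundled config/schema.yaml, or raise
    # FileNotFoundError if the installed package's metadata does not list it.
    try:
        return next(
            str(path.locate())
            for path in importlib_metadata.files('borgmatic')
            if path.match('config/schema.yaml')
        )
    except StopIteration:
        raise FileNotFoundError('Configuration file schema could not be found')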


@@ -108,5 +108,9 @@ def test_ping_monitor_with_unsupported_monitoring_state():
     hook_config = {'ping_url': 'https://example.com'}
     flexmock(module.requests).should_receive('get').never()
     module.ping_monitor(
-        hook_config, 'config.yaml', module.monitor.State.LOG, monitoring_log_level=1, dry_run=False,
+        hook_config,
+        'config.yaml',
+        module.monitor.State.LOG,
+        monitoring_log_level=1,
+        dry_run=False,
     )


@@ -93,5 +93,9 @@ def test_ping_monitor_with_unsupported_monitoring_state():
     hook_config = {'ping_url': 'https://example.com'}
     flexmock(module.requests).should_receive('get').never()
     module.ping_monitor(
-        hook_config, 'config.yaml', module.monitor.State.LOG, monitoring_log_level=1, dry_run=False,
+        hook_config,
+        'config.yaml',
+        module.monitor.State.LOG,
+        monitoring_log_level=1,
+        dry_run=False,
     )


@@ -206,7 +206,9 @@ def test_ping_monitor_with_ping_uuid_hits_corresponding_url():
     payload = 'data'
     flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload)
     flexmock(module.requests).should_receive('post').with_args(
-        f"https://hc-ping.com/{hook_config['ping_url']}", data=payload.encode('utf-8'), verify=True,
+        f"https://hc-ping.com/{hook_config['ping_url']}",
+        data=payload.encode('utf-8'),
+        verify=True,
     ).and_return(flexmock(ok=True))
     module.ping_monitor(


@@ -114,7 +114,8 @@ def test_dump_databases_runs_mongodump_with_directory_format():
     flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
     flexmock(module).should_receive('execute_command').with_args(
-        ['mongodump', '--out', 'databases/localhost/foo', '--db', 'foo'], shell=True,
+        ['mongodump', '--out', 'databases/localhost/foo', '--db', 'foo'],
+        shell=True,
     ).and_return(flexmock()).once()
     assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == []
@@ -157,7 +158,7 @@ def test_dump_databases_runs_mongodumpall_for_all_databases():
 def test_restore_database_dump_runs_mongorestore():
-    database_config = [{'name': 'foo'}]
+    database_config = [{'name': 'foo', 'schemas': None}]
     extract_process = flexmock(stdout=flexmock())
     flexmock(module).should_receive('make_dump_path')
@@ -189,7 +190,9 @@ def test_restore_database_dump_errors_on_multiple_database_config():
 def test_restore_database_dump_runs_mongorestore_with_hostname_and_port():
-    database_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
+    database_config = [
+        {'name': 'foo', 'hostname': 'database.example.org', 'port': 5433, 'schemas': None}
+    ]
     extract_process = flexmock(stdout=flexmock())
     flexmock(module).should_receive('make_dump_path')
@@ -223,6 +226,7 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password():
             'username': 'mongo',
             'password': 'trustsome1',
             'authentication_database': 'admin',
+            'schemas': None,
         }
     ]
     extract_process = flexmock(stdout=flexmock())
@@ -254,7 +258,7 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password():
 def test_restore_database_dump_runs_mongorestore_with_options():
-    database_config = [{'name': 'foo', 'restore_options': '--harder'}]
+    database_config = [{'name': 'foo', 'restore_options': '--harder', 'schemas': None}]
     extract_process = flexmock(stdout=flexmock())
     flexmock(module).should_receive('make_dump_path')
@@ -271,8 +275,36 @@ def test_restore_database_dump_runs_mongorestore_with_options():
     )
+def test_restore_databases_dump_runs_mongorestore_with_schemas():
+    database_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}]
+    extract_process = flexmock(stdout=flexmock())
+    flexmock(module).should_receive('make_dump_path')
+    flexmock(module.dump).should_receive('make_database_dump_filename')
+    flexmock(module).should_receive('execute_command_with_processes').with_args(
+        [
+            'mongorestore',
+            '--archive',
+            '--drop',
+            '--db',
+            'foo',
+            '--nsInclude',
+            'bar',
+            '--nsInclude',
+            'baz',
+        ],
+        processes=[extract_process],
+        output_log_level=logging.DEBUG,
+        input_file=extract_process.stdout,
+    ).once()
+    module.restore_database_dump(
+        database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process
+    )
 def test_restore_database_dump_runs_psql_for_all_database_dump():
-    database_config = [{'name': 'all'}]
+    database_config = [{'name': 'all', 'schemas': None}]
     extract_process = flexmock(stdout=flexmock())
     flexmock(module).should_receive('make_dump_path')
@@ -290,7 +322,7 @@ def test_restore_database_dump_runs_psql_for_all_database_dump():
 def test_restore_database_dump_with_dry_run_skips_restore():
-    database_config = [{'name': 'foo'}]
+    database_config = [{'name': 'foo', 'schemas': None}]
     flexmock(module).should_receive('make_dump_path')
     flexmock(module.dump).should_receive('make_database_dump_filename')
@@ -302,7 +334,7 @@ def test_restore_database_dump_with_dry_run_skips_restore():
 def test_restore_database_dump_without_extract_process_restores_from_disk():
-    database_config = [{'name': 'foo', 'format': 'directory'}]
+    database_config = [{'name': 'foo', 'format': 'directory', 'schemas': None}]
     flexmock(module).should_receive('make_dump_path')
     flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path')
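The new mongorestore test above expects every entry under a database's schemas option to be forwarded as its own --nsInclude argument, while a schemas value of None leaves the command untouched. A minimal sketch of that argument construction under that assumption (the helper name is made up for illustration):

def make_namespace_arguments(database):
    # One --nsInclude flag per configured schema; an unset or None 'schemas' adds nothing.
    arguments = []
    for schema in database.get('schemas') or []:
        arguments.extend(('--nsInclude', schema))
    return arguments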


@@ -149,7 +149,14 @@ def test_execute_dump_command_runs_mysqldump():
     flexmock(module.dump).should_receive('create_named_pipe_for_dump')
     flexmock(module).should_receive('execute_command').with_args(
-        ('mysqldump', '--add-drop-database', '--databases', 'foo', '--result-file', 'dump',),
+        (
+            'mysqldump',
+            '--add-drop-database',
+            '--databases',
+            'foo',
+            '--result-file',
+            'dump',
+        ),
         extra_environment=None,
         run_to_completion=False,
     ).and_return(process).once()
@@ -175,7 +182,13 @@ def test_execute_dump_command_runs_mysqldump_without_add_drop_database():
     flexmock(module.dump).should_receive('create_named_pipe_for_dump')
     flexmock(module).should_receive('execute_command').with_args(
-        ('mysqldump', '--databases', 'foo', '--result-file', 'dump',),
+        (
+            'mysqldump',
+            '--databases',
+            'foo',
+            '--result-file',
+            'dump',
+        ),
         extra_environment=None,
         run_to_completion=False,
     ).and_return(process).once()


@@ -56,6 +56,7 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_hostnam
             'psql',
             '--list',
             '--no-password',
+            '--no-psqlrc',
             '--csv',
             '--tuples-only',
             '--host',
@@ -75,7 +76,16 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_usernam
 def test_database_names_to_dump_with_all_and_format_lists_databases_with_username():
     database = {'name': 'all', 'format': 'custom', 'username': 'postgres'}
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('psql', '--list', '--no-password', '--csv', '--tuples-only', '--username', 'postgres'),
+        (
+            'psql',
+            '--list',
+            '--no-password',
+            '--no-psqlrc',
+            '--csv',
+            '--tuples-only',
+            '--username',
+            'postgres',
+        ),
         extra_environment=object,
     ).and_return('foo,test,\nbar,test,"stuff and such"')
@@ -88,7 +98,7 @@ def test_database_names_to_dump_with_all_and_format_lists_databases_with_usernam
 def test_database_names_to_dump_with_all_and_format_lists_databases_with_options():
     database = {'name': 'all', 'format': 'custom', 'list_options': '--harder'}
     flexmock(module).should_receive('execute_command_and_capture_output').with_args(
-        ('psql', '--list', '--no-password', '--csv', '--tuples-only', '--harder'),
+        ('psql', '--list', '--no-password', '--no-psqlrc', '--csv', '--tuples-only', '--harder'),
         extra_environment=object,
     ).and_return('foo,test,\nbar,test,"stuff and such"')
@@ -109,6 +119,28 @@ def test_database_names_to_dump_with_all_and_format_excludes_particular_database
     )
+def test_database_names_to_dump_with_all_and_psql_command_uses_custom_command():
+    database = {'name': 'all', 'format': 'custom', 'psql_command': 'docker exec mycontainer psql'}
+    flexmock(module).should_receive('execute_command_and_capture_output').with_args(
+        (
+            'docker',
+            'exec',
+            'mycontainer',
+            'psql',
+            '--list',
+            '--no-password',
+            '--no-psqlrc',
+            '--csv',
+            '--tuples-only',
+        ),
+        extra_environment=object,
+    ).and_return('foo,text').once()
+    assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == (
+        'foo',
+    )
 def test_dump_databases_runs_pg_dump_for_each_database():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
@@ -411,7 +443,7 @@ def test_dump_databases_runs_non_default_pg_dump():
 def test_restore_database_dump_runs_pg_restore():
-    database_config = [{'name': 'foo'}]
+    database_config = [{'name': 'foo', 'schemas': None}]
     extract_process = flexmock(stdout=flexmock())
     flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
@@ -433,7 +465,16 @@ def test_restore_database_dump_runs_pg_restore():
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'),
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
@@ -458,7 +499,9 @@ def test_restore_database_dump_errors_on_multiple_database_config():
 def test_restore_database_dump_runs_pg_restore_with_hostname_and_port():
-    database_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
+    database_config = [
+        {'name': 'foo', 'hostname': 'database.example.org', 'port': 5433, 'schemas': None}
+    ]
     extract_process = flexmock(stdout=flexmock())
     flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
@@ -487,6 +530,7 @@ def test_restore_database_dump_runs_pg_restore_with_hostname_and_port():
         (
             'psql',
             '--no-password',
+            '--no-psqlrc',
             '--quiet',
             '--host',
             'database.example.org',
@@ -506,7 +550,9 @@ def test_restore_database_dump_runs_pg_restore_with_hostname_and_port():
 def test_restore_database_dump_runs_pg_restore_with_username_and_password():
-    database_config = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}]
+    database_config = [
+        {'name': 'foo', 'username': 'postgres', 'password': 'trustsome1', 'schemas': None}
+    ]
     extract_process = flexmock(stdout=flexmock())
     flexmock(module).should_receive('make_extra_environment').and_return(
@@ -535,6 +581,7 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password():
         (
             'psql',
             '--no-password',
+            '--no-psqlrc',
             '--quiet',
             '--username',
             'postgres',
@@ -553,7 +600,12 @@ def test_restore_database_dump_runs_pg_restore_with_username_and_password():
 def test_restore_database_dump_runs_pg_restore_with_options():
     database_config = [
-        {'name': 'foo', 'restore_options': '--harder', 'analyze_options': '--smarter'}
+        {
+            'name': 'foo',
+            'restore_options': '--harder',
+            'analyze_options': '--smarter',
+            'schemas': None,
+        }
     ]
     extract_process = flexmock(stdout=flexmock())
@@ -580,6 +632,7 @@ def test_restore_database_dump_runs_pg_restore_with_options():
         (
             'psql',
             '--no-password',
+            '--no-psqlrc',
            '--quiet',
             '--dbname',
             'foo',
@@ -596,21 +649,58 @@ def test_restore_database_dump_runs_pg_restore_with_options():
 def test_restore_database_dump_runs_psql_for_all_database_dump():
-    database_config = [{'name': 'all'}]
+    database_config = [{'name': 'all', 'schemas': None}]
     extract_process = flexmock(stdout=flexmock())
     flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path')
     flexmock(module.dump).should_receive('make_database_dump_filename')
     flexmock(module).should_receive('execute_command_with_processes').with_args(
-        ('psql', '--no-password'),
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+        ),
         processes=[extract_process],
         output_log_level=logging.DEBUG,
         input_file=extract_process.stdout,
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('psql', '--no-password', '--quiet', '--command', 'ANALYZE'),
+        ('psql', '--no-password', '--no-psqlrc', '--quiet', '--command', 'ANALYZE'),
+        extra_environment={'PGSSLMODE': 'disable'},
+    ).once()
+    module.restore_database_dump(
+        database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process
+    )
+def test_restore_database_dump_runs_psql_for_plain_database_dump():
+    database_config = [{'name': 'foo', 'format': 'plain', 'schemas': None}]
+    extract_process = flexmock(stdout=flexmock())
+    flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
+    flexmock(module).should_receive('make_dump_path')
+    flexmock(module.dump).should_receive('make_database_dump_filename')
+    flexmock(module).should_receive('execute_command_with_processes').with_args(
+        ('psql', '--no-password', '--no-psqlrc', '--dbname', 'foo'),
+        processes=[extract_process],
+        output_log_level=logging.DEBUG,
+        input_file=extract_process.stdout,
+        extra_environment={'PGSSLMODE': 'disable'},
+    ).once()
+    flexmock(module).should_receive('execute_command').with_args(
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
@@ -621,7 +711,12 @@ def test_restore_database_dump_runs_psql_for_all_database_dump():
 def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
     database_config = [
-        {'name': 'foo', 'pg_restore_command': 'special_pg_restore', 'psql_command': 'special_psql'}
+        {
+            'name': 'foo',
+            'pg_restore_command': 'docker exec mycontainer pg_restore',
+            'psql_command': 'docker exec mycontainer psql',
+            'schemas': None,
+        }
     ]
     extract_process = flexmock(stdout=flexmock())
@@ -630,7 +725,10 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
     flexmock(module.dump).should_receive('make_database_dump_filename')
     flexmock(module).should_receive('execute_command_with_processes').with_args(
         (
-            'special_pg_restore',
+            'docker',
+            'exec',
+            'mycontainer',
+            'pg_restore',
             '--no-password',
             '--if-exists',
             '--exit-on-error',
@@ -644,7 +742,19 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('special_psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'),
+        (
+            'docker',
+            'exec',
+            'mycontainer',
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
@@ -654,7 +764,7 @@ def test_restore_database_dump_runs_non_default_pg_restore_and_psql():
 def test_restore_database_dump_with_dry_run_skips_restore():
-    database_config = [{'name': 'foo'}]
+    database_config = [{'name': 'foo', 'schemas': None}]
     flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path')
@@ -667,7 +777,7 @@ def test_restore_database_dump_with_dry_run_skips_restore():
 def test_restore_database_dump_without_extract_process_restores_from_disk():
-    database_config = [{'name': 'foo'}]
+    database_config = [{'name': 'foo', 'schemas': None}]
     flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
     flexmock(module).should_receive('make_dump_path')
@@ -689,7 +799,61 @@ def test_restore_database_dump_without_extract_process_restores_from_disk():
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
     flexmock(module).should_receive('execute_command').with_args(
-        ('psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'),
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
+        extra_environment={'PGSSLMODE': 'disable'},
+    ).once()
+    module.restore_database_dump(
+        database_config, 'test.yaml', {}, dry_run=False, extract_process=None
+    )
+def test_restore_database_dump_with_schemas_restores_schemas():
+    database_config = [{'name': 'foo', 'schemas': ['bar', 'baz']}]
+    flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
+    flexmock(module).should_receive('make_dump_path')
+    flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path')
+    flexmock(module).should_receive('execute_command_with_processes').with_args(
+        (
+            'pg_restore',
+            '--no-password',
+            '--if-exists',
+            '--exit-on-error',
+            '--clean',
+            '--dbname',
+            'foo',
+            '/dump/path',
+            '--schema',
+            'bar',
+            '--schema',
+            'baz',
+        ),
+        processes=[],
+        output_log_level=logging.DEBUG,
+        input_file=None,
+        extra_environment={'PGSSLMODE': 'disable'},
+    ).once()
+    flexmock(module).should_receive('execute_command').with_args(
+        (
+            'psql',
+            '--no-password',
+            '--no-psqlrc',
+            '--quiet',
+            '--dbname',
+            'foo',
+            '--command',
+            'ANALYZE',
+        ),
         extra_environment={'PGSSLMODE': 'disable'},
     ).once()
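Taken together, the PostgreSQL test changes above assume three behaviors: --no-psqlrc is always passed to psql, a multi-word psql_command or pg_restore_command string such as 'docker exec mycontainer psql' is split into separate arguments, and each configured schema is appended as its own --schema flag. A hedged sketch of that command construction, with illustrative helper names rather than the hook's real ones:

def make_analyze_command(database, psql_command='psql'):
    # Split a multi-word command string into its parts, then add the standard flags.
    command = tuple(psql_command.split(' '))
    analyze = command + ('--no-password', '--no-psqlrc', '--quiet')
    if database['name'] != 'all':
        analyze += ('--dbname', database['name'])
    return analyze + ('--command', 'ANALYZE')


def make_schema_flags(database):
    # Each entry in the 'schemas' list becomes its own --schema argument for pg_restore.
    flags = ()
    for schema in database.get('schemas') or ():
        flags += ('--schema', schema)
    return flags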


@@ -320,7 +320,11 @@ def test_execute_command_and_capture_output_returns_output_with_extra_environmen
     expected_output = '[]'
     flexmock(module.os, environ={'a': 'b'})
     flexmock(module.subprocess).should_receive('check_output').with_args(
-        full_command, stderr=None, shell=False, env={'a': 'b', 'c': 'd'}, cwd=None,
+        full_command,
+        stderr=None,
+        shell=False,
+        env={'a': 'b', 'c': 'd'},
+        cwd=None,
     ).and_return(flexmock(decode=lambda: expected_output)).once()
     output = module.execute_command_and_capture_output(


@@ -27,7 +27,9 @@ commands =
     pytest {posargs}
 [testenv:end-to-end]
+usedevelop = False
 deps = -rtest_requirements.txt
+    .
 passenv = COVERAGE_FILE
 commands =
     pytest {posargs} --no-cov tests/end-to-end