Merge remote-tracking branch 'upstream/master'

Florian Lindner 2018-08-09 21:10:03 +02:00
commit f9c08470ec
17 changed files with 327 additions and 74 deletions

.gitignore

@@ -3,6 +3,7 @@
*.swp
.cache
.coverage
.pytest_cache
.tox
build
dist


@@ -7,5 +7,5 @@ Michele Lazzeri: Custom archive names
newtonne: Read encryption password from external file
Robin `ypid` Schneider: Support additional options of Borg
Scott Squires: Custom archive names
Thomas LÉVEIL: Support for a keep_minutely prune option
Thomas LÉVEIL: Support for a keep_minutely prune option. Support for the --json option
Nick Whyte: Support prefix filtering for archive consistency checks

NEWS

@@ -1,5 +1,14 @@
1.2.1.dev0
* Skip before/after backup hooks when only doing --prune, --check, --list, and/or --info.
* #71: Support for XDG_CONFIG_HOME environment variable for specifying alternate user ~/.config/
path.
* #74, #83: Support for Borg --json option via borgmatic command-line to --list archives or show
archive --info in JSON format, ideal for programmatic consumption.
* #38, #76: Upgrade ruamel.yaml compatibility version range and fix support for Python 3.7.
* #77: Skip non-"*.yaml" config filenames in /etc/borgmatic.d/ so as not to parse backup files,
editor swap files, etc.
* #81: Document user-defined hooks run before/after backup, or on error.
* Add code style guidelines to the documentation.
1.2.0
* #61: Support for Borg --list option via borgmatic command-line to list all archives.


@@ -138,6 +138,31 @@ configuration paths on the command-line with borgmatic's `--config` option.
See `borgmatic --help` for more information.
### Hooks
If you find yourself performing preparation tasks before your backup runs, or
cleanup work afterwards, borgmatic hooks may be of interest. They're simply
shell commands that borgmatic executes for you at various points, and they're
configured in the `hooks` section of your configuration file.
For instance, you can specify `before_backup` hooks to dump a database to file
before backing it up, and specify `after_backup` hooks to delete the temporary
file afterwards.
borgmatic hooks run once per configuration file. `before_backup` hooks run
prior to backups of all repositories. `after_backup` hooks run afterwards, but
not if an error occurs in a previous hook or in the backups themselves. And
borgmatic runs `on_error` hooks if an error occurs.
An important security note about hooks: borgmatic executes all hook commands
with the user permissions of borgmatic itself. So to prevent potential shell
injection or privilege escalation, do not forget to set secure permissions
(chmod 0700) on borgmatic configuration files and scripts invoked by hooks.
See the sample generated configuration file mentioned above for specifics
about hook configuration syntax.
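As a concrete illustration, here is a minimal sketch of how a hook runner along these lines might execute the configured commands. The name mirrors the `hook.execute_hook` call that appears later in this commit's diff, but the body is an assumption for illustration, not borgmatic's actual hook module:

```python
import logging
import subprocess

logger = logging.getLogger(__name__)


def execute_hook(commands, config_filename, description):
    '''
    Run each configured hook command in a shell, logging what's happening.
    Illustrative sketch only; borgmatic's real hook handling may differ.
    '''
    if not commands:
        logger.debug('{}: No commands to run for {} hook'.format(config_filename, description))
        return

    for command in commands:
        logger.info('{}: Running command for {} hook'.format(config_filename, description))
        # Hook commands run with borgmatic's own user permissions, hence the
        # security note above about locking down configuration files and scripts.
        subprocess.check_call(command, shell=True)
```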
## Upgrading
In general, all you should need to do to upgrade borgmatic is run the
@@ -275,7 +300,7 @@ configure a job runner to invoke it periodically.
### cron
If you're using cron, download the [sample cron
file](https://projects.torsion.org/witten/borgmatic/raw/master/sample/cron/borgmatic).
file](https://projects.torsion.org/witten/borgmatic/src/master/sample/cron/borgmatic).
Then, from the directory where you downloaded it:
```bash
@@ -326,6 +351,20 @@ to discuss your idea. We also accept Pull Requests on GitHub, if that's more
your thing. In general, contributions are very welcome. We don't bite!
### Code style
Start with [PEP 8](https://www.python.org/dev/peps/pep-0008/). But then, apply
the following deviations from it (see the example after this list):
* For strings, prefer single quotes over double quotes.
* Limit all lines to a maximum of 100 characters.
* Use trailing commas within multiline values or argument lists.
* For multiline constructs, put opening and closing delimiters on lines
separate from their contents.
* Within multiline constructs, use standard four-space indentation. Don't align
indentation with an opening delimiter.
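As an illustration only, a function following these conventions might look like the snippet below (the function name, subcommand, and flags are made up for the example):

```python
def build_example_command(
    repository,
    dry_run=False,
    local_path='borg',
):
    # Single quotes for strings, four-space indentation, trailing commas within
    # multiline argument lists, and delimiters on lines separate from their contents.
    flags = (
        '--verbose',
        '--stats',
    ) + (('--dry-run',) if dry_run else ())

    return (local_path, 'example-subcommand', repository) + flags
```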
### Development
To get set up to hack on borgmatic, first clone master via HTTPS or SSH:
@@ -341,7 +380,7 @@ git clone ssh://git@projects.torsion.org:3022/witten/borgmatic.git
```
Then, install borgmatic
"[editable](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs)"
"[editable](https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs)"
so that you can easily run borgmatic commands while you're hacking on them to
make sure your changes work.


@@ -5,7 +5,7 @@ import subprocess
logger = logging.getLogger(__name__)
def display_archives_info(repository, storage_config, local_path='borg', remote_path=None):
def display_archives_info(repository, storage_config, local_path='borg', remote_path=None, json=False):
'''
Given a verbosity flag, a local or remote repository path, and a storage config dict,
display summary information for Borg archives in the repository.
@@ -18,7 +18,10 @@ def display_archives_info(repository, storage_config, local_path='borg', remote_
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
+ (('--info',) if logger.isEnabledFor(logging.INFO) else ())
+ (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
+ (('--json',) if json else ())
)
logger.debug(' '.join(full_command))
subprocess.check_call(full_command)
output = subprocess.check_output(full_command)
return output.decode() if output is not None else None
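Taken together, these hunks show one pattern: each optional flag contributes either a small tuple or an empty tuple to a flat command, and the switch from `check_call` to `check_output` captures Borg's output so `--json` results can be parsed by the caller. Here is a standalone sketch of that pattern, with an assumed flag set and a placeholder repository path, not the module's exact code:

```python
import logging
import subprocess

logger = logging.getLogger(__name__)


def run_borg_info(repository, lock_wait=None, json=False, local_path='borg', remote_path=None):
    # Each optional flag is an empty tuple when unused, so concatenation keeps the
    # command a flat tuple of strings.
    full_command = (
        (local_path, 'info', repository)
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--json',) if json else ())
    )
    logger.debug(' '.join(full_command))

    # check_output captures stdout (check_call merely runs the command), which is
    # what lets callers parse Borg's JSON instead of having it printed directly.
    output = subprocess.check_output(full_command)
    return output.decode() if output is not None else None
```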


@@ -5,7 +5,7 @@ import subprocess
logger = logging.getLogger(__name__)
def list_archives(repository, storage_config, local_path='borg', remote_path=None):
def list_archives(repository, storage_config, local_path='borg', remote_path=None, json=False):
'''
Given a verbosity flag, a local or remote repository path, and a storage config dict,
list Borg archives in the repository.
@@ -18,6 +18,9 @@ def list_archives(repository, storage_config, local_path='borg', remote_path=Non
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
+ (('--info',) if logger.isEnabledFor(logging.INFO) else ())
+ (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
+ (('--json',) if json else ())
)
logger.debug(' '.join(full_command))
subprocess.check_call(full_command)
output = subprocess.check_output(full_command)
return output.decode() if output is not None else None
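The same change applies to listing. On the calling side, the returned string is either written straight through or parsed as JSON, which is what later hunks in this commit do. A hedged usage sketch, with a placeholder repository path and the `list_archives` signature taken from the diff above:

```python
import json
import sys

from borgmatic.borg import list as borg_list

repository = 'user@backupserver:example.borg'  # placeholder path for illustration

# Plain listing: pass Borg's text output through unchanged.
sys.stdout.write(borg_list.list_archives(repository, storage_config={}))

# JSON listing: parse the returned string into a Python object for further processing.
archives = json.loads(borg_list.list_archives(repository, storage_config={}, json=True))
```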


@@ -1,5 +1,5 @@
from argparse import ArgumentParser
import json
import logging
import os
from subprocess import CalledProcessError
@@ -24,6 +24,8 @@ def parse_arguments(*arguments):
Given command-line arguments with which this script was invoked, parse the arguments and return
them as an ArgumentParser instance.
'''
config_paths = collect.get_default_config_paths()
parser = ArgumentParser(
description=
'''
@@ -36,8 +38,8 @@
'-c', '--config',
nargs='+',
dest='config_paths',
default=collect.DEFAULT_CONFIG_PATHS,
help='Configuration filenames or directories, defaults to: {}'.format(' '.join(collect.DEFAULT_CONFIG_PATHS)),
default=config_paths,
help='Configuration filenames or directories, defaults to: {}'.format(' '.join(config_paths)),
)
parser.add_argument(
'--excludes',
@@ -74,6 +76,13 @@
action='store_true',
help='Display summary information on archives',
)
parser.add_argument(
'--json',
dest='json',
default=False,
action='store_true',
help='Output results from the --list option as json',
)
parser.add_argument(
'-n', '--dry-run',
dest='dry_run',
@@ -88,6 +97,14 @@
args = parser.parse_args(arguments)
if args.json and not (args.list or args.info):
raise ValueError('The --json option can only be used with the --list or --info options')
if args.json and args.list and args.info:
raise ValueError(
'With the --json option, options --list and --info cannot be used together'
)
# If any of the action flags are explicitly requested, leave them as-is. Otherwise, assume
# defaults: Mutate the given arguments to enable the default actions.
if args.prune or args.create or args.check or args.list or args.info:
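The body of this `if` is cut off by the hunk boundary, but the argument-parsing tests later in this commit (for example, `test_parse_arguments_with_no_actions_defaults_to_all_actions_enabled`) suggest behavior along these lines. The snippet below is an assumed sketch of that behavior, not the actual implementation:

```python
# Assumed sketch of the default-action fallback described in the comment above.
if not (args.prune or args.create or args.check or args.list or args.info):
    # No action was requested explicitly, so enable the default set of actions.
    args.prune = True
    args.create = True
    args.check = True
```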
@@ -119,54 +136,7 @@ def run_configuration(config_filename, args): # pragma: no cover
if args.create:
hook.execute_hook(hooks.get('before_backup'), config_filename, 'pre-backup')
for unexpanded_repository in location['repositories']:
repository = os.path.expanduser(unexpanded_repository)
dry_run_label = ' (dry run; not making any changes)' if args.dry_run else ''
if args.prune:
logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
borg_prune.prune_archives(
args.dry_run,
repository,
storage,
retention,
local_path=local_path,
remote_path=remote_path,
)
if args.create:
logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
borg_create.create_archive(
args.dry_run,
repository,
location,
storage,
local_path=local_path,
remote_path=remote_path,
)
if args.check:
logger.info('{}: Running consistency checks'.format(repository))
borg_check.check_archives(
repository,
storage,
consistency,
local_path=local_path,
remote_path=remote_path,
)
if args.list:
logger.info('{}: Listing archives'.format(repository))
borg_list.list_archives(
repository,
storage,
local_path=local_path,
remote_path=remote_path,
)
if args.info:
logger.info('{}: Displaying summary info for archives'.format(repository))
borg_info.display_archives_info(
repository,
storage,
local_path=local_path,
remote_path=remote_path,
)
_run_commands(args, consistency, local_path, location, remote_path, retention, storage)
if args.create:
hook.execute_hook(hooks.get('after_backup'), config_filename, 'post-backup')
@@ -175,6 +145,80 @@ def run_configuration(config_filename, args): # pragma: no cover
raise
def _run_commands(args, consistency, local_path, location, remote_path, retention, storage):
json_results = []
for unexpanded_repository in location['repositories']:
_run_commands_on_repository(
args, consistency, json_results, local_path, location, remote_path, retention, storage,
unexpanded_repository,
)
if args.json:
sys.stdout.write(json.dumps(json_results))
def _run_commands_on_repository(
args, consistency, json_results, local_path, location, remote_path,
retention, storage, unexpanded_repository,
): # pragma: no cover
repository = os.path.expanduser(unexpanded_repository)
dry_run_label = ' (dry run; not making any changes)' if args.dry_run else ''
if args.prune:
logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
borg_prune.prune_archives(
args.dry_run,
repository,
storage,
retention,
local_path=local_path,
remote_path=remote_path,
)
if args.create:
logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
borg_create.create_archive(
args.dry_run,
repository,
location,
storage,
local_path=local_path,
remote_path=remote_path,
)
if args.check:
logger.info('{}: Running consistency checks'.format(repository))
borg_check.check_archives(
repository,
storage,
consistency,
local_path=local_path,
remote_path=remote_path,
)
if args.list:
logger.info('{}: Listing archives'.format(repository))
output = borg_list.list_archives(
repository,
storage,
local_path=local_path,
remote_path=remote_path,
json=args.json,
)
if args.json:
json_results.append(json.loads(output))
else:
sys.stdout.write(output)
if args.info:
logger.info('{}: Displaying summary info for archives'.format(repository))
output = borg_info.display_archives_info(
repository,
storage,
local_path=local_path,
remote_path=remote_path,
json=args.json,
)
if args.json:
json_results.append(json.loads(output))
else:
sys.stdout.write(output)
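Because `_run_commands` above writes a single JSON array with one element per repository, the `--json` output is straightforward to consume from other tooling. A hedged consumer sketch follows; the command-line flags come from this commit, while the keys inside each element are whatever Borg's own `--json` output contains and aren't specified here:

```python
import json
import subprocess

# Run borgmatic and capture the aggregated JSON array it writes to stdout.
output = subprocess.check_output(('borgmatic', '--list', '--json'))

# One element per configured repository; the structure inside each element is
# Borg's own JSON for that repository.
for repository_result in json.loads(output.decode()):
    print(repository_result)
```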
def main(): # pragma: no cover
try:
configure_signals()


@@ -1,24 +1,35 @@
import os
DEFAULT_CONFIG_PATHS = [
'/etc/borgmatic/config.yaml',
'/etc/borgmatic.d',
os.path.expanduser('~/.config/borgmatic/config.yaml'),
]
def get_default_config_paths():
'''
Based on the value of the XDG_CONFIG_HOME and HOME environment variables, return a list of
default configuration paths. This includes both system-wide configuration and configuration in
the current user's home directory.
'''
user_config_directory = (
os.getenv('XDG_CONFIG_HOME') or os.path.expandvars(os.path.join('$HOME', '.config'))
)
return [
'/etc/borgmatic/config.yaml',
'/etc/borgmatic.d',
'%s/borgmatic/config.yaml' % user_config_directory,
]
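The fallback expression here follows the XDG base directory convention: `$XDG_CONFIG_HOME` wins when it's set, otherwise `$HOME/.config` is used. A small illustrative sketch of that same expression, with made-up paths (the unit tests added later in this commit exercise the same two cases):

```python
import os


def user_config_directory():
    # Same fallback as above: XDG_CONFIG_HOME when set, otherwise $HOME/.config.
    return os.getenv('XDG_CONFIG_HOME') or os.path.expandvars(os.path.join('$HOME', '.config'))


os.environ['HOME'] = '/home/user'
os.environ.pop('XDG_CONFIG_HOME', None)
assert user_config_directory() == '/home/user/.config'

os.environ['XDG_CONFIG_HOME'] = '/home/user/.etc'
assert user_config_directory() == '/home/user/.etc'
```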
def collect_config_filenames(config_paths):
'''
Given a sequence of config paths, both filenames and directories, resolve that to just an
iterable of files. Accomplish this by listing any given directories looking for contained config
files. This is non-recursive, so any directories within the given directories are ignored.
files (ending with the ".yaml" extension). This is non-recursive, so any directories within the
given directories are ignored.
Return paths even if they don't exist on disk, so the user can find out about missing
configuration paths. However, skip a default config path if it's missing, so the user doesn't
have to create a default config path unless they need it.
'''
real_default_config_paths = set(map(os.path.realpath, DEFAULT_CONFIG_PATHS))
real_default_config_paths = set(map(os.path.realpath, get_default_config_paths()))
for path in config_paths:
exists = os.path.exists(path)
@@ -32,5 +43,5 @@ def collect_config_filenames(config_paths):
for filename in os.listdir(path):
full_filename = os.path.join(path, filename)
if not os.path.isdir(full_filename):
if full_filename.endswith('.yaml') and not os.path.isdir(full_filename):
yield full_filename


@@ -10,6 +10,7 @@ from ruamel import yaml
logger = logging.getLogger(__name__)
def schema_filename():
'''
Path to the installed YAML configuration schema file, used to validate and parse the


@@ -7,14 +7,20 @@ from borgmatic.commands import borgmatic as module
def test_parse_arguments_with_no_arguments_uses_defaults():
config_paths = ['default']
flexmock(module.collect).should_receive('get_default_config_paths').and_return(config_paths)
parser = module.parse_arguments()
assert parser.config_paths == module.collect.DEFAULT_CONFIG_PATHS
assert parser.config_paths == config_paths
assert parser.excludes_filename == None
assert parser.verbosity is None
assert parser.json is False
def test_parse_arguments_with_path_arguments_overrides_defaults():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
parser = module.parse_arguments('--config', 'myconfig', '--excludes', 'myexcludes')
assert parser.config_paths == ['myconfig']
@@ -23,6 +29,8 @@ def test_parse_arguments_with_path_arguments_overrides_defaults():
def test_parse_arguments_with_multiple_config_paths_parses_as_list():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
parser = module.parse_arguments('--config', 'myconfig', 'otherconfig')
assert parser.config_paths == ['myconfig', 'otherconfig']
@@ -30,14 +38,24 @@ def test_parse_arguments_with_multiple_config_paths_parses_as_list():
def test_parse_arguments_with_verbosity_flag_overrides_default():
config_paths = ['default']
flexmock(module.collect).should_receive('get_default_config_paths').and_return(config_paths)
parser = module.parse_arguments('--verbosity', '1')
assert parser.config_paths == module.collect.DEFAULT_CONFIG_PATHS
assert parser.config_paths == config_paths
assert parser.excludes_filename == None
assert parser.verbosity == 1
def test_parse_arguments_with_json_flag_overrides_default():
parser = module.parse_arguments('--list', '--json')
assert parser.json is True
def test_parse_arguments_with_no_actions_defaults_to_all_actions_enabled():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
parser = module.parse_arguments()
assert parser.prune is True
@@ -46,6 +64,8 @@ def test_parse_arguments_with_no_actions_defaults_to_all_actions_enabled():
def test_parse_arguments_with_prune_action_leaves_other_actions_disabled():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
parser = module.parse_arguments('--prune')
assert parser.prune is True
@@ -54,6 +74,8 @@ def test_parse_arguments_with_prune_action_leaves_other_actions_disabled():
def test_parse_arguments_with_multiple_actions_leaves_other_action_disabled():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
parser = module.parse_arguments('--create', '--check')
assert parser.prune is False
@@ -62,5 +84,22 @@ def test_parse_arguments_with_multiple_actions_leaves_other_action_disabled():
def test_parse_arguments_with_invalid_arguments_exits():
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default'])
with pytest.raises(SystemExit):
module.parse_arguments('--posix-me-harder')
def test_parse_arguments_with_json_flag_with_list_or_info_flag_does_not_raise_any_error():
module.parse_arguments('--list', '--json')
module.parse_arguments('--info', '--json')
def test_parse_arguments_with_json_flag_but_no_list_or_info_flag_raises_value_error():
with pytest.raises(ValueError):
module.parse_arguments('--json')
def test_parse_arguments_with_json_flag_and_both_list_and_info_flag_raises_value_error():
with pytest.raises(ValueError):
module.parse_arguments('--list', '--info', '--json')


@@ -8,7 +8,7 @@ from borgmatic.borg import info as module
def insert_subprocess_mock(check_call_command, **kwargs):
subprocess = flexmock(module.subprocess)
subprocess.should_receive('check_call').with_args(check_call_command, **kwargs).once()
subprocess.should_receive('check_output').with_args(check_call_command, **kwargs).once()
def insert_logging_mock(log_level):
logging = flexmock(module.logging.Logger)
@@ -46,6 +46,16 @@ def test_display_archives_info_with_verbosity_lots_calls_borg_with_debug_paramet
)
def test_display_archives_info_with_json_calls_borg_with_json_parameter():
insert_subprocess_mock(INFO_COMMAND + ('--json',))
module.display_archives_info(
repository='repo',
storage_config={},
json=True,
)
def test_display_archives_info_with_local_path_calls_borg_via_local_path():
insert_subprocess_mock(('borg1',) + INFO_COMMAND[1:])


@@ -8,7 +8,7 @@ from borgmatic.borg import list as module
def insert_subprocess_mock(check_call_command, **kwargs):
subprocess = flexmock(module.subprocess)
subprocess.should_receive('check_call').with_args(check_call_command, **kwargs).once()
subprocess.should_receive('check_output').with_args(check_call_command, **kwargs).once()
def insert_logging_mock(log_level):
logging = flexmock(module.logging.Logger)
@@ -48,6 +48,16 @@ def test_list_archives_with_verbosity_lots_calls_borg_with_debug_parameter():
)
def test_list_archives_with_json_calls_borg_with_json_parameter():
insert_subprocess_mock(LIST_COMMAND + ('--json',))
module.list_archives(
repository='repo',
storage_config={},
json=True,
)
def test_list_archives_with_local_path_calls_borg_via_local_path():
insert_subprocess_mock(('borg1',) + LIST_COMMAND[1:])


@@ -0,0 +1,52 @@
import json
import sys
from flexmock import flexmock
import pytest
from borgmatic.commands import borgmatic
def test__run_commands_handles_multiple_json_outputs_in_array():
(
flexmock(borgmatic)
.should_receive('_run_commands_on_repository')
.times(3)
.replace_with(
lambda args, consistency, json_results, local_path, location, remote_path, retention,
storage,
unexpanded_repository: json_results.append({"whatever": unexpanded_repository})
)
)
(
flexmock(sys.stdout)
.should_call("write")
.with_args(
json.dumps(
json.loads(
'''
[
{"whatever": "fake_repo1"},
{"whatever": "fake_repo2"},
{"whatever": "fake_repo3"}
]
''',
)
)
)
)
borgmatic._run_commands(
args=flexmock(json=True),
consistency=None,
local_path=None,
location={'repositories': [
'fake_repo1',
'fake_repo2',
'fake_repo3'
]},
remote_path=None,
retention=None,
storage=None,
)


@@ -3,6 +3,22 @@ from flexmock import flexmock
from borgmatic.config import collect as module
def test_get_default_config_paths_includes_absolute_user_config_path():
flexmock(module.os, environ={'XDG_CONFIG_HOME': None, 'HOME': '/home/user'})
config_paths = module.get_default_config_paths()
assert '/home/user/.config/borgmatic/config.yaml' in config_paths
def test_get_default_config_paths_prefers_xdg_config_home_for_user_config_path():
flexmock(module.os, environ={'XDG_CONFIG_HOME': '/home/user/.etc', 'HOME': '/home/user'})
config_paths = module.get_default_config_paths()
assert '/home/user/.etc/borgmatic/config.yaml' in config_paths
def test_collect_config_filenames_collects_given_files():
config_paths = ('config.yaml', 'other.yaml')
flexmock(module.os.path).should_receive('isdir').and_return(False)
@@ -32,6 +48,21 @@ def test_collect_config_filenames_collects_files_from_given_directories_and_igno
)
def test_collect_config_filenames_collects_files_from_given_directories_and_ignores_non_yaml_filenames():
config_paths = ('/etc/borgmatic.d',)
mock_path = flexmock(module.os.path)
mock_path.should_receive('exists').and_return(True)
mock_path.should_receive('isdir').with_args('/etc/borgmatic.d').and_return(True)
mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/foo.yaml').and_return(False)
mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/bar.yaml~').and_return(False)
mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/baz.txt').and_return(False)
flexmock(module.os).should_receive('listdir').and_return(['foo.yaml', 'bar.yaml~', 'baz.txt'])
config_filenames = tuple(module.collect_config_filenames(config_paths))
assert config_filenames == ('/etc/borgmatic.d/foo.yaml',)
def test_collect_config_filenames_skips_etc_borgmatic_config_dot_yaml_if_it_does_not_exist():
config_paths = ('config.yaml', '/etc/borgmatic/config.yaml')
mock_path = flexmock(module.os.path)


@@ -33,7 +33,7 @@ setup(
],
install_requires=(
'pykwalify>=1.6.0',
'ruamel.yaml<=0.15',
'ruamel.yaml>0.15.0,<0.16.0',
'setuptools',
),
tests_require=(


@@ -1,5 +1,5 @@
flexmock==0.10.2
pykwalify==1.6.0
pytest==2.9.1
pykwalify==1.6.1
pytest==3.6.3
pytest-cov==2.5.1
ruamel.yaml==0.15.18
ruamel.yaml>0.15.0,<0.16.0