add support for --list --json
#74
|
@ -131,67 +131,7 @@ def run_configuration(config_filename, args): # pragma: no cover
|
|||
if args.create:
|
||||
hook.execute_hook(hooks.get('before_backup'), config_filename, 'pre-backup')
|
||||
|
||||
json_results = []
|
||||
for unexpanded_repository in location['repositories']:
|
||||
repository = os.path.expanduser(unexpanded_repository)
|
||||
dry_run_label = ' (dry run; not making any changes)' if args.dry_run else ''
|
||||
if args.prune:
|
||||
logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
|
||||
borg_prune.prune_archives(
|
||||
args.verbosity,
|
||||
args.dry_run,
|
||||
repository,
|
||||
storage,
|
||||
retention,
|
||||
local_path=local_path,
|
||||
remote_path=remote_path,
|
||||
)
|
||||
if args.create:
|
||||
logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
|
||||
borg_create.create_archive(
|
||||
args.verbosity,
|
||||
args.dry_run,
|
||||
repository,
|
||||
location,
|
||||
storage,
|
||||
local_path=local_path,
|
||||
remote_path=remote_path,
|
||||
)
|
||||
if args.check:
|
||||
logger.info('{}: Running consistency checks'.format(repository))
|
||||
borg_check.check_archives(
|
||||
args.verbosity,
|
||||
repository,
|
||||
storage,
|
||||
consistency,
|
||||
local_path=local_path,
|
||||
remote_path=remote_path,
|
||||
)
|
||||
if args.list:
|
||||
logger.info('{}: Listing archives'.format(repository))
|
||||
output = borg_list.list_archives(
|
||||
args.verbosity,
|
||||
repository,
|
||||
storage,
|
||||
local_path=local_path,
|
||||
remote_path=remote_path,
|
||||
json=args.json,
|
||||
)
|
||||
if args.json:
|
||||
json_results.append(json.loads(output))
|
||||
else:
|
||||
sys.stdout.write(output)
|
||||
if args.info:
|
||||
logger.info('{}: Displaying summary info for archives'.format(repository))
|
||||
borg_info.display_archives_info(
|
||||
args.verbosity,
|
||||
repository,
|
||||
storage,
|
||||
local_path=local_path,
|
||||
remote_path=remote_path,
|
||||
)
|
||||
if args.json:
|
||||
sys.stdout.write(json.dumps(json_results))
|
||||
_run_commands(args, consistency, local_path, location, remote_path, retention, storage)
|
||||
|
||||
if args.create:
|
||||
hook.execute_hook(hooks.get('after_backup'), config_filename, 'post-backup')
|
||||
|
@ -200,6 +140,76 @@ def run_configuration(config_filename, args): # pragma: no cover
|
|||
raise
|
||||
|
||||
|
||||
def _run_commands(args, consistency, local_path, location, remote_path, retention, storage):
    '''
    Run the requested actions against every configured repository in turn. Any JSON output
    produced along the way is collected and, when --json was requested, written to stdout
    as a single JSON array covering all repositories.
    '''
    collected_json = []
    for raw_repository in location['repositories']:
        _run_commands_on_repository(args, consistency, collected_json, local_path, location, remote_path, retention,
                                    storage, raw_repository)
    if args.json:
        serialized = json.dumps(collected_json)
        sys.stdout.write(serialized)
|
||||
|
||||
|
||||
def _run_commands_on_repository(args, consistency, json_results, local_path, location, remote_path, retention, storage,
                                unexpanded_repository):  # pragma: no cover
    '''
    Run each requested action (prune, create, check, list, info) against a single
    repository. When --list --json is in effect, parse the listing output and append it
    to json_results instead of printing it; otherwise write the listing straight to
    stdout.
    '''
    repository = os.path.expanduser(unexpanded_repository)

    if args.dry_run:
        dry_run_label = ' (dry run; not making any changes)'
    else:
        dry_run_label = ''

    # Every borg invocation below takes the same local/remote executable overrides.
    path_options = dict(local_path=local_path, remote_path=remote_path)

    if args.prune:
        logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
        borg_prune.prune_archives(args.verbosity, args.dry_run, repository, storage, retention, **path_options)

    if args.create:
        logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
        borg_create.create_archive(args.verbosity, args.dry_run, repository, location, storage, **path_options)

    if args.check:
        logger.info('{}: Running consistency checks'.format(repository))
        borg_check.check_archives(args.verbosity, repository, storage, consistency, **path_options)

    if args.list:
        logger.info('{}: Listing archives'.format(repository))
        listing = borg_list.list_archives(args.verbosity, repository, storage, json=args.json, **path_options)
        if args.json:
            # Accumulate parsed per-repository results; the caller emits them as one array.
            json_results.append(json.loads(listing))
        else:
            sys.stdout.write(listing)

    if args.info:
        logger.info('{}: Displaying summary info for archives'.format(repository))
        borg_info.display_archives_info(args.verbosity, repository, storage, **path_options)
|
||||
|
||||
|
||||
def main(): # pragma: no cover
|
||||
try:
|
||||
configure_signals()
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
from borgmatic.commands import borgmatic
|
||||
from flexmock import flexmock
|
||||
import json
|
||||
import pytest
|
||||
import sys
|
||||
|
||||
|
||||
def test__run_commands_handles_multiple_json_outputs_in_array():
    # GIVEN: each per-repository run appends one parsed JSON result for its repository.
    repositories = ["fake_repo1", "fake_repo2", "fake_repo3"]

    (flexmock(borgmatic)
     .should_receive("_run_commands_on_repository")
     .times(3)
     .replace_with(lambda args, consistency, json_results, local_path, location, remote_path, retention, storage,
                   unexpanded_repository: json_results.append({"whatever": unexpanded_repository})))

    # THEN: all three results are written to stdout as a single JSON array.
    expected_output = json.dumps([{"whatever": name} for name in repositories])
    flexmock(sys.stdout).should_call("write").with_args(expected_output)

    # WHEN
    borgmatic._run_commands(
        args=flexmock(json=True),
        consistency=None,
        local_path=None,
        location={"repositories": repositories},
        remote_path=None,
        retention=None,
        storage=None,
    )
|
Loading…
Reference in New Issue