WIP: Separate endpoint for each primary action for monitor hooks #660

Closed
jetchirag wants to merge 8 commits from jetchirag/borgmatic:action-based-monitor-endpoints into main
3 changed files with 21 additions and 22 deletions
Showing only changes of commit 08e8262ee7

@@ -146,7 +146,6 @@ def make_parsers():
help='Go through the motions, but do not actually write to any repositories',
)
global_group.add_argument(
'-nm',
'--skip-monitoring',
jetchirag marked this conversation as resolved (outdated)

I'd prefer not including short flags for an option like this one. I generally only add short flags for the more frequently used options. But if it's just too annoying to type `--skip-monitoring` all the time, you could remove the option entirely and do what borgmatic does for logging: If borgmatic is run interactively, it implicitly disables syslog logging. So it could also disable monitoring when at an interactive console. Up to you! (Although, a related issue: #665.)
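
For reference, a minimal sketch of what that implicit behavior could look like, assuming a plain `isatty()` check; `interactive_console()` and `should_ping_monitors()` are illustrative names here, not borgmatic's actual helpers:

```python
import sys


def interactive_console():
    '''Guess whether borgmatic is running at an interactive console.'''
    return sys.stderr.isatty()


def should_ping_monitors(skip_monitoring):
    '''Ping monitoring hooks unless explicitly skipped or running interactively.'''
    return not skip_monitoring and not interactive_console()
```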

Noted. I would personally prefer the `--skip-monitoring` flag if I am configuring monitoring. I've removed the short flag, but let me know if you'd rather go with your alternate suggestion.

Nope, it's fine as-is IMO.
dest='skip_monitoring',
action='store_true',
jetchirag marked this conversation as resolved (outdated)

I think you can omit this `dest` line, because Python will automatically use this name based on the flag name.
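
A standalone argparse sketch (separate from borgmatic's own parser) showing both points at once: the `dest` is derived automatically from the long flag, and `action='store_true'` defaults to `False` when the flag is omitted:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--skip-monitoring', action='store_true')

# Dashes become underscores in the derived dest, so no explicit dest= is needed.
print(parser.parse_args([]).skip_monitoring)                     # False
print(parser.parse_args(['--skip-monitoring']).skip_monitoring)  # True
```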

@@ -66,9 +66,9 @@ def run_configuration(config_filename, config, arguments):
error_repository = ''
using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments)
monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)
skip_monitoring = getattr(global_arguments, 'skip_monitoring', False)
skip_monitoring = global_arguments.skip_monitoring
jetchirag marked this conversation as resolved (outdated)

nit(pythonic): `next(iter(arguments))`

Also, if we are only using this variable when `using_primary_action`, it might make sense to rename it to `primary_action_name` and/or move it to where it's needed, too.

Thanks, that's definitely more efficient.

If I'm reading this correctly, this will just grab the first action. But there could very well be multiple actions at once, and I'd expect Cronitor to be pinged for each one (if configured for each one).

@witten I somehow missed that there could be multiple actions. Will fix it.

Why `getattr()`? Doesn't the attribute always exist now that you've added it to the global arguments? So would just `global_arguments.skip_monitoring` work? I don't even think you need the `False` here because `action='store_true'` above defaults to False if not specified.
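
To make the multiple-actions point concrete, here's a hedged sketch of pinging once per requested primary action rather than only for the first; `PRIMARY_ACTIONS` and `ping_monitor_for_action()` are hypothetical stand-ins, not borgmatic's real hook API:

```python
PRIMARY_ACTIONS = {'create', 'prune', 'compact', 'check'}


def ping_monitor_for_action(action_name):
    # Placeholder for the real per-action monitor call (e.g. a Cronitor ping).
    print(f'Pinging monitor endpoint for {action_name}')


def ping_monitors(arguments, skip_monitoring=False):
    '''Ping the per-action endpoint for every primary action in the parsed arguments.'''
    if skip_monitoring:
        return

    # Walk every requested action (not just the first), skipping the 'global' pseudo-action.
    for action_name in arguments:
        if action_name == 'global' or action_name not in PRIMARY_ACTIONS:
            continue

        ping_monitor_for_action(action_name)
```

For example, `ping_monitors({'global': None, 'prune': None, 'check': None})` would ping for both `prune` and `check`.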
action_names = [
action for action in arguments.keys() if action != 'global' and isinstance(action, str)
action for action in arguments.keys() if action != 'global'
jetchirag marked this conversation as resolved (outdated)

On the `isinstance()` call: Shouldn't they always be strings? Or is something else sneaking in there?
]
try:

@@ -16,7 +16,7 @@ def test_run_configuration_runs_actions_for_each_repository():
expected_results[1:]
)
config = {'location': {'repositories': ['foo', 'bar']}}
arguments = {'global': flexmock(monitoring_verbosity=1)}
arguments = {'global': flexmock(monitoring_verbosity=1, skip_monitoring=False)}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -30,7 +30,7 @@ def test_run_configuration_with_invalid_borg_version_errors():
flexmock(module.dispatch).should_receive('call_hooks').never()
flexmock(module).should_receive('run_actions').never()
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'prune': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'prune': flexmock()}
list(module.run_configuration('test.yaml', config, arguments))
@@ -45,7 +45,7 @@ def test_run_configuration_logs_monitor_start_error():
flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').never()
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -60,7 +60,7 @@ def test_run_configuration_bails_for_monitor_start_soft_failure():
flexmock(module).should_receive('log_error_records').never()
flexmock(module).should_receive('run_actions').never()
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -76,7 +76,7 @@ def test_run_configuration_logs_actions_error():
flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_raise(OSError)
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False)}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False)}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -92,7 +92,7 @@ def test_run_configuration_bails_for_actions_soft_failure():
flexmock(module).should_receive('log_error_records').never()
flexmock(module.command).should_receive('considered_soft_failure').and_return(True)
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -109,7 +109,7 @@ def test_run_configuration_logs_monitor_log_error():
flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_return([])
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -127,7 +127,7 @@ def test_run_configuration_bails_for_monitor_log_soft_failure():
flexmock(module).should_receive('run_actions').and_return([])
flexmock(module.command).should_receive('considered_soft_failure').and_return(True)
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -144,7 +144,7 @@ def test_run_configuration_logs_monitor_finish_error():
flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_return([])
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -162,7 +162,7 @@ def test_run_configuration_bails_for_monitor_finish_soft_failure():
flexmock(module).should_receive('run_actions').and_return([])
flexmock(module.command).should_receive('considered_soft_failure').and_return(True)
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -179,7 +179,7 @@ def test_run_configuration_logs_on_error_hook_error():
).and_return(expected_results[1:])
flexmock(module).should_receive('run_actions').and_raise(OSError)
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -195,7 +195,7 @@ def test_run_configuration_bails_for_on_error_hook_soft_failure():
flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_raise(OSError)
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
@@ -210,7 +210,7 @@ def test_run_configuration_retries_soft_error():
flexmock(module).should_receive('run_actions').and_raise(OSError).and_return([])
flexmock(module).should_receive('log_error_records').and_return([flexmock()]).once()
config = {'location': {'repositories': ['foo']}, 'storage': {'retries': 1}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
assert results == []
@@ -232,7 +232,7 @@ def test_run_configuration_retries_hard_error():
'foo: Error running actions for repository', OSError,
).and_return(error_logs)
config = {'location': {'repositories': ['foo']}, 'storage': {'retries': 1}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
assert results == error_logs
@@ -250,7 +250,7 @@ def test_run_configuration_repos_ordered():
'bar: Error running actions for repository', OSError
).and_return(expected_results[1:]).ordered()
config = {'location': {'repositories': ['foo', 'bar']}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
assert results == expected_results
@@ -281,7 +281,7 @@ def test_run_configuration_retries_round_robbin():
'bar: Error running actions for repository', OSError
).and_return(bar_error_logs).ordered()
config = {'location': {'repositories': ['foo', 'bar']}, 'storage': {'retries': 1}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
assert results == foo_error_logs + bar_error_logs
@@ -310,7 +310,7 @@ def test_run_configuration_retries_one_passes():
'bar: Error running actions for repository', OSError
).and_return(error_logs).ordered()
config = {'location': {'repositories': ['foo', 'bar']}, 'storage': {'retries': 1}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
assert results == error_logs
@@ -349,7 +349,7 @@ def test_run_configuration_retry_wait():
'foo: Error running actions for repository', OSError
).and_return(error_logs).ordered()
config = {'location': {'repositories': ['foo']}, 'storage': {'retries': 3, 'retry_wait': 10}}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
assert results == error_logs
@@ -387,7 +387,7 @@ def test_run_configuration_retries_timeout_multiple_repos():
'location': {'repositories': ['foo', 'bar']},
'storage': {'retries': 1, 'retry_wait': 10},
}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False, skip_monitoring=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments))
assert results == error_logs