Compare commits

...

19 Commits

Author SHA1 Message Date
Dan Helfman 5b3cfc542d Switch to PyPI API token. 2022-03-14 14:00:03 -07:00
Dan Helfman c838c1d11b Fix header placement in documentation guide. 2022-03-14 13:50:22 -07:00
Dan Helfman 4d1d8d7409 Bump version for release. 2022-03-14 13:43:24 -07:00
Dan Helfman db7499db82 Document "repositories" context for "before_*" and "after_*" command action hooks (#469). 2022-03-14 13:34:14 -07:00
Dan Helfman 6b500c2a8b Add repositories context for command hooks.
Reviewed-on: borgmatic-collective/borgmatic#469
2022-03-14 20:13:15 +00:00
Dan Helfman 95c518e59b Documentation tip about dealing with hangs when database hook is enabled. 2022-03-12 13:17:32 -08:00
Dan Helfman 976516d0e1 When loading a configuration file that is unreadable due to file permissions, warn instead of erroring (#444). 2022-03-08 10:19:36 -08:00
Dan Helfman 574eb91921 Fix Borg usage error in the "compact" action when running "borgmatic --dry-run". Now, skip "compact" entirely during a dry run (#507). 2022-03-07 21:46:12 -08:00
Dan Helfman 28fef3264b Fix handling of "patterns_from" and "exclude_from" options to error instead of warning when referencing unreadable files and running "create" action (#486). 2022-03-07 15:32:07 -08:00
Dan Helfman 9161dbcb7d Removing unnecessary leading underscores from functions. 2022-03-07 11:58:29 -08:00
Dan Helfman 4b3027e4fc Add test for new working_directory option (#431). 2022-03-03 11:48:18 -08:00
Dan Helfman 0eb2634f9b Working directory option to support source directories with relative paths (#431).
Reviewed-on: borgmatic-collective/borgmatic#477
2022-03-03 19:28:17 +00:00
fabianschilling b39f08694d Merge branch 'master' into pr-working-directory 2022-01-05 09:30:27 +00:00
Fabian Schilling 85e0334826 Add missing working_directory arg to pass tests 2021-12-10 18:24:41 +01:00
Fabian Schilling 2a80e48a92 Pass working directory to execute functions 2021-12-10 18:23:44 +01:00
Fabian Schilling 5821c6782e Add defaults to not set in schema 2021-12-10 18:23:08 +01:00
Fabian Schilling f15498f6d9 Add working_directory to borgmatic schema 2021-12-10 17:58:27 +01:00
Chen Yufei 0014b149f8 remove configuration_filename as it's already set. 2021-11-26 11:38:58 +08:00
Chen Yufei 091c07bbe2 Add context for various hooks. 2021-11-26 11:35:10 +08:00
14 changed files with 552 additions and 265 deletions

NEWS
View File

@ -1,3 +1,16 @@
1.5.24
* #431: Add "working_directory" option to support source directories with relative paths.
* #444: When loading a configuration file that is unreadable due to file permissions, warn instead
of erroring. This supports running borgmatic as a non-root user with configuration in ~/.config
even if there is an unreadable global configuration file in /etc.
* #469: Add "repositories" context to "before_*" and "after_*" command action hooks. See the
documentation for more information:
https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/
* #486: Fix handling of "patterns_from" and "exclude_from" options to error instead of warning when
referencing unreadable files and "create" action is run.
* #507: Fix Borg usage error in the "compact" action when running "borgmatic --dry-run". Now, skip
"compact" entirely during a dry run.
1.5.23
* #394: Compact repository segments and free space with new "borgmatic compact" action. Borg 1.2+
only. Also run "compact" by default when no actions are specified, as "prune" in Borg 1.2 no

View File

@ -33,9 +33,9 @@ def compact_segments(
+ (('--threshold', str(threshold)) if threshold else ())
+ (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ (('--dry-run',) if dry_run else ())
+ (tuple(extra_borg_options.split(' ')) if extra_borg_options else ())
+ (repository,)
)
execute_command(full_command, output_log_level=logging.INFO, borg_local_path=local_path)
if not dry_run:
execute_command(full_command, output_log_level=logging.INFO, borg_local_path=local_path)

View File

@ -11,7 +11,7 @@ from borgmatic.execute import DO_NOT_CAPTURE, execute_command, execute_command_w
logger = logging.getLogger(__name__)
def _expand_directory(directory):
def expand_directory(directory):
'''
Given a directory path, expand any tilde (representing a user's home directory) and any globs
therein. Return a list of one or more resulting paths.
@ -21,7 +21,7 @@ def _expand_directory(directory):
return glob.glob(expanded_directory) or [expanded_directory]
def _expand_directories(directories):
def expand_directories(directories):
'''
Given a sequence of directory paths, expand tildes and globs in each one. Return all the
resulting directories as a single flattened tuple.
@ -30,11 +30,11 @@ def _expand_directories(directories):
return ()
return tuple(
itertools.chain.from_iterable(_expand_directory(directory) for directory in directories)
itertools.chain.from_iterable(expand_directory(directory) for directory in directories)
)
def _expand_home_directories(directories):
def expand_home_directories(directories):
'''
Given a sequence of directory paths, expand tildes in each one. Do not perform any globbing.
Return the results as a tuple.
@ -98,7 +98,7 @@ def deduplicate_directories(directory_devices):
return tuple(sorted(deduplicated))
def _write_pattern_file(patterns=None):
def write_pattern_file(patterns=None):
'''
Given a sequence of patterns, write them to a named temporary file and return it. Return None
if no patterns are provided.
@ -113,7 +113,19 @@ def _write_pattern_file(patterns=None):
return pattern_file
def _make_pattern_flags(location_config, pattern_filename=None):
def ensure_files_readable(*filename_lists):
'''
Given a sequence of filename sequences, ensure that each filename is openable. This prevents
unreadable files from being passed to Borg, which in certain situations only warns instead of
erroring.
'''
for file_object in itertools.chain.from_iterable(
filename_list for filename_list in filename_lists if filename_list
):
open(file_object).close()
def make_pattern_flags(location_config, pattern_filename=None):
'''
Given a location config dict with a potential patterns_from option, and a filename containing
any additional patterns, return the corresponding Borg flags for those files as a tuple.
@ -129,7 +141,7 @@ def _make_pattern_flags(location_config, pattern_filename=None):
)
def _make_exclude_flags(location_config, exclude_filename=None):
def make_exclude_flags(location_config, exclude_filename=None):
'''
Given a location config dict with various exclude options, and a filename containing any exclude
patterns, return the corresponding Borg flags as a tuple.
@ -206,16 +218,20 @@ def create_archive(
'''
sources = deduplicate_directories(
map_directories_to_devices(
_expand_directories(
expand_directories(
location_config['source_directories']
+ borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
)
)
)
pattern_file = _write_pattern_file(location_config.get('patterns'))
exclude_file = _write_pattern_file(
_expand_home_directories(location_config.get('exclude_patterns'))
try:
working_directory = os.path.expanduser(location_config.get('working_directory'))
except TypeError:
working_directory = None
pattern_file = write_pattern_file(location_config.get('patterns'))
exclude_file = write_pattern_file(
expand_home_directories(location_config.get('exclude_patterns'))
)
checkpoint_interval = storage_config.get('checkpoint_interval', None)
chunker_params = storage_config.get('chunker_params', None)
@ -251,11 +267,13 @@ def create_archive(
('--remote-ratelimit', str(remote_rate_limit)) if remote_rate_limit else ()
)
ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))
full_command = (
tuple(local_path.split(' '))
+ ('create',)
+ _make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
+ _make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
+ make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
+ make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
+ (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
+ (('--chunker-params', chunker_params) if chunker_params else ())
+ (('--compression', compression) if compression else ())
@ -309,6 +327,13 @@ def create_archive(
output_log_level,
output_file,
borg_local_path=local_path,
working_directory=working_directory,
)
return execute_command(full_command, output_log_level, output_file, borg_local_path=local_path)
return execute_command(
full_command,
output_log_level,
output_file,
borg_local_path=local_path,
working_directory=working_directory,
)

View File

@ -65,6 +65,10 @@ def run_configuration(config_filename, config, arguments):
using_primary_action = {'prune', 'compact', 'create', 'check'}.intersection(arguments)
monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity)
hook_context = {
'repositories': ','.join(location['repositories']),
}
try:
local_borg_version = borg_version.local_borg_version(local_path)
except (OSError, CalledProcessError, ValueError) as error:
@ -90,6 +94,7 @@ def run_configuration(config_filename, config, arguments):
config_filename,
'pre-prune',
global_arguments.dry_run,
**hook_context,
)
if 'compact' in arguments:
command.execute_hook(
@ -106,6 +111,7 @@ def run_configuration(config_filename, config, arguments):
config_filename,
'pre-backup',
global_arguments.dry_run,
**hook_context,
)
if 'check' in arguments:
command.execute_hook(
@ -114,6 +120,7 @@ def run_configuration(config_filename, config, arguments):
config_filename,
'pre-check',
global_arguments.dry_run,
**hook_context,
)
if 'extract' in arguments:
command.execute_hook(
@ -122,6 +129,7 @@ def run_configuration(config_filename, config, arguments):
config_filename,
'pre-extract',
global_arguments.dry_run,
**hook_context,
)
if using_primary_action:
dispatch.call_hooks(
@ -188,6 +196,7 @@ def run_configuration(config_filename, config, arguments):
config_filename,
'post-prune',
global_arguments.dry_run,
**hook_context,
)
if 'compact' in arguments:
command.execute_hook(
@ -212,6 +221,7 @@ def run_configuration(config_filename, config, arguments):
config_filename,
'post-backup',
global_arguments.dry_run,
**hook_context,
)
if 'check' in arguments:
command.execute_hook(
@ -220,6 +230,7 @@ def run_configuration(config_filename, config, arguments):
config_filename,
'post-check',
global_arguments.dry_run,
**hook_context,
)
if 'extract' in arguments:
command.execute_hook(
@ -228,6 +239,7 @@ def run_configuration(config_filename, config, arguments):
config_filename,
'post-extract',
global_arguments.dry_run,
**hook_context,
)
if using_primary_action:
dispatch.call_hooks(
@ -646,6 +658,20 @@ def load_configurations(config_filenames, overrides=None):
configs[config_filename] = validate.parse_configuration(
config_filename, validate.schema_filename(), overrides
)
except PermissionError:
logs.extend(
[
logging.makeLogRecord(
dict(
levelno=logging.WARNING,
levelname='WARNING',
msg='{}: Insufficient permissions to read configuration file'.format(
config_filename
),
)
),
]
)
except (ValueError, OSError, validate.Validation_error) as error:
logs.extend(
[

View File

@ -42,6 +42,14 @@ properties:
example:
- user@backupserver:sourcehostname.borg
- "user@backupserver:{fqdn}"
working_directory:
type: string
description: |
Working directory for the "borg create" command. Tildes are
expanded. Useful for backing up using relative paths. See
http://borgbackup.readthedocs.io/en/stable/usage/create.html
for details. Defaults to not set.
example: /path/to/working/directory
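For a sense of how this option is meant to be used, here's a minimal configuration sketch (the paths are hypothetical): relative source directories get resolved against the working directory when borgmatic runs "borg create".

```yaml
location:
    # Relative source paths below are resolved against working_directory
    # when borgmatic invokes "borg create".
    working_directory: /home/user
    source_directories:
        - Documents
        - Projects/notes
    repositories:
        - /mnt/backup/user.borg
```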
one_file_system:
type: boolean
description: |

View File

@ -258,6 +258,7 @@ footer.elv-layout {
/* Header */
.elv-header {
position: relative;
text-align: center;
}
.elv-header-default {
display: flex;

View File

@ -37,6 +37,30 @@ There are additional hooks that run before/after other actions as well. For
instance, `before_prune` runs before a `prune` action, while `after_prune`
runs after it.
## Variable interpolation
The before and after action hooks support interpolating particular runtime
variables into the hook command. Here's an example that assumes you provide a
separate shell script:
```yaml
hooks:
after_prune:
- record-prune.sh "{configuration_filename}" "{repositories}"
```
In this example, when the hook is triggered, borgmatic interpolates runtime
values into the hook command: the borgmatic configuration filename and the
paths of all configured repositories. Here's the full set of supported
variables you can use here:
* `configuration_filename`: borgmatic configuration filename in which the
hook was defined
* `repositories`: comma-separated paths of all repositories configured in the
current borgmatic configuration file
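The same variables are available in the other `before_*` and `after_*` command hooks as well. As an illustrative sketch (the echo command is just a stand-in for your own script), a `before_backup` hook could log both values:

```yaml
hooks:
    before_backup:
        # borgmatic replaces the placeholders before running the command.
        - echo "Backing up {repositories} (configured in {configuration_filename})"
```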
## Global hooks
You can also use `before_everything` and `after_everything` hooks to perform
global setup or cleanup:
@ -58,6 +82,8 @@ but only if there is a `create` action. It runs even if an error occurs during
a backup or a backup hook, but not if an error occurs during a
`before_everything` hook.
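As a hypothetical sketch (the mount commands stand in for whatever global setup and cleanup you need):

```yaml
hooks:
    # Run once before any configuration file is processed, and once after
    # all of them have finished.
    before_everything:
        - mount /mnt/backup-drive
    after_everything:
        - umount /mnt/backup-drive
```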
## Error hooks
borgmatic also runs `on_error` hooks if an error occurs, either when creating
a backup or running a backup hook. See the [monitoring and alerting
documentation](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/)

View File

@ -199,10 +199,10 @@ backups to avoid getting caught without a way to restore a database.
databases that share the exact same name on different hosts.
4. Because database hooks implicitly enable the `read_special` configuration
setting to support dump and restore streaming, you'll need to ensure that any
special files are excluded from backups (named pipes, block devices, and
character devices) to prevent hanging. Try a command like `find / -type c,b,p`
to find such files. Common directories to exclude are `/dev` and `/run`, but
that may not be exhaustive.
special files are excluded from backups (named pipes, block devices,
character devices, and sockets) to prevent hanging. Try a command like
`find /your/source/path -type c,b,p,s` to find such files. Common directories
to exclude are `/dev` and `/run`, but that may not be exhaustive.
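As a concrete sketch of those exclusions (adjust the paths for your own system):

```yaml
location:
    exclude_patterns:
        # Directories that commonly hold special files (devices, pipes,
        # sockets) which can hang "borg create" when read_special is on.
        - /dev
        - /run
```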
### Manual restoration
@ -244,5 +244,10 @@ hooks:
### borgmatic hangs during backup
See Limitations above about `read_special`. You may need to exclude certain
paths with named pipes, block devices, or character devices on which borgmatic
is hanging.
paths with named pipes, block devices, character devices, or sockets on which
borgmatic is hanging.
Alternatively, if excluding special files is too onerous, you can create two
separate borgmatic configuration files—one for your source files and a
separate one for backing up databases. That way, the database `read_special`
option will not be active when backing up special files.
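A rough, hypothetical sketch of such a second, database-only configuration file (the repository path and database name are placeholders):

```yaml
# e.g. /etc/borgmatic.d/databases.yaml
location:
    source_directories: []    # the database dumps are added to the backup automatically
    repositories:
        - /mnt/backup/databases.borg

hooks:
    postgresql_databases:
        - name: myapp
```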

View File

@ -104,10 +104,9 @@ hooks:
- send-text-message.sh "{configuration_filename}" "{repository}"
```
In this example, when the error occurs, borgmatic interpolates a few runtime
values into the hook command: the borgmatic configuration filename, and the
path of the repository. Here's the full set of supported variables you can use
here:
In this example, when the error occurs, borgmatic interpolates runtime values
into the hook command: the borgmatic configuration filename, and the path of
the repository. Here's the full set of supported variables you can use here:
* `configuration_filename`: borgmatic configuration filename in which the
error occurred

View File

@ -31,8 +31,8 @@ python3 setup.py bdist_wheel
python3 setup.py sdist
gpg --detach-sign --armor dist/borgmatic-*.tar.gz
gpg --detach-sign --armor dist/borgmatic-*-py3-none-any.whl
twine upload -r pypi dist/borgmatic-*.tar.gz dist/borgmatic-*.tar.gz.asc
twine upload -r pypi dist/borgmatic-*-py3-none-any.whl dist/borgmatic-*-py3-none-any.whl.asc
twine upload -r pypi --username __token__ dist/borgmatic-*.tar.gz dist/borgmatic-*.tar.gz.asc
twine upload -r pypi --username __token__ dist/borgmatic-*-py3-none-any.whl dist/borgmatic-*-py3-none-any.whl.asc
# Set release changelogs on projects.torsion.org and GitHub.
release_changelog="$(cat NEWS | sed '/^$/q' | grep -v '^\S')"

View File

@ -1,6 +1,6 @@
from setuptools import find_packages, setup
VERSION = '1.5.23'
VERSION = '1.5.24'
setup(

View File

@ -36,8 +36,8 @@ def test_compact_segments_with_log_debug_calls_borg_with_debug_parameter():
module.compact_segments(repository='repo', storage_config={}, dry_run=False)
def test_compact_segments_with_dry_run_calls_borg_with_dry_run_parameter():
insert_execute_command_mock(COMPACT_COMMAND + ('--dry-run', 'repo'), logging.INFO)
def test_compact_segments_with_dry_run_skips_borg_call():
flexmock(module).should_receive('execute_command').never()
module.compact_segments(repository='repo', storage_config={}, dry_run=True)

File diff suppressed because it is too large.

View File

@ -395,6 +395,15 @@ def test_load_configurations_collects_parsed_configurations():
assert logs == []
def test_load_configurations_logs_warning_for_permission_error():
flexmock(module.validate).should_receive('parse_configuration').and_raise(PermissionError)
configs, logs = tuple(module.load_configurations(('test.yaml',)))
assert configs == {}
assert {log.levelno for log in logs} == {logging.WARNING}
def test_load_configurations_logs_critical_for_parse_error():
flexmock(module.validate).should_receive('parse_configuration').and_raise(ValueError)