Mirror of https://github.com/borgbackup/borg.git
Rework how exclude patterns work
parent 445b862d8c
commit 7e06d30ed4

5 changed files with 77 additions and 56 deletions
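In outline: the separate -i/--include options are dropped, include patterns are derived from the positional PATH arguments instead, --exclude options are collected under args.excludes, and the reworked adjust_patterns(paths, excludes) in darc.helpers builds the pattern list that exclude_path() consults. extract and verify gain positional PATH arguments, and extract no longer takes a DEST argument: it writes into the current working directory. A minimal sketch of the resulting pattern list, using the helpers exactly as defined in the hunks below:

    # Illustrative sketch only; adjust_patterns/IncludePattern/ExcludePattern are
    # the reworked versions from darc.helpers shown further down in this commit.
    from darc.helpers import IncludePattern, ExcludePattern, adjust_patterns

    patterns = adjust_patterns(['input/file1'], [ExcludePattern('input/file4')])
    # -> [ExcludePattern('input/file4'), IncludePattern('input/file1'), ExcludePattern('*')]
    # When paths are given, the trailing ExcludePattern('*') makes anything not
    # explicitly included fall through to "excluded"; with no paths, the exclude
    # list is returned unchanged.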
@@ -221,8 +221,8 @@ class Archive(object):
         cache.rollback()
         return stats
 
-    def extract_item(self, item, dest=None, restore_attrs=True, peek=None):
-        dest = dest or self.cwd
+    def extract_item(self, item, restore_attrs=True, peek=None):
+        dest = self.cwd
         assert item[b'path'][:1] not in ('/', '\\', ':')
         path = os.path.join(dest, item[b'path'])
         # Attempt to remove existing files, ignore errors on failure
@@ -93,7 +93,7 @@ class Archiver:
                     continue
             else:
                 restrict_dev = None
-            self._process(archive, cache, args.patterns, skip_inodes, path, restrict_dev)
+            self._process(archive, cache, args.excludes, skip_inodes, path, restrict_dev)
         archive.save()
         if args.stats:
             t = datetime.now()
@@ -108,8 +108,8 @@ class Archiver:
             print('-' * 40)
         return self.exit_code
 
-    def _process(self, archive, cache, patterns, skip_inodes, path, restrict_dev):
-        if exclude_path(path, patterns):
+    def _process(self, archive, cache, excludes, skip_inodes, path, restrict_dev):
+        if exclude_path(path, excludes):
             return
         try:
             st = os.lstat(path)
@@ -138,7 +138,7 @@ class Archiver:
                 self.print_error('%s: %s', path, e)
             else:
                 for filename in sorted(entries):
-                    self._process(archive, cache, patterns, skip_inodes,
+                    self._process(archive, cache, excludes, skip_inodes,
                                   os.path.join(path, filename), restrict_dev)
         elif stat.S_ISLNK(st.st_mode):
             archive.process_symlink(path, st)
@@ -154,22 +154,23 @@ class Archiver:
         manifest, key = Manifest.load(repository)
         archive = Archive(repository, key, manifest, args.archive.archive,
                           numeric_owner=args.numeric_owner)
+        patterns = adjust_patterns(args.paths, args.excludes)
         dirs = []
-        for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], args.patterns)):
+        for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns)):
             while dirs and not item[b'path'].startswith(dirs[-1][b'path']):
-                archive.extract_item(dirs.pop(-1), args.dest)
+                archive.extract_item(dirs.pop(-1))
             self.print_verbose(remove_surrogates(item[b'path']))
             try:
                 if stat.S_ISDIR(item[b'mode']):
                     dirs.append(item)
-                    archive.extract_item(item, args.dest, restore_attrs=False)
+                    archive.extract_item(item, restore_attrs=False)
                 else:
-                    archive.extract_item(item, args.dest, peek=peek)
+                    archive.extract_item(item, peek=peek)
             except IOError as e:
                 self.print_error('%s: %s', remove_surrogates(item[b'path']), e)
 
         while dirs:
-            archive.extract_item(dirs.pop(-1), args.dest)
+            archive.extract_item(dirs.pop(-1))
         return self.exit_code
 
     def do_delete(self, args):
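With do_extract reworked as above, the positional paths select which items are extracted. A concrete illustration with hypothetical item paths (exclude_path() itself is not changed by this commit; it is assumed to return True when the first matching pattern is an ExcludePattern):

    from darc.helpers import adjust_patterns, exclude_path

    patterns = adjust_patterns(['input/file1'], [])
    for path in ['input/file1', 'input/file1/sub', 'input/file2']:
        print(path, '->', 'skipped' if exclude_path(path, patterns) else 'extracted')
    # input/file1     -> extracted
    # input/file1/sub -> extracted  (everything below an included path matches)
    # input/file2     -> skipped    (falls through to the trailing ExcludePattern('*'))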
@@ -216,6 +217,7 @@ class Archiver:
         repository = self.open_repository(args.archive)
         manifest, key = Manifest.load(repository)
         archive = Archive(repository, key, manifest, args.archive.archive)
+        patterns = adjust_patterns(args.paths, args.excludes)
 
         def start_cb(item):
             self.print_verbose('%s ...', remove_surrogates(item[b'path']), newline=False)
@@ -226,7 +228,7 @@ class Archiver:
             else:
                 self.print_verbose('ERROR')
                 self.print_error('%s: verification failed' % remove_surrogates(item[b'path']))
-        for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], args.patterns)):
+        for item, peek in archive.iter_items(lambda item: not exclude_path(item[b'path'], patterns)):
             if stat.S_ISREG(item[b'mode']) and b'chunks' in item:
                 archive.verify_file(item, start_cb, result_cb, peek=peek)
         return self.exit_code
@@ -310,7 +312,7 @@ class Archiver:
                                help='Encrypt data using key file')
         subparser.add_argument('--passphrase', dest='passphrase',
                                action='store_true', default=False,
-                               help='Encrypt data using passphrase derived key')
+                               help='Encrypt data using passphrase derived keys')
 
         subparser = subparsers.add_parser('change-passphrase', parents=[common_parser])
         subparser.set_defaults(func=self.do_change_passphrase)
@@ -321,12 +323,9 @@ class Archiver:
         subparser.add_argument('-s', '--stats', dest='stats',
                                action='store_true', default=False,
                                help='Print statistics for the created archive')
-        subparser.add_argument('-i', '--include', dest='patterns',
-                               type=IncludePattern, action='append',
-                               help='Include condition')
-        subparser.add_argument('-e', '--exclude', dest='patterns',
+        subparser.add_argument('-e', '--exclude', dest='excludes',
                                type=ExcludePattern, action='append',
-                               help='Include condition')
+                               help='Exclude condition')
         subparser.add_argument('-c', '--checkpoint-interval', dest='checkpoint_interval',
                                type=int, default=300, metavar='SECONDS',
                                help='Write checkpointe ever SECONDS seconds (Default: 300)')
@@ -339,25 +338,22 @@ class Archiver:
         subparser.add_argument('archive', metavar='ARCHIVE',
                                type=location_validator(archive=True),
                                help='Archive to create')
-        subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
-                               default=['.'], help='Paths to add to archive')
+        subparser.add_argument('paths', metavar='PATH', nargs='+', type=str,
+                               help='Paths to archive')
 
         subparser = subparsers.add_parser('extract', parents=[common_parser])
         subparser.set_defaults(func=self.do_extract)
-        subparser.add_argument('-i', '--include', dest='patterns',
-                               type=IncludePattern, action='append',
-                               help='Include condition')
-        subparser.add_argument('-e', '--exclude', dest='patterns',
+        subparser.add_argument('-e', '--exclude', dest='excludes',
                                type=ExcludePattern, action='append',
-                               help='Include condition')
+                               help='Exclude condition')
         subparser.add_argument('--numeric-owner', dest='numeric_owner',
                                action='store_true', default=False,
                                help='Only obey numeric user and group identifiers')
         subparser.add_argument('archive', metavar='ARCHIVE',
                                type=location_validator(archive=True),
-                               help='Archive to create')
-        subparser.add_argument('dest', metavar='DEST', type=str, nargs='?',
-                               help='Where to extract files')
+                               help='Archive to extract')
+        subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
+                               help='Paths to extract')
 
         subparser = subparsers.add_parser('delete', parents=[common_parser])
         subparser.set_defaults(func=self.do_delete)
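Taken together, the parser changes replace include options and extract's DEST argument with positional paths: what the old test ran as `darc extract --include=file1 REPO::test output` becomes `cd output && darc extract REPO::test input/file1`, with paths matched against the stored item paths, as in the updated tests further down; verify likewise gains PATH arguments. Note that only create and extract get the corrected 'Exclude condition' help text in this commit; the verify parser's --exclude option appears to keep the old 'Include condition' string.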
@@ -372,15 +368,14 @@ class Archiver:
 
         subparser = subparsers.add_parser('verify', parents=[common_parser])
         subparser.set_defaults(func=self.do_verify)
-        subparser.add_argument('-i', '--include', dest='patterns',
-                               type=IncludePattern, action='append',
-                               help='Include condition')
-        subparser.add_argument('-e', '--exclude', dest='patterns',
+        subparser.add_argument('-e', '--exclude', dest='excludes',
                                type=ExcludePattern, action='append',
                                help='Include condition')
         subparser.add_argument('archive', metavar='ARCHIVE',
                                type=location_validator(archive=True),
                                help='Archive to verity integrity of')
+        subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
+                               help='Paths to verify')
 
         subparser = subparsers.add_parser('info', parents=[common_parser])
         subparser.set_defaults(func=self.do_info)
@@ -406,8 +401,6 @@ class Archiver:
                                type=location_validator(archive=False),
                                help='Repository to prune')
         args = parser.parse_args(args or ['-h'])
-        if getattr(args, 'patterns', None):
-            adjust_patterns(args.patterns)
         self.verbose = args.verbose
         return args.func(args)
 
@@ -97,9 +97,11 @@ def to_localtime(ts):
     return ts - timedelta(seconds=time.altzone)
 
 
-def adjust_patterns(patterns):
-    if patterns and not isinstance(patterns[-1], ExcludePattern):
-        patterns.append(ExcludePattern('*'))
+def adjust_patterns(paths, excludes):
+    if paths:
+        return (excludes or []) + [IncludePattern(path) for path in paths] + [ExcludePattern('*')]
+    else:
+        return excludes
 
 
 def exclude_path(path, patterns):
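adjust_patterns() now just builds a list; the actual decision is still made by exclude_path(), whose body is untouched by this commit and therefore not shown above. A sketch of the first-match-wins logic it is assumed to implement, which is what makes the trailing ExcludePattern('*') act as a catch-all once explicit paths are given:

    def exclude_path(path, patterns):
        # First matching pattern wins: an ExcludePattern match excludes the path,
        # an IncludePattern match keeps it, and no match at all keeps it too.
        for pattern in (patterns or []):
            if pattern.match(path):
                return isinstance(pattern, ExcludePattern)
        return False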
@@ -115,6 +117,21 @@ def exclude_path(path, patterns):
 class IncludePattern:
     """--include PATTERN
     """
     def __init__(self, pattern):
+        self.pattern = pattern
+
+    def match(self, path):
+        dir, name = os.path.split(path)
+        return (path == self.pattern
+                or (dir + os.path.sep).startswith(self.pattern))
+
+    def __repr__(self):
+        return '%s(%s)' % (type(self), self.pattern)
+
+
+class ExcludePattern(IncludePattern):
+    """
+    """
+    def __init__(self, pattern):
         self.pattern = self.dirpattern = pattern
         if not pattern.endswith(os.path.sep):
@@ -130,11 +147,6 @@ class IncludePattern:
         return '%s(%s)' % (type(self), self.pattern)
 
 
-class ExcludePattern(IncludePattern):
-    """
-    """
-
-
 def walk_path(path, skip_inodes=None):
     st = os.lstat(path)
     if skip_inodes and (st.st_ino, st.st_dev) in skip_inodes:
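After these two hunks, IncludePattern does plain prefix matching (the path itself, or any path whose directory part starts with the pattern), while ExcludePattern keeps the earlier shell-glob behaviour. ExcludePattern's match() lives in lines the commit does not touch, so the version in the sketch below is reconstructed from context and checked against the new PatternTestCase at the end of this diff:

    import os
    from fnmatch import fnmatchcase

    class IncludePattern:
        # Matches the pattern itself, or anything whose directory part starts with it.
        def __init__(self, pattern):
            self.pattern = pattern

        def match(self, path):
            dir, name = os.path.split(path)
            return (path == self.pattern
                    or (dir + os.path.sep).startswith(self.pattern))

    class ExcludePattern(IncludePattern):
        # Shell-style glob; a matching directory also excludes everything below it.
        # NOTE: this match() is an assumption -- it is not part of the diff above.
        def __init__(self, pattern):
            self.pattern = self.dirpattern = pattern
            if not pattern.endswith(os.path.sep):
                self.dirpattern += os.path.sep

        def match(self, path):
            dir, name = os.path.split(path)
            return (path == self.pattern
                    or (dir + os.path.sep).startswith(self.dirpattern)
                    or fnmatchcase(name, self.pattern))

    # Spot checks mirroring the new PatternTestCase below:
    assert IncludePattern('/usr').match('/usr/bin')
    assert not IncludePattern('/usr').match('/usrbin')
    assert ExcludePattern('*.py').match('foo.py')
    assert ExcludePattern('/tmp').match('/tmp/foo')
    assert not ExcludePattern('/tmp').match('/tmofoo')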
@@ -15,6 +15,17 @@ utime_supports_fd = os.utime in getattr(os, 'supports_fd', {})
 
 src_dir = os.path.join(os.getcwd(), os.path.dirname(__file__), '..', '..')
 
+class changedir:
+    def __init__(self, dir):
+        self.dir = dir
+
+    def __enter__(self):
+        self.old = os.getcwd()
+        os.chdir(self.dir)
+
+    def __exit__(self, *args, **kw):
+        os.chdir(self.old)
+
 
 class ArchiverTestCase(DarcTestCase):
 
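Because extract now writes into the process working directory, the tests need to run the command from inside the destination and reliably come back afterwards; that is all changedir does. A usage sketch with a hypothetical directory:

    # __exit__ restores the previous working directory even if the body raises.
    with changedir('output'):
        pass  # run the extraction here; files land under ./output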
@@ -123,7 +134,8 @@ class ArchiverTestCase(DarcTestCase):
         self.darc('init', self.repository_location)
         self.darc('create', self.repository_location + '::test', 'input')
         self.darc('create', self.repository_location + '::test.2', 'input')
-        self.darc('extract', self.repository_location + '::test', 'output')
+        with changedir('output'):
+            self.darc('extract', self.repository_location + '::test')
         self.assert_equal(len(self.darc('list', self.repository_location).splitlines()), 2)
         self.assert_equal(len(self.darc('list', self.repository_location + '::test').splitlines()), 9)
         self.diff_dirs('input', 'output/input')
@@ -141,9 +153,11 @@ class ArchiverTestCase(DarcTestCase):
         self.create_regual_file('file3', size=1024 * 80)
         self.create_regual_file('file4', size=1024 * 80)
         self.darc('create', '--exclude=input/file4', self.repository_location + '::test', 'input')
-        self.darc('extract', '--include=file1', self.repository_location + '::test', 'output')
+        with changedir('output'):
+            self.darc('extract', self.repository_location + '::test', 'input/file1', )
         self.assert_equal(sorted(os.listdir('output/input')), ['file1'])
-        self.darc('extract', '--exclude=file2', self.repository_location + '::test', 'output')
+        with changedir('output'):
+            self.darc('extract', '--exclude=input/file2', self.repository_location + '::test')
         self.assert_equal(sorted(os.listdir('output/input')), ['file1', 'file3'])
 
     def test_overwrite(self):
@@ -155,13 +169,15 @@ class ArchiverTestCase(DarcTestCase):
         os.mkdir('output/input')
         os.mkdir('output/input/file1')
         os.mkdir('output/input/dir2')
-        self.darc('extract', self.repository_location + '::test', 'output')
+        with changedir('output'):
+            self.darc('extract', self.repository_location + '::test')
         self.diff_dirs('input', 'output/input')
         # But non-empty dirs should fail
         os.unlink('output/input/file1')
         os.mkdir('output/input/file1')
         os.mkdir('output/input/file1/dir')
-        self.darc('extract', self.repository_location + '::test', 'output', exit_code=1)
+        with changedir('output'):
+            self.darc('extract', self.repository_location + '::test', exit_code=1)
 
     def test_delete(self):
         self.create_regual_file('file1', size=1024 * 80)
@@ -1,5 +1,5 @@
 from datetime import datetime
-from darc.helpers import Location, format_timedelta, IncludePattern
+from darc.helpers import Location, format_timedelta, IncludePattern, ExcludePattern
 from darc.testsuite import DarcTestCase
 
 
@@ -46,11 +46,11 @@ class FormatTimedeltaTestCase(DarcTestCase):
 class PatternTestCase(DarcTestCase):
 
     def test(self):
-        py = IncludePattern('*.py')
-        foo = IncludePattern('/foo')
-        self.assert_equal(py.match('/foo/foo.py'), True)
-        self.assert_equal(py.match('/bar/foo.java'), False)
-        self.assert_equal(foo.match('/foo/foo.py'), True)
-        self.assert_equal(foo.match('/bar/foo.java'), False)
-        self.assert_equal(foo.match('/foobar/foo.py'), False)
-        self.assert_equal(foo.match('/foo'), True)
+        self.assert_equal(IncludePattern('/usr').match('/usr'), True)
+        self.assert_equal(IncludePattern('/usr').match('/usr/bin'), True)
+        self.assert_equal(IncludePattern('/usr').match('/usrbin'), False)
+        self.assert_equal(ExcludePattern('*.py').match('foo.py'), True)
+        self.assert_equal(ExcludePattern('*.py').match('foo.pl'), False)
+        self.assert_equal(ExcludePattern('/tmp').match('/tmp'), True)
+        self.assert_equal(ExcludePattern('/tmp').match('/tmp/foo'), True)
+        self.assert_equal(ExcludePattern('/tmp').match('/tmofoo'), False)