backport bin_to_hex and use it

simplifies the code and also eases porting and merging between 1.0 and 1.1/master.
Parent: 45a2d8b5c0
Commit: 67aafec195

9 changed files with 45 additions and 42 deletions
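The helper itself is a two-line function in borg/helpers.py (see that hunk below); every other change swaps the spelled-out hexlify idiom for a call to it. A minimal standalone sketch of the before/after pattern (the helper body is copied from the diff; repo_id is an illustrative value, mirroring the os.urandom(32) call in the repository hunk):

import os
from binascii import hexlify


def bin_to_hex(binary):
    # the helper added to borg/helpers.py by this commit
    return hexlify(binary).decode('ascii')


repo_id = os.urandom(32)  # illustrative 32-byte id, as in Repository.create

# before: the conversion idiom repeated at every call site
id_hex_before = hexlify(repo_id).decode('ascii')
# after: one named helper, identical output, easier to grep and merge
id_hex_after = bin_to_hex(repo_id)
assert id_hex_before == id_hex_after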
borg/archive.py

@@ -1,4 +1,3 @@
-from binascii import hexlify
 from contextlib import contextmanager
 from datetime import datetime, timezone
 from getpass import getuser

@@ -17,7 +16,7 @@ import sys
 import time
 from io import BytesIO
 from . import xattr
-from .helpers import Error, uid2user, user2uid, gid2group, group2gid, \
+from .helpers import Error, uid2user, user2uid, gid2group, group2gid, bin_to_hex, \
     parse_timestamp, to_localtime, format_time, format_timedelta, remove_surrogates, \
     Manifest, Statistics, decode_dict, make_path_safe, StableDict, int_to_bigint, bigint_to_int, \
     ProgressIndicatorPercent

@@ -254,7 +253,7 @@ class Archive:

     @property
     def fpr(self):
-        return hexlify(self.id).decode('ascii')
+        return bin_to_hex(self.id)

     @property
     def duration(self):

@@ -522,7 +521,7 @@ Number of files: {0.stats.nfiles}'''.format(
             try:
                 self.cache.chunk_decref(id, stats)
             except KeyError:
-                cid = hexlify(id).decode('ascii')
+                cid = bin_to_hex(id)
                 raise ChunksIndexError(cid)
             except Repository.ObjectNotFound as e:
                 # object not in repo - strange, but we wanted to delete it anyway.

@@ -1010,7 +1009,7 @@ class ArchiveChecker:
             return _state

         def report(msg, chunk_id, chunk_no):
-            cid = hexlify(chunk_id).decode('ascii')
+            cid = bin_to_hex(chunk_id)
             msg += ' [chunk: %06d_%s]' % (chunk_no, cid)  # see debug-dump-archive-items
             self.error_found = True
             logger.error(msg)
borg/archiver.py

@@ -1,4 +1,4 @@
-from binascii import hexlify, unhexlify
+from binascii import unhexlify
 from datetime import datetime
 from hashlib import sha256
 from operator import attrgetter

@@ -18,7 +18,7 @@ import collections

 from . import __version__
 from .helpers import Error, location_validator, archivename_validator, format_line, format_time, format_file_size, \
-    parse_pattern, PathPrefixPattern, to_localtime, timestamp, safe_timestamp, \
+    parse_pattern, PathPrefixPattern, to_localtime, timestamp, safe_timestamp, bin_to_hex, \
     get_cache_dir, prune_within, prune_split, \
     Manifest, NoManifestError, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
     dir_is_tagged, bigint_to_int, ChunkerParams, CompressionSpec, PrefixSpec, is_slow_msgpack, yes, sysinfo, \

@@ -631,7 +631,7 @@ class Archiver:
         """Show archive details such as disk space used"""
         stats = archive.calc_stats(cache)
         print('Name:', archive.name)
-        print('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
+        print('Fingerprint: %s' % bin_to_hex(archive.id))
         print('Hostname:', archive.metadata[b'hostname'])
         print('Username:', archive.metadata[b'username'])
         print('Time (start): %s' % format_time(to_localtime(archive.ts)))

@@ -727,7 +727,7 @@ class Archiver:
         archive = Archive(repository, key, manifest, args.location.archive)
         for i, item_id in enumerate(archive.metadata[b'items']):
             data = key.decrypt(item_id, repository.get(item_id))
-            filename = '%06d_%s.items' % (i, hexlify(item_id).decode('ascii'))
+            filename = '%06d_%s.items' % (i, bin_to_hex(item_id))
             print('Dumping', filename)
             with open(filename, 'wb') as fd:
                 fd.write(data)

@@ -748,7 +748,7 @@ class Archiver:
             cdata = repository.get(id)
             give_id = id if id != Manifest.MANIFEST_ID else None
             data = key.decrypt(give_id, cdata)
-            filename = '%06d_%s.obj' % (i, hexlify(id).decode('ascii'))
+            filename = '%06d_%s.obj' % (i, bin_to_hex(id))
             print('Dumping', filename)
             with open(filename, 'wb') as fd:
                 fd.write(data)
borg/cache.py

@@ -3,14 +3,14 @@ from .remote import cache_if_remote
 from collections import namedtuple
 import os
 import stat
-from binascii import hexlify, unhexlify
+from binascii import unhexlify
 import shutil

 from .key import PlaintextKey
 from .logger import create_logger
 logger = create_logger()
 from .helpers import Error, get_cache_dir, decode_dict, int_to_bigint, \
-    bigint_to_int, format_file_size, yes
+    bigint_to_int, format_file_size, yes, bin_to_hex
 from .locking import Lock
 from .hashindex import ChunkIndex

@@ -34,13 +34,13 @@ class Cache:

     @staticmethod
     def break_lock(repository, path=None):
-        path = path or os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
+        path = path or os.path.join(get_cache_dir(), bin_to_hex(repository.id))
         Lock(os.path.join(path, 'lock'), exclusive=True).break_lock()

     @staticmethod
     def destroy(repository, path=None):
         """destroy the cache for ``repository`` or at ``path``"""
-        path = path or os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
+        path = path or os.path.join(get_cache_dir(), bin_to_hex(repository.id))
         config = os.path.join(path, 'config')
         if os.path.exists(config):
             os.remove(config)  # kill config first

@@ -55,7 +55,7 @@ class Cache:
         self.repository = repository
         self.key = key
         self.manifest = manifest
-        self.path = path or os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
+        self.path = path or os.path.join(get_cache_dir(), bin_to_hex(repository.id))
         self.do_files = do_files
         # Warn user before sending data to a never seen before unencrypted repository
         if not os.path.exists(self.path):

@@ -122,7 +122,7 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         config = configparser.ConfigParser(interpolation=None)
         config.add_section('cache')
         config.set('cache', 'version', '1')
-        config.set('cache', 'repository', hexlify(self.repository.id).decode('ascii'))
+        config.set('cache', 'repository', bin_to_hex(self.repository.id))
         config.set('cache', 'manifest', '')
         with open(os.path.join(self.path, 'config'), 'w') as fd:
             config.write(fd)

@@ -208,7 +208,7 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
             if age == 0 and bigint_to_int(item[3]) < self._newest_mtime or \
                age > 0 and age < ttl:
                 msgpack.pack((path_hash, item), fd)
-        self.config.set('cache', 'manifest', hexlify(self.manifest.id).decode('ascii'))
+        self.config.set('cache', 'manifest', bin_to_hex(self.manifest.id))
         self.config.set('cache', 'timestamp', self.manifest.timestamp)
         self.config.set('cache', 'key_type', str(self.key.TYPE))
         self.config.set('cache', 'previous_location', self.repository._location.canonical_path())

@@ -251,7 +251,7 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         archive_path = os.path.join(self.path, 'chunks.archive.d')

         def mkpath(id, suffix=''):
-            id_hex = hexlify(id).decode('ascii')
+            id_hex = bin_to_hex(id)
             path = os.path.join(archive_path, id_hex + suffix)
             return path.encode('utf-8')
borg/helpers.py

@@ -1,4 +1,5 @@
 import argparse
+from binascii import hexlify
 from collections import namedtuple
 import contextlib
 from functools import wraps

@@ -759,6 +760,10 @@ def safe_encode(s, coding='utf-8', errors='surrogateescape'):
     return s.encode(coding, errors)


+def bin_to_hex(binary):
+    return hexlify(binary).decode('ascii')
+
+
 class Location:
     """Object representing a repository / archive location
     """
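The modules touched by this commit generally keep importing unhexlify while dropping hexlify, since decoding hex back to bytes gets no wrapper and unhexlify is exactly the inverse of bin_to_hex. A quick round-trip sketch (standalone, stdlib only):

from binascii import hexlify, unhexlify
import os


def bin_to_hex(binary):
    return hexlify(binary).decode('ascii')


blob = os.urandom(32)
hex_str = bin_to_hex(blob)            # 64-character ASCII str
assert len(hex_str) == 2 * len(blob)  # two hex digits per byte
assert unhexlify(hex_str) == blob     # unhexlify inverts bin_to_hex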
borg/key.py

@@ -7,7 +7,7 @@ import textwrap
 from hmac import HMAC, compare_digest
 from hashlib import sha256, pbkdf2_hmac

-from .helpers import IntegrityError, get_keys_dir, Error, yes
+from .helpers import IntegrityError, get_keys_dir, Error, yes, bin_to_hex
 from .logger import create_logger
 logger = create_logger()

@@ -201,7 +201,7 @@ class Passphrase(str):
                passphrase.encode('ascii')
            except UnicodeEncodeError:
                print('Your passphrase (UTF-8 encoding in hex): %s' %
-                      hexlify(passphrase.encode('utf-8')).decode('ascii'),
+                      bin_to_hex(passphrase.encode('utf-8')),
                      file=sys.stderr)
                print('As you have a non-ASCII passphrase, it is recommended to keep the UTF-8 encoding in hex together with the passphrase at a safe place.',
                      file=sys.stderr)

@@ -427,7 +427,7 @@ class KeyfileKey(KeyfileKeyBase):
     def save(self, target, passphrase):
         key_data = self._save(passphrase)
         with open(target, 'w') as fd:
-            fd.write('%s %s\n' % (self.FILE_ID, hexlify(self.repository_id).decode('ascii')))
+            fd.write('%s %s\n' % (self.FILE_ID, bin_to_hex(self.repository_id)))
             fd.write(key_data)
             fd.write('\n')
         self.target = target
borg/keymanager.py

@@ -1,10 +1,10 @@
-from binascii import hexlify, unhexlify, a2b_base64, b2a_base64
+from binascii import unhexlify, a2b_base64, b2a_base64
 import binascii
 import textwrap
 from hashlib import sha256

 from .key import KeyfileKey, RepoKey, PassphraseKey, KeyfileNotFoundError, PlaintextKey
-from .helpers import Manifest, NoManifestError, Error, yes
+from .helpers import Manifest, NoManifestError, Error, yes, bin_to_hex
 from .repository import Repository


@@ -79,7 +79,7 @@ class KeyManager:

     def store_keyfile(self, target):
         with open(target, 'w') as fd:
-            fd.write('%s %s\n' % (KeyfileKey.FILE_ID, hexlify(self.repository.id).decode('ascii')))
+            fd.write('%s %s\n' % (KeyfileKey.FILE_ID, bin_to_hex(self.repository.id)))
             fd.write(self.keyblob)
             if not self.keyblob.endswith('\n'):
                 fd.write('\n')

@@ -103,7 +103,7 @@ class KeyManager:
         binary = a2b_base64(self.keyblob)
         export += 'BORG PAPER KEY v1\n'
         lines = (len(binary) + 17) // 18
-        repoid = hexlify(self.repository.id).decode('ascii')[:18]
+        repoid = bin_to_hex(self.repository.id)[:18]
         complete_checksum = sha256_truncated(binary, 12)
         export += 'id: {0:d} / {1} / {2} - {3}\n'.format(lines,
                                                          grouped(repoid),

@@ -114,7 +114,7 @@
             idx += 1
             binline = binary[:18]
             checksum = sha256_truncated(idx.to_bytes(2, byteorder='big') + binline, 2)
-            export += '{0:2d}: {1} - {2}\n'.format(idx, grouped(hexlify(binline).decode('ascii')), checksum)
+            export += '{0:2d}: {1} - {2}\n'.format(idx, grouped(bin_to_hex(binline)), checksum)
             binary = binary[18:]

         if path:

@@ -125,7 +125,7 @@

     def import_keyfile(self, args):
         file_id = KeyfileKey.FILE_ID
-        first_line = file_id + ' ' + hexlify(self.repository.id).decode('ascii') + '\n'
+        first_line = file_id + ' ' + bin_to_hex(self.repository.id) + '\n'
         with open(args.path, 'r') as fd:
             file_first_line = fd.read(len(first_line))
             if file_first_line != first_line:

@@ -141,7 +141,7 @@
         # imported here because it has global side effects
         import readline

-        repoid = hexlify(self.repository.id).decode('ascii')[:18]
+        repoid = bin_to_hex(self.repository.id)[:18]
         try:
             while True:  # used for repeating on overall checksum mismatch
                 # id line input
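In the paper-key export hunks above, the key blob is emitted 18 bytes per line, so lines = (len(binary) + 17) // 18 is integer ceiling division (a 40-byte blob gives (40 + 17) // 18 = 3 lines). A simplified standalone sketch of that loop; sha256_truncated here is a stand-in modeled on borg's same-named helper, and the grouped() spacing of the hex digits is omitted:

from binascii import hexlify
from hashlib import sha256


def sha256_truncated(data, num):
    # stand-in: the first `num` hex digits of sha256(data)
    return sha256(data).hexdigest()[:num]


binary = bytes(range(40))         # illustrative 40-byte key blob
lines = (len(binary) + 17) // 18  # ceiling division: 3 lines here
idx = 0
while len(binary):
    idx += 1
    binline, binary = binary[:18], binary[18:]
    checksum = sha256_truncated(idx.to_bytes(2, byteorder='big') + binline, 2)
    print('{0:2d}: {1} - {2}'.format(idx, hexlify(binline).decode('ascii'), checksum))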
borg/repository.py

@@ -1,5 +1,5 @@
 from configparser import ConfigParser
-from binascii import hexlify, unhexlify
+from binascii import unhexlify
 from datetime import datetime
 from itertools import islice
 import errno

@@ -12,7 +12,7 @@ import struct
 from zlib import crc32

 import msgpack
-from .helpers import Error, ErrorWithTraceback, IntegrityError, Location, ProgressIndicatorPercent
+from .helpers import Error, ErrorWithTraceback, IntegrityError, Location, ProgressIndicatorPercent, bin_to_hex
 from .hashindex import NSIndex
 from .locking import Lock, LockError, LockErrorT
 from .lrucache import LRUCache

@@ -109,7 +109,7 @@ class Repository:
         config.set('repository', 'segments_per_dir', str(self.DEFAULT_SEGMENTS_PER_DIR))
         config.set('repository', 'max_segment_size', str(self.DEFAULT_MAX_SEGMENT_SIZE))
         config.set('repository', 'append_only', str(int(self.append_only)))
-        config.set('repository', 'id', hexlify(os.urandom(32)).decode('ascii'))
+        config.set('repository', 'id', bin_to_hex(os.urandom(32)))
         self.save_config(path, config)

     def save_config(self, path, config):
borg/testsuite/archiver.py

@@ -1,4 +1,4 @@
-from binascii import hexlify, unhexlify, b2a_base64
+from binascii import unhexlify, b2a_base64
 from configparser import ConfigParser
 import errno
 import os

@@ -21,7 +21,7 @@ from ..archive import Archive, ChunkBuffer, CHUNK_MAX_EXP, flags_noatime, flags_
 from ..archiver import Archiver
 from ..cache import Cache
 from ..crypto import bytes_to_long, num_aes_blocks
-from ..helpers import Manifest, PatternMatcher, parse_pattern, EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
+from ..helpers import Manifest, PatternMatcher, parse_pattern, EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, bin_to_hex
 from ..key import RepoKey, KeyfileKey, Passphrase
 from ..keymanager import RepoIdMismatch, NotABorgKeyFile
 from ..remote import RemoteRepository, PathNotAllowed

@@ -409,7 +409,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
     def _set_repository_id(self, path, id):
         config = ConfigParser(interpolation=None)
         config.read(os.path.join(path, 'config'))
-        config.set('repository', 'id', hexlify(id).decode('ascii'))
+        config.set('repository', 'id', bin_to_hex(id))
         with open(os.path.join(path, 'config'), 'w') as fd:
             config.write(fd)
         with Repository(self.repository_path) as repository:

@@ -1205,7 +1205,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         with open(export_file, 'r') as fd:
             export_contents = fd.read()

-        assert export_contents.startswith('BORG_KEY ' + hexlify(repo_id).decode() + '\n')
+        assert export_contents.startswith('BORG_KEY ' + bin_to_hex(repo_id) + '\n')

         key_file = self.keys_path + '/' + os.listdir(self.keys_path)[0]

@@ -1232,7 +1232,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         with open(export_file, 'r') as fd:
             export_contents = fd.read()

-        assert export_contents.startswith('BORG_KEY ' + hexlify(repo_id).decode() + '\n')
+        assert export_contents.startswith('BORG_KEY ' + bin_to_hex(repo_id) + '\n')

         with Repository(self.repository_path) as repository:
             repo_key = RepoKey(repository)
borg/upgrader.py

@@ -1,4 +1,3 @@
-from binascii import hexlify
 import datetime
 import logging
 logger = logging.getLogger(__name__)

@@ -6,7 +5,7 @@ import os
 import shutil
 import time

-from .helpers import get_keys_dir, get_cache_dir, ProgressIndicatorPercent
+from .helpers import get_keys_dir, get_cache_dir, ProgressIndicatorPercent, bin_to_hex
 from .locking import Lock
 from .repository import Repository, MAGIC
 from .key import KeyfileKey, KeyfileNotFoundError

@@ -188,8 +187,8 @@ class AtticRepositoryUpgrader(Repository):
         attic_cache_dir = os.environ.get('ATTIC_CACHE_DIR',
                                          os.path.join(os.path.expanduser('~'),
                                                       '.cache', 'attic'))
-        attic_cache_dir = os.path.join(attic_cache_dir, hexlify(self.id).decode('ascii'))
-        borg_cache_dir = os.path.join(get_cache_dir(), hexlify(self.id).decode('ascii'))
+        attic_cache_dir = os.path.join(attic_cache_dir, bin_to_hex(self.id))
+        borg_cache_dir = os.path.join(get_cache_dir(), bin_to_hex(self.id))

         def copy_cache_file(path):
             """copy the given attic cache path into the borg directory

@@ -263,7 +262,7 @@ class AtticKeyfileKey(KeyfileKey):
         assume the repository has been opened by the archiver yet
         """
         get_keys_dir = cls.get_keys_dir
-        id = hexlify(repository.id).decode('ascii')
+        id = bin_to_hex(repository.id)
         keys_dir = get_keys_dir()
         if not os.path.exists(keys_dir):
             raise KeyfileNotFoundError(repository.path, keys_dir)

@@ -313,7 +312,7 @@ class Borg0xxKeyfileKey(KeyfileKey):
     @classmethod
     def find_key_file(cls, repository):
         get_keys_dir = cls.get_keys_dir
-        id = hexlify(repository.id).decode('ascii')
+        id = bin_to_hex(repository.id)
         keys_dir = get_keys_dir()
         if not os.path.exists(keys_dir):
             raise KeyfileNotFoundError(repository.path, keys_dir)