Skip to content

Commit

Permalink
misc fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
ThomasWaldmann committed Sep 24, 2024
1 parent 3101a7e commit f293694
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 9 deletions.
2 changes: 1 addition & 1 deletion src/borg/archive.py
Original file line number Diff line number Diff line change
Expand Up @@ -1644,7 +1644,7 @@ def check(
self.check_all = not any((first, last, match, older, newer, oldest, newest))
self.repair = repair
self.repository = repository
self.chunks = build_chunkindex_from_repo(self.repository, disable_caches=True)
self.chunks = build_chunkindex_from_repo(self.repository, disable_caches=True, cache_immediately=not repair)
self.key = self.make_key(repository)
self.repo_objs = RepoObj(self.key)
if verify_data:
Expand Down
16 changes: 8 additions & 8 deletions src/borg/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -623,7 +623,8 @@ def load_chunks_hash(repository) -> bytes:
try:
hash = repository.store_load("cache/chunks_hash")
logger.debug(f"cache/chunks_hash is '{bin_to_hex(hash)}'.")
except StoreObjectNotFound:
except (Repository.ObjectNotFound, StoreObjectNotFound):
# TODO: ^ seems like RemoteRepository raises Repository.ONF instead of StoreONF
hash = b""
logger.debug("cache/chunks_hash missing!")
return hash
Expand All @@ -647,22 +648,21 @@ def write_chunkindex_to_repo_cache(repository, chunks, *, compact=False, clear=F
# hash against cache/chunks_hash in the repository. if it is the same, the cache
# is valid. If it is different, the cache is either corrupted or out of date and
# has to be discarded.
# when some functionality is DELETING chunks from the repository, it has to either
# update both cache/chunks and cache/chunks_hash (like borg compact does) or it has to set
# chunks_hash to an invalid value (like empty), so that all clients will discard their chunks
# index caches.
# when some functionality is DELETING chunks from the repository, it has to either update
# both cache/chunks and cache/chunks_hash (like borg compact does) or it has to delete both,
# so that all clients will discard any client-local chunks index caches.
logger.debug(f"caching chunks index {bin_to_hex(new_hash)} in repository...")
repository.store_store("cache/chunks", data)
repository.store_store("cache/chunks_hash", new_hash)
return new_hash


def build_chunkindex_from_repo(repository, *, disable_caches=False, cache_immediately=True):
def build_chunkindex_from_repo(repository, *, disable_caches=False, cache_immediately=False):
chunks = None
# first, try to load a pre-computed and centrally cached chunks index:
if not disable_caches:
wanted_hash = load_chunks_hash(repository)
logger.debug(f"trying to get cached chunk index (id {bin_to_hex(wanted_hash or b'')}) from the repo...")
logger.debug(f"trying to get cached chunk index (id {bin_to_hex(wanted_hash)}) from the repo...")
try:
chunks_data = repository.store_load("cache/chunks")
except (Repository.ObjectNotFound, StoreObjectNotFound):
Expand Down Expand Up @@ -721,7 +721,7 @@ def __init__(self):
@property
def chunks(self):
if self._chunks is None:
self._chunks = build_chunkindex_from_repo(self.repository)
self._chunks = build_chunkindex_from_repo(self.repository, cache_immediately=True)
return self._chunks

def seen_chunk(self, id, size=None):
Expand Down

0 comments on commit f293694

Please sign in to comment.