Mirror of https://github.com/Kozea/Radicale.git

Type hints for multifilesystem

commit 698ae875ce (parent cecb17df03)
Author: Unrud, 2021-07-26 20:56:47 +02:00 (committed by Unrud)

14 changed files with 428 additions and 246 deletions


@@ -21,16 +21,27 @@ import os
 import pickle
 import time
 from hashlib import sha256
+from typing import BinaryIO, Iterable, NamedTuple, Optional, cast
 
+import radicale.item as radicale_item
 from radicale import pathutils, storage
 from radicale.log import logger
+from radicale.storage.multifilesystem.base import CollectionBase
 
+CacheContent = NamedTuple("CacheContent", [
+    ("uid", str), ("etag", str), ("text", str), ("name", str), ("tag", str),
+    ("start", int), ("end", int)])
 
-class CollectionCacheMixin:
-    def _clean_cache(self, folder, names, max_age=None):
+
+class CollectionPartCache(CollectionBase):
+
+    def _clean_cache(self, folder: str, names: Iterable[str],
+                     max_age: int = 0) -> None:
         """Delete all ``names`` in ``folder`` that are older than ``max_age``.
         """
-        age_limit = time.time() - max_age if max_age is not None else None
+        age_limit: Optional[float] = None
+        if max_age is not None and max_age > 0:
+            age_limit = time.time() - max_age
         modified = False
         for name in names:
             if not pathutils.is_safe_filesystem_path_component(name):
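
Aside (not part of the diff): the ``CacheContent`` definition above uses the functional ``NamedTuple`` syntax, which produces a plain tuple subclass with named, typed fields. A minimal sketch of how such an instance behaves; the field values below are made up:

    from typing import NamedTuple

    CacheContent = NamedTuple("CacheContent", [
        ("uid", str), ("etag", str), ("text", str), ("name", str), ("tag", str),
        ("start", int), ("end", int)])

    # CacheContent is a tuple subclass: attribute access and unpacking both
    # work, which is why the new code can splat an instance into
    # pickle.dump((cache_hash, *content), fb).
    entry = CacheContent("uid1", '"etag1"', "BEGIN:VCALENDAR...", "event.ics",
                         "VEVENT", 0, 2147483647)
    assert entry.uid == entry[0]
    assert len(tuple(entry)) == 7
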
@@ -55,47 +66,49 @@ class CollectionCacheMixin:
         self._storage._sync_directory(folder)
 
     @staticmethod
-    def _item_cache_hash(raw_text):
+    def _item_cache_hash(raw_text: bytes) -> str:
         _hash = sha256()
         _hash.update(storage.CACHE_VERSION)
         _hash.update(raw_text)
         return _hash.hexdigest()
 
-    def _item_cache_content(self, item, cache_hash=None):
-        text = item.serialize()
-        if cache_hash is None:
-            cache_hash = self._item_cache_hash(text.encode(self._encoding))
-        return (cache_hash, item.uid, item.etag, text, item.name,
-                item.component_name, *item.time_range)
+    def _item_cache_content(self, item: radicale_item.Item) -> CacheContent:
+        return CacheContent(item.uid, item.etag, item.serialize(), item.name,
+                            item.component_name, *item.time_range)
 
-    def _store_item_cache(self, href, item, cache_hash=None):
+    def _store_item_cache(self, href: str, item: radicale_item.Item,
+                          cache_hash: str = "") -> CacheContent:
+        if not cache_hash:
+            cache_hash = self._item_cache_hash(
+                item.serialize().encode(self._encoding))
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
-        content = self._item_cache_content(item, cache_hash)
+        content = self._item_cache_content(item)
         self._storage._makedirs_synced(cache_folder)
         # Race: Other processes might have created and locked the file.
         with contextlib.suppress(PermissionError), self._atomic_write(
-                os.path.join(cache_folder, href), "wb") as f:
-            pickle.dump(content, f)
+                os.path.join(cache_folder, href), "wb") as fo:
+            fb = cast(BinaryIO, fo)
+            pickle.dump((cache_hash, *content), fb)
         return content
 
-    def _load_item_cache(self, href, input_hash):
+    def _load_item_cache(self, href: str, cache_hash: str
+                         ) -> Optional[CacheContent]:
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
-        cache_hash = uid = etag = text = name = tag = start = end = None
         try:
             with open(os.path.join(cache_folder, href), "rb") as f:
-                cache_hash, *content = pickle.load(f)
-            if cache_hash == input_hash:
-                uid, etag, text, name, tag, start, end = content
+                hash_, *remainder = pickle.load(f)
+            if hash_ and hash_ == cache_hash:
+                return CacheContent(*remainder)
         except FileNotFoundError:
             pass
         except (pickle.UnpicklingError, ValueError) as e:
             logger.warning("Failed to load item cache entry %r in %r: %s",
                            href, self.path, e, exc_info=True)
-        return cache_hash, uid, etag, text, name, tag, start, end
+        return None
 
-    def _clean_item_cache(self):
+    def _clean_item_cache(self) -> None:
         cache_folder = os.path.join(self._filesystem_path, ".Radicale.cache",
                                     "item")
         self._clean_cache(cache_folder, (
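
Aside (not part of the diff): taken together, ``_store_item_cache`` and ``_load_item_cache`` implement a hash-validated pickle cache. An entry is written with its content hash prepended; on read, the stored hash must match the hash of the current raw item, otherwise the entry is treated as stale and a cache miss is reported. A self-contained sketch of that round-trip, with stand-in names and values:

    import os
    import pickle
    import tempfile
    from hashlib import sha256
    from typing import Optional

    CACHE_VERSION = b"1"  # stand-in for storage.CACHE_VERSION

    def item_cache_hash(raw_text: bytes) -> str:
        # Hash covers the cache format version and the raw item text, so
        # both a changed item and a changed format invalidate the entry.
        _hash = sha256()
        _hash.update(CACHE_VERSION)
        _hash.update(raw_text)
        return _hash.hexdigest()

    def store(path: str, cache_hash: str, content: tuple) -> None:
        # Prepend the hash to the cached content, as the diff does with
        # pickle.dump((cache_hash, *content), fb).
        with open(path, "wb") as f:
            pickle.dump((cache_hash, *content), f)

    def load(path: str, cache_hash: str) -> Optional[tuple]:
        # Return the content only if the stored hash matches; a missing
        # file or a stale hash is a cache miss (None).
        try:
            with open(path, "rb") as f:
                hash_, *remainder = pickle.load(f)
            if hash_ and hash_ == cache_hash:
                return tuple(remainder)
        except FileNotFoundError:
            pass
        return None

    raw = b"BEGIN:VCALENDAR..."
    h = item_cache_hash(raw)
    with tempfile.TemporaryDirectory() as folder:
        path = os.path.join(folder, "event.ics")
        store(path, h, ("uid1", "etag1"))
        assert load(path, h) == ("uid1", "etag1")                # hit
        assert load(path, item_cache_hash(b"changed")) is None   # stale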