mirror of https://github.com/Kozea/Radicale.git (synced 2025-06-26 16:45:52 +00:00)

commit 62bdfeab40 (parent ff3f2fc3de)
item-cache-mtime-size: feature

3 changed files with 77 additions and 30 deletions
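
In short: this commit adds an opt-in validation mode for the item cache of the multifilesystem storage backend. With the new `[storage]` option `use_mtime_and_size_for_item_cache` enabled, cache entries are keyed by the item file's size and mtime (in nanoseconds) instead of a content hash, so validation only needs an `os.stat()` call; the `_debug_cache_actions` flag gates the new cache-action logging. Judging by the class names in the hunk headers, the three changed files are the backend's cache, get, and upload modules.
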
@@ -73,6 +73,10 @@ class CollectionPartCache(CollectionBase):
         _hash.update(raw_text)
         return _hash.hexdigest()
 
+    @staticmethod
+    def _item_cache_mtime_and_size(size: bytes, raw_text: bytes) -> str:
+        return str(storage.CACHE_VERSION.decode()) + "size=" + str(size) + ";mtime=" + str(raw_text)
+
     def _item_cache_content(self, item: radicale_item.Item) -> CacheContent:
         return CacheContent(item.uid, item.etag, item.serialize(), item.name,
                             item.component_name, *item.time_range)
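
Note that the new helper's annotations are misleading: the call sites below pass `os.stat(path).st_size` and `os.stat(path).st_mtime_ns`, so `size` and `raw_text` are really integers (the names and `bytes` annotations look copied from `_item_cache_hash`). The helper builds a version-prefixed token rather than a digest. A minimal sketch of the resulting token, with a placeholder `CACHE_VERSION` (not Radicale's actual value):

```python
import os

CACHE_VERSION = b"1"  # placeholder assumption, not Radicale's actual value

def item_cache_mtime_and_size(size: int, mtime_ns: int) -> str:
    # Mirrors the new helper: "<version>size=<bytes>;mtime=<nanoseconds>"
    return CACHE_VERSION.decode() + "size=" + str(size) + ";mtime=" + str(mtime_ns)

st = os.stat(__file__)
print(item_cache_mtime_and_size(st.st_size, st.st_mtime_ns))
# e.g. "1size=420;mtime=1719420352000000000"
```
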
@@ -80,8 +84,11 @@ class CollectionPartCache(CollectionBase):
     def _store_item_cache(self, href: str, item: radicale_item.Item,
                           cache_hash: str = "") -> CacheContent:
         if not cache_hash:
-            cache_hash = self._item_cache_hash(
-                item.serialize().encode(self._encoding))
+            if self._storage._use_mtime_and_size_for_item_cache is True:
+                raise RuntimeError("_store_item_cache called without cache_hash is not supported if [storage] use_mtime_and_size_for_item_cache is True")
+            else:
+                cache_hash = self._item_cache_hash(
+                    item.serialize().encode(self._encoding))
         cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         content = self._item_cache_content(item)
         self._storage._makedirs_synced(cache_folder)
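
The new guard makes sense once the cache key is no longer derivable from the item itself: a content hash can always be recomputed from `item.serialize()`, but a size/mtime token depends on the file already written to disk, which only the caller has. Requiring an explicit `cache_hash` in this mode turns a silently wrong fallback into a loud error.
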
@@ -96,12 +103,20 @@ class CollectionPartCache(CollectionBase):
     def _load_item_cache(self, href: str, cache_hash: str
                          ) -> Optional[CacheContent]:
         cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
+        path = os.path.join(cache_folder, href)
         try:
-            with open(os.path.join(cache_folder, href), "rb") as f:
+            with open(path, "rb") as f:
                 hash_, *remainder = pickle.load(f)
                 if hash_ and hash_ == cache_hash:
+                    if self._storage._debug_cache_actions is True:
+                        logger.debug("Item cache match : %r with hash %r", path, cache_hash)
                     return CacheContent(*remainder)
+                else:
+                    if self._storage._debug_cache_actions is True:
+                        logger.debug("Item cache no match : %r with hash %r", path, cache_hash)
         except FileNotFoundError:
+            if self._storage._debug_cache_actions is True:
+                logger.debug("Item cache not found : %r with hash %r", path, cache_hash)
             pass
         except (pickle.UnpicklingError, ValueError) as e:
             logger.warning("Failed to load item cache entry %r in %r: %s",
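
Taken together with the store side further down, each cache file holds a pickled tuple whose first element is the validation token and whose remaining elements are the `CacheContent` fields. A self-contained sketch of that layout and the load-and-compare logic (this `CacheContent` is a simplified stand-in for Radicale's type):

```python
import pickle
from typing import NamedTuple, Optional

class CacheContent(NamedTuple):  # simplified stand-in for Radicale's type
    uid: str
    etag: str
    text: str

def load_entry(path: str, expected_token: str) -> Optional[CacheContent]:
    try:
        with open(path, "rb") as f:
            token, *fields = pickle.load(f)  # first element is the token
    except FileNotFoundError:
        return None                          # no cache entry yet
    if not token or token != expected_token:
        return None                          # stale: file changed since caching
    return CacheContent(*fields)

with open("entry.cache", "wb") as fb:
    pickle.dump(("1size=10;mtime=5", *CacheContent("u1", "e1", "TEXT")), fb)
print(load_entry("entry.cache", "1size=10;mtime=5"))  # hit
print(load_entry("entry.cache", "1size=11;mtime=9"))  # miss -> None
```
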
@@ -80,11 +80,18 @@ class CollectionPartGet(CollectionPartCache, CollectionPartLock,
             raise
         # The hash of the component in the file system. This is used to check,
         # if the entry in the cache is still valid.
-        cache_hash = self._item_cache_hash(raw_text)
-        if self._storage._debug_cache_actions is True:
-            logger.debug("Check cache for: %r with hash %r", path, cache_hash)
+        if self._storage._use_mtime_and_size_for_item_cache is True:
+            cache_hash = self._item_cache_mtime_and_size(os.stat(path).st_size, os.stat(path).st_mtime_ns)
+            if self._storage._debug_cache_actions is True:
+                logger.debug("Item cache check for: %r with mtime and size %r", path, cache_hash)
+        else:
+            cache_hash = self._item_cache_hash(raw_text)
+            if self._storage._debug_cache_actions is True:
+                logger.debug("Item cache check for: %r with hash %r", path, cache_hash)
         cache_content = self._load_item_cache(href, cache_hash)
         if cache_content is None:
+            if self._storage._debug_cache_actions is True:
+                logger.debug("Item cache miss for: %r", path)
             with self._acquire_cache_lock("item"):
                 # Lock the item cache to prevent multiple processes from
                 # generating the same data in parallel.
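
Two details worth noting: `st_mtime_ns` gives nanosecond resolution, so edits landing within the same second as a previous version are still detected where the filesystem records sub-second timestamps; and since `raw_text` has already been read at this point, what the mtime-and-size branch saves on a hit is the hash computation, not the file read. The usual trade-off applies: a change that preserves both size and mtime would go unnoticed, which is presumably why the mode is opt-in.
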
@@ -101,6 +108,8 @@ class CollectionPartGet(CollectionPartCache, CollectionPartLock,
                         vobject_item, = vobject_items
                         temp_item = radicale_item.Item(
                             collection=self, vobject_item=vobject_item)
+                        if self._storage._debug_cache_actions is True:
+                            logger.debug("Item cache store for: %r", path)
                         cache_content = self._store_item_cache(
                             href, temp_item, cache_hash)
                     except Exception as e:

@@ -115,6 +124,9 @@ class CollectionPartGet(CollectionPartCache, CollectionPartLock,
             if not self._item_cache_cleaned:
                 self._item_cache_cleaned = True
                 self._clean_item_cache()
+        else:
+            if self._storage._debug_cache_actions is True:
+                logger.debug("Item cache hit for: %r", path)
         last_modified = time.strftime(
             "%a, %d %b %Y %H:%M:%S GMT",
             time.gmtime(os.path.getmtime(path)))
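
With the check, miss, store, and hit messages above, every branch of the lookup now reports itself when `_debug_cache_actions` is set, so the logs reveal which validation mode is active and whether the cache behaves as expected.
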
@@ -41,19 +41,26 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
             raise pathutils.UnsafePathError(href)
         path = pathutils.path_to_filesystem(self._filesystem_path, href)
         try:
-            cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
-            logger.debug("Store cache for: %r with hash %r", path, cache_hash)
-            self._store_item_cache(href, item, cache_hash)
+            with self._atomic_write(path, newline="") as fo:  # type: ignore
+                f = cast(TextIO, fo)
+                f.write(item.serialize())
         except Exception as e:
             raise ValueError("Failed to store item %r in collection %r: %s" %
                              (href, self.path, e)) from e
-        # TODO: better fix for "mypy"
-        with self._atomic_write(path, newline="") as fo:  # type: ignore
-            f = cast(TextIO, fo)
-            f.write(item.serialize())
-        # Clean the cache after the actual item is stored, or the cache entry
-        # will be removed again.
-        self._clean_item_cache()
+        # store cache file
+        if self._storage._use_mtime_and_size_for_item_cache is True:
+            cache_hash = self._item_cache_mtime_and_size(os.stat(path).st_size, os.stat(path).st_mtime_ns)
+            if self._storage._debug_cache_actions is True:
+                logger.debug("Item cache store for: %r with mtime and size %r", path, cache_hash)
+        else:
+            cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
+            if self._storage._debug_cache_actions is True:
+                logger.debug("Item cache store for: %r with hash %r", path, cache_hash)
+        try:
+            self._store_item_cache(href, item, cache_hash)
+        except Exception as e:
+            raise ValueError("Failed to store item cache of %r in collection %r: %s" %
+                             (href, self.path, e)) from e
         # Track the change
         self._update_history_etag(href, item)
         self._clean_history()
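
The order of operations is reversed here deliberately: the old code stored the cache entry before writing the item, which cannot work in mtime-and-size mode since the token must be computed from a stat of the freshly written file. Writing first also lets the two try/except blocks distinguish a failed item write from a failed cache write in their error messages.
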
@@ -84,15 +91,11 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
         for item in items:
             uid = item.uid
             logger.debug("Store item from list with uid: '%s'" % uid)
-            try:
-                cache_content = self._item_cache_content(item)
-            except Exception as e:
-                raise ValueError(
-                    "Failed to store item %r in temporary collection %r: %s" %
-                    (uid, self.path, e)) from e
+            cache_content = self._item_cache_content(item)
             for href in get_safe_free_hrefs(uid):
+                path = os.path.join(self._filesystem_path, href)
                 try:
-                    f = open(os.path.join(self._filesystem_path, href),
+                    f = open(path,
                              "w", newline="", encoding=self._encoding)
                 except OSError as e:
                     if (sys.platform != "win32" and e.errno == errno.EINVAL or
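
Hoisting `os.path.join(self._filesystem_path, href)` into `path` does double duty: it removes the duplicated expression from the `open()` call and provides the variable that the cache-token code in the next hunk stats. The try/except that wrapped `_item_cache_content` moves, in substance, to the write loop below.
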
@@ -104,14 +107,31 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
                 else:
                     raise RuntimeError("No href found for item %r in temporary "
                                        "collection %r" % (uid, self.path))
-                with f:
-                    f.write(item.serialize())
-                    f.flush()
-                    self._storage._fsync(f)
-                with open(os.path.join(cache_folder, href), "wb") as fb:
-                    logger.debug("Store cache for: %r with hash %r", fb.name, cache_hash)
-                    pickle.dump(cache_content, fb)
+                try:
+                    with f:
+                        f.write(item.serialize())
+                        f.flush()
+                        self._storage._fsync(f)
+                except Exception as e:
+                    raise ValueError(
+                        "Failed to store item %r in temporary collection %r: %s" %
+                        (uid, self.path, e)) from e
+
+                # store cache file
+                if self._storage._use_mtime_and_size_for_item_cache is True:
+                    cache_hash = self._item_cache_mtime_and_size(os.stat(path).st_size, os.stat(path).st_mtime_ns)
+                    if self._storage._debug_cache_actions is True:
+                        logger.debug("Item cache store for: %r with mtime and size %r", path, cache_hash)
+                else:
+                    cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
+                    if self._storage._debug_cache_actions is True:
+                        logger.debug("Item cache store for: %r with hash %r", path, cache_hash)
+                path_cache = os.path.join(cache_folder, href)
+                if self._storage._debug_cache_actions is True:
+                    logger.debug("Item cache store into: %r", path_cache)
+                with open(os.path.join(cache_folder, href), "wb") as fb:
+                    pickle.dump((cache_hash, *cache_content), fb)
                     fb.flush()
                     self._storage._fsync(fb)
             self._storage._sync_directory(cache_folder)
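
The bulk path now follows the same write-then-cache discipline as `upload()`, including flush/fsync on both the item file and the cache file. A runnable, condensed sketch of the per-item flow, with stand-in names rather than Radicale's API (the content-hash branch is simplified):

```python
import hashlib
import os
import pickle

CACHE_VERSION = b"1"  # placeholder assumption, as in the sketch above

def store_item_with_cache(item_path: str, cache_path: str, text: str,
                          content: tuple, use_mtime_and_size: bool) -> str:
    with open(item_path, "w", newline="", encoding="utf-8") as f:
        f.write(text)
        f.flush()
        os.fsync(f.fileno())      # stand-in for self._storage._fsync(f)
    if use_mtime_and_size:
        st = os.stat(item_path)   # stat after the write so the token matches disk
        token = "%ssize=%d;mtime=%d" % (CACHE_VERSION.decode(),
                                        st.st_size, st.st_mtime_ns)
    else:
        token = hashlib.sha256(CACHE_VERSION + text.encode("utf-8")).hexdigest()
    with open(cache_path, "wb") as fb:
        pickle.dump((token, *content), fb)  # token first, then the content fields
        fb.flush()
        os.fsync(fb.fileno())
    return token

print(store_item_with_cache("item.ics", "item.cache", "BEGIN:VCALENDAR\n",
                            ("uid1", "etag1"), True))
```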