1
0
Fork 0
mirror of https://github.com/Kozea/Radicale.git synced 2025-08-04 18:22:26 +00:00

rename function

This commit is contained in:
Peter Bieringer 2024-12-10 08:25:14 +01:00
parent 05d4e91856
commit 644548c866
5 changed files with 10 additions and 14 deletions

View file

@@ -82,7 +82,7 @@ class CollectionPartCache(CollectionBase):
         if not cache_hash:
             cache_hash = self._item_cache_hash(
                 item.serialize().encode(self._encoding))
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         content = self._item_cache_content(item)
         self._storage._makedirs_synced(cache_folder)
         # Race: Other processes might have created and locked the file.
@@ -95,7 +95,7 @@ class CollectionPartCache(CollectionBase):
     def _load_item_cache(self, href: str, cache_hash: str
                          ) -> Optional[CacheContent]:
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         try:
             with open(os.path.join(cache_folder, href), "rb") as f:
                 hash_, *remainder = pickle.load(f)
@@ -109,7 +109,7 @@ class CollectionPartCache(CollectionBase):
             return None

     def _clean_item_cache(self) -> None:
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         self._clean_cache(cache_folder, (
             e.name for e in os.scandir(cache_folder) if not
             os.path.isfile(os.path.join(self._filesystem_path, e.name))))

View file

@@ -47,8 +47,7 @@ class CollectionPartHistory(CollectionBase):
        string for deleted items) and a history etag, which is a hash over
        the previous history etag and the etag separated by "/".
        """
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         try:
             with open(os.path.join(history_folder, href), "rb") as f:
                 cache_etag, history_etag = pickle.load(f)
@@ -76,8 +75,7 @@ class CollectionPartHistory(CollectionBase):
     def _get_deleted_history_hrefs(self):
         """Returns the hrefs of all deleted items that are still in the
         history cache."""
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         with contextlib.suppress(FileNotFoundError):
             for entry in os.scandir(history_folder):
                 href = entry.name
@@ -89,7 +87,6 @@ class CollectionPartHistory(CollectionBase):
     def _clean_history(self):
         # Delete all expired history entries of deleted items.
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
                           max_age=self._max_sync_token_age)

View file

@@ -42,8 +42,8 @@ class StoragePartMove(StorageBase):
         if item.collection._filesystem_path != to_collection._filesystem_path:
             self._sync_directory(item.collection._filesystem_path)
         # Move the item cache entry
-        cache_folder = self._get_collection_cache_folder(item.collection._filesystem_path, ".Radicale.cache", "item")
-        to_cache_folder = self._get_collection_cache_folder(to_collection._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._get_collection_cache_subfolder(item.collection._filesystem_path, ".Radicale.cache", "item")
+        to_cache_folder = self._get_collection_cache_subfolder(to_collection._filesystem_path, ".Radicale.cache", "item")
         self._makedirs_synced(to_cache_folder)
         try:
             os.replace(os.path.join(cache_folder, item.href),

View file

@@ -67,8 +67,7 @@ class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
         if token_name == old_token_name:
             # Nothing changed
             return token, ()
-        token_folder = os.path.join(self._filesystem_path,
-                                    ".Radicale.cache", "sync-token")
+        token_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "sync-token")
         token_path = os.path.join(token_folder, token_name)
         old_state = {}
         if old_token_name:

View file

@@ -76,7 +76,7 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
             yield radicale_item.find_available_uid(
                 lambda href: not is_safe_free_href(href), suffix)
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         self._storage._makedirs_synced(cache_folder)
         for item in items:
             uid = item.uid