From 644548c866dc07f7172cae0d53219577aca7c6a0 Mon Sep 17 00:00:00 2001
From: Peter Bieringer
Date: Tue, 10 Dec 2024 08:25:14 +0100
Subject: [PATCH] rename function

---
 radicale/storage/multifilesystem/cache.py   | 6 +++---
 radicale/storage/multifilesystem/history.py | 9 +++------
 radicale/storage/multifilesystem/move.py    | 4 ++--
 radicale/storage/multifilesystem/sync.py    | 3 +--
 radicale/storage/multifilesystem/upload.py  | 2 +-
 5 files changed, 10 insertions(+), 14 deletions(-)

diff --git a/radicale/storage/multifilesystem/cache.py b/radicale/storage/multifilesystem/cache.py
index bf596eb3..1d90f975 100644
--- a/radicale/storage/multifilesystem/cache.py
+++ b/radicale/storage/multifilesystem/cache.py
@@ -82,7 +82,7 @@ class CollectionPartCache(CollectionBase):
         if not cache_hash:
             cache_hash = self._item_cache_hash(
                 item.serialize().encode(self._encoding))
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         content = self._item_cache_content(item)
         self._storage._makedirs_synced(cache_folder)
         # Race: Other processes might have created and locked the file.
@@ -95,7 +95,7 @@ class CollectionPartCache(CollectionBase):
 
     def _load_item_cache(self, href: str, cache_hash: str
                          ) -> Optional[CacheContent]:
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         try:
             with open(os.path.join(cache_folder, href), "rb") as f:
                 hash_, *remainder = pickle.load(f)
@@ -109,7 +109,7 @@ class CollectionPartCache(CollectionBase):
         return None
 
     def _clean_item_cache(self) -> None:
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         self._clean_cache(cache_folder, (
             e.name for e in os.scandir(cache_folder) if not
             os.path.isfile(os.path.join(self._filesystem_path, e.name))))
diff --git a/radicale/storage/multifilesystem/history.py b/radicale/storage/multifilesystem/history.py
index c385c32a..f618c99a 100644
--- a/radicale/storage/multifilesystem/history.py
+++ b/radicale/storage/multifilesystem/history.py
@@ -47,8 +47,7 @@ class CollectionPartHistory(CollectionBase):
         string for deleted items) and a history etag, which is a hash over
         the previous history etag and the etag separated by "/".
         """
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         try:
             with open(os.path.join(history_folder, href), "rb") as f:
                 cache_etag, history_etag = pickle.load(f)
@@ -76,8 +75,7 @@ class CollectionPartHistory(CollectionBase):
     def _get_deleted_history_hrefs(self):
         """Returns the hrefs of all deleted items that are still in the
         history cache."""
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         with contextlib.suppress(FileNotFoundError):
             for entry in os.scandir(history_folder):
                 href = entry.name
@@ -89,7 +87,6 @@ class CollectionPartHistory(CollectionBase):
 
     def _clean_history(self):
         # Delete all expired history entries of deleted items.
-        history_folder = os.path.join(self._filesystem_path,
-                                      ".Radicale.cache", "history")
+        history_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "history")
         self._clean_cache(history_folder, self._get_deleted_history_hrefs(),
                           max_age=self._max_sync_token_age)
diff --git a/radicale/storage/multifilesystem/move.py b/radicale/storage/multifilesystem/move.py
index 3518a3b4..7b1eb490 100644
--- a/radicale/storage/multifilesystem/move.py
+++ b/radicale/storage/multifilesystem/move.py
@@ -42,8 +42,8 @@ class StoragePartMove(StorageBase):
         if item.collection._filesystem_path != to_collection._filesystem_path:
             self._sync_directory(item.collection._filesystem_path)
         # Move the item cache entry
-        cache_folder = self._get_collection_cache_folder(item.collection._filesystem_path, ".Radicale.cache", "item")
-        to_cache_folder = self._get_collection_cache_folder(to_collection._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._get_collection_cache_subfolder(item.collection._filesystem_path, ".Radicale.cache", "item")
+        to_cache_folder = self._get_collection_cache_subfolder(to_collection._filesystem_path, ".Radicale.cache", "item")
         self._makedirs_synced(to_cache_folder)
         try:
             os.replace(os.path.join(cache_folder, item.href),
diff --git a/radicale/storage/multifilesystem/sync.py b/radicale/storage/multifilesystem/sync.py
index ae703c91..6a315c4f 100644
--- a/radicale/storage/multifilesystem/sync.py
+++ b/radicale/storage/multifilesystem/sync.py
@@ -67,8 +67,7 @@ class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
         if token_name == old_token_name:
             # Nothing changed
             return token, ()
-        token_folder = os.path.join(self._filesystem_path,
-                                    ".Radicale.cache", "sync-token")
+        token_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "sync-token")
         token_path = os.path.join(token_folder, token_name)
         old_state = {}
         if old_token_name:
diff --git a/radicale/storage/multifilesystem/upload.py b/radicale/storage/multifilesystem/upload.py
index 01c52b75..41af0a36 100644
--- a/radicale/storage/multifilesystem/upload.py
+++ b/radicale/storage/multifilesystem/upload.py
@@ -76,7 +76,7 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
                 yield radicale_item.find_available_uid(
                     lambda href: not is_safe_free_href(href), suffix)
 
-        cache_folder = self._storage._get_collection_cache_folder(self._filesystem_path, ".Radicale.cache", "item")
+        cache_folder = self._storage._get_collection_cache_subfolder(self._filesystem_path, ".Radicale.cache", "item")
         self._storage._makedirs_synced(cache_folder)
         for item in items:
             uid = item.uid
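The renamed helper _get_collection_cache_subfolder is defined outside this patch, in the storage base class. Judging only from the call sites above, where it replaces inlined calls such as os.path.join(self._filesystem_path, ".Radicale.cache", "history"), a minimal sketch of the behavior these call sites rely on could be (the body below is an inference from the patch, not the actual implementation):

    import os

    class StorageBase:
        def _get_collection_cache_subfolder(self, filesystem_path: str,
                                            folder: str, subfolder: str) -> str:
            # Inferred from the call sites in this patch: resolve the path of
            # a cache subfolder ("item", "history" or "sync-token") below the
            # collection's ".Radicale.cache" folder.
            return os.path.join(filesystem_path, folder, subfolder)

Centralizing the path construction in one helper keeps every cache consumer (item cache, history, sync tokens) pointing at the same location, so a later change to the cache layout only has to touch the helper instead of each call site.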