1
0
Fork 0
mirror of https://github.com/Kozea/Radicale.git synced 2025-06-26 16:45:52 +00:00
Radicale/radicale/storage/multifilesystem/sync.py

124 lines
5.6 KiB
Python
Raw Normal View History

2021-12-08 21:45:42 +01:00
# This file is part of Radicale - CalDAV and CardDAV server
2018-09-04 03:33:50 +02:00
# Copyright © 2014 Jean-Marc Martins
# Copyright © 2012-2017 Guillaume Ayoub
2019-06-17 04:13:25 +02:00
# Copyright © 2017-2019 Unrud <unrud@outlook.com>
2018-09-04 03:33:50 +02:00
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
2021-07-26 20:56:46 +02:00
import contextlib
2018-09-04 03:33:50 +02:00
import itertools
import os
import pickle
from hashlib import sha256
2021-07-26 20:56:47 +02:00
from typing import BinaryIO, Iterable, Tuple, cast
2018-09-04 03:33:50 +02:00
from radicale.log import logger
2021-07-26 20:56:47 +02:00
from radicale.storage.multifilesystem.base import CollectionBase
from radicale.storage.multifilesystem.cache import CollectionPartCache
from radicale.storage.multifilesystem.history import CollectionPartHistory
2018-09-04 03:33:50 +02:00
2021-07-26 20:56:47 +02:00
class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
                         CollectionBase):

    def sync(self, old_token: str = "") -> Tuple[str, Iterable[str]]:
        """Return the collection's current sync token and the changes
        since ``old_token``.

        The sync token has the form http://radicale.org/ns/sync/TOKEN_NAME
        where TOKEN_NAME is the sha256 hash of all history etags of present
        and past items of the collection.

        Returns a tuple ``(token, hrefs)``; ``hrefs`` contains the items
        that were created, modified or deleted since ``old_token`` (empty
        when nothing changed).

        Raises ValueError when ``old_token`` is malformed or its stored
        state is no longer available.
        """

        def check_token_name(token_name: str) -> bool:
            # A valid token name is a lowercase hexadecimal sha256 digest.
            return (len(token_name) == 64 and
                    all(c in "0123456789abcdef" for c in token_name))

        old_token_name = ""
        if old_token:
            # Extract the token name from the sync token
            if not old_token.startswith("http://radicale.org/ns/sync/"):
                raise ValueError("Malformed token: %r" % old_token)
            old_token_name = old_token[len("http://radicale.org/ns/sync/"):]
            if not check_token_name(old_token_name):
                raise ValueError("Malformed token: %r" % old_token)
        # Get the current state and sync-token of the collection.
        state = {}
        token_name_hash = sha256()
        # Find the history of all existing and deleted items
        for href, item in itertools.chain(
                ((item.href, item) for item in self.get_all()),
                ((href, None) for href in self._get_deleted_history_hrefs())):
            history_etag = self._update_history_etag(href, item)
            state[href] = history_etag
            token_name_hash.update((href + "/" + history_etag).encode())
        token_name = token_name_hash.hexdigest()
        token = "http://radicale.org/ns/sync/%s" % token_name
        if token_name == old_token_name:
            # Nothing changed
            return token, ()
        token_folder = os.path.join(self._filesystem_path,
                                    ".Radicale.cache", "sync-token")
        token_path = os.path.join(token_folder, token_name)
        old_state = {}
        if old_token_name:
            # load the old token state
            old_token_path = os.path.join(token_folder, old_token_name)
            try:
                # Race: Another process might have deleted the file.
                with open(old_token_path, "rb") as f:
                    old_state = pickle.load(f)
            except (FileNotFoundError, pickle.UnpicklingError,
                    ValueError) as e:
                if isinstance(e, (pickle.UnpicklingError, ValueError)):
                    logger.warning(
                        "Failed to load stored sync token %r in %r: %s",
                        old_token_name, self.path, e, exc_info=True)
                    # Delete the damaged file
                    with contextlib.suppress(FileNotFoundError,
                                             PermissionError):
                        os.remove(old_token_path)
                # Report a missing or damaged token as unknown so the
                # client falls back to a full sync.
                raise ValueError("Token not found: %r" % old_token)
        # write the new token state or update the modification time of
        # existing token state
        if not os.path.exists(token_path):
            self._storage._makedirs_synced(token_folder)
            try:
                # Race: Other processes might have created and locked the file.
                with self._atomic_write(token_path, "wb") as fo:  # type: ignore # for now, TODO fix for "mypy"
                    fb = cast(BinaryIO, fo)
                    pickle.dump(state, fb)
            except PermissionError:
                pass
            else:
                # clean up old sync tokens and item cache
                self._clean_cache(token_folder, os.listdir(token_folder),
                                  max_age=self._max_sync_token_age)
                self._clean_history()
        else:
            # Try to update the modification time
            with contextlib.suppress(FileNotFoundError):
                # Race: Another process might have deleted the file.
                os.utime(token_path)
        # Find all new, changed and deleted (that are still in the item cache)
        # items
        changes = [href for href, history_etag in state.items()
                   if history_etag != old_state.get(href)]
        # Find all deleted items that are no longer in the item cache
        changes.extend(href for href in old_state if href not in state)
        return token, changes