Mirror of https://github.com/Kozea/Radicale.git

Add: option [debug] storage_cache_action for conditional logging

Peter Bieringer 2024-12-14 16:49:54 +01:00
parent 05b8172f8f
commit a7ce8f032c
9 changed files with 33 additions and 5 deletions
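The new option is read from the `[logging]` section of the configuration (the commit title says `[debug]`, but the storage backend reads it via `configuration.get("logging", "storage_cache_actions")`, as the base.py hunk below shows). The messages are emitted on level=debug, so the log level has to be raised as well; a minimal sketch of enabling it in the Radicale config:

```ini
[logging]
level = debug
storage_cache_actions = True
```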

CHANGELOG.md

@@ -13,6 +13,7 @@
 * Improve: log important module versions on startup
 * Improve: auth.ldap config shown on startup, terminate in case no password is supplied for bind user
 * Add: option [auth] uc_username for uppercase conversion (similar to existing lc_username)
+* Add: option [debug] storage_cache_action for conditional logging

 ## 3.3.1

DOCUMENTATION.md

@@ -1158,18 +1158,24 @@ Log request on level=debug
 Default: `False`
-##### response_content_on_debug = True
+##### response_content_on_debug
 Log response on level=debug
 Default: `False`
-##### rights_rule_doesnt_match_on_debug = True
+##### rights_rule_doesnt_match_on_debug
 Log rights rule which doesn't match on level=debug
 Default: `False`
+##### storage_cache_actions
+Log storage cache actions
+Default: `False`
 #### headers
 In this section additional HTTP headers that are sent to clients can be

config

@@ -226,6 +226,8 @@
 # Log rights rule which doesn't match on level=debug
 #rights_rule_doesnt_match_on_debug = False
+# Log storage cache actions
+#storage_cache_actions = False

 [headers]

radicale/config.py

@@ -376,6 +376,10 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
             "value": "False",
             "help": "log rights rules which doesn't match on level=debug",
             "type": bool}),
+        ("storage_cache_actions", {
+            "value": "False",
+            "help": "log storage cache action on level=debug",
+            "type": bool}),
         ("mask_passwords", {
             "value": "True",
             "help": "mask passwords in logs",

radicale/storage/multifilesystem/__init__.py

@@ -97,6 +97,7 @@ class Storage(
         logger.info("storage cache subfolder usage for 'item': %s", self._use_cache_subfolder_for_item)
         logger.info("storage cache subfolder usage for 'history': %s", self._use_cache_subfolder_for_history)
         logger.info("storage cache subfolder usage for 'sync-token': %s", self._use_cache_subfolder_for_synctoken)
+        logger.debug("storage cache action logging: %s", self._debug_cache_actions)
         if self._use_cache_subfolder_for_item is True or self._use_cache_subfolder_for_history is True or self._use_cache_subfolder_for_synctoken is True:
             logger.info("storage cache subfolder: %r", self._get_collection_cache_folder())
             self._makedirs_synced(self._get_collection_cache_folder())

radicale/storage/multifilesystem/base.py

@@ -74,6 +74,7 @@ class StorageBase(storage.BaseStorage):
     _use_cache_subfolder_for_item: bool
     _use_cache_subfolder_for_history: bool
     _use_cache_subfolder_for_synctoken: bool
+    _debug_cache_actions: bool
     _folder_umask: str
     _config_umask: int

@@ -93,6 +94,8 @@ class StorageBase(storage.BaseStorage):
             "storage", "use_cache_subfolder_for_synctoken")
         self._folder_umask = configuration.get(
             "storage", "folder_umask")
+        self._debug_cache_actions = configuration.get(
+            "logging", "storage_cache_actions")

     def _get_collection_root_folder(self) -> str:
         return os.path.join(self._filesystem_folder, "collection-root")

radicale/storage/multifilesystem/create_collection.py

@@ -1,7 +1,8 @@
 # This file is part of Radicale - CalDAV and CardDAV server
 # Copyright © 2014 Jean-Marc Martins
 # Copyright © 2012-2017 Guillaume Ayoub
-# Copyright © 2017-2018 Unrud <unrud@outlook.com>
+# Copyright © 2017-2021 Unrud <unrud@outlook.com>
+# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
 #
 # This library is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by

@@ -24,6 +25,7 @@ import radicale.item as radicale_item
 from radicale import pathutils
 from radicale.storage import multifilesystem
 from radicale.storage.multifilesystem.base import StorageBase
+from radicale.log import logger


 class StoragePartCreateCollection(StorageBase):

@@ -36,6 +38,7 @@ class StoragePartCreateCollection(StorageBase):
         # Path should already be sanitized
         sane_path = pathutils.strip_path(href)
         filesystem_path = pathutils.path_to_filesystem(folder, sane_path)
+        logger.debug("Create collection: %r" % filesystem_path)
         if not props:
             self._makedirs_synced(filesystem_path)

radicale/storage/multifilesystem/get.py

@@ -81,6 +81,8 @@ class CollectionPartGet(CollectionPartCache, CollectionPartLock,
         # The hash of the component in the file system. This is used to check,
         # if the entry in the cache is still valid.
         cache_hash = self._item_cache_hash(raw_text)
+        if self._storage._debug_cache_actions is True:
+            logger.debug("Check cache for: %r with hash %r", path, cache_hash)
         cache_content = self._load_item_cache(href, cache_hash)
         if cache_content is None:
             with self._acquire_cache_lock("item"):
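
The get.py change above shows what the new messages expose: the multifilesystem backend trusts a cached item only if the hash of the current on-disk text matches the hash recorded when the cache entry was written. A minimal, self-contained sketch of that hash-validated lookup pattern (hypothetical names and hash choice; Radicale's actual `_item_cache_hash`/`_load_item_cache` helpers differ in detail):

```python
import hashlib
import logging
import pickle
from pathlib import Path

logger = logging.getLogger(__name__)
DEBUG_CACHE_ACTIONS = True  # stands in for the new storage_cache_actions option


def item_cache_hash(raw_text: bytes) -> str:
    # Content hash that decides whether a cache entry is still valid.
    return hashlib.sha256(raw_text).hexdigest()


def load_item_cache(cache_file: Path, expected_hash: str):
    # Return the cached value only if it was written for the same content.
    if not cache_file.exists():
        return None
    with cache_file.open("rb") as fb:
        stored_hash, value = pickle.load(fb)
    return value if stored_hash == expected_hash else None


def get_item(item_file: Path, cache_file: Path) -> str:
    raw_text = item_file.read_bytes()
    cache_hash = item_cache_hash(raw_text)
    if DEBUG_CACHE_ACTIONS:
        logger.debug("Check cache for: %r with hash %r", str(item_file), cache_hash)
    value = load_item_cache(cache_file, cache_hash)
    if value is None:
        value = raw_text.decode()           # cache miss: re-read/parse the item
        with cache_file.open("wb") as fb:   # and refresh the cache entry
            pickle.dump((cache_hash, value), fb)
    return value
```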

radicale/storage/multifilesystem/upload.py

@@ -29,6 +29,7 @@ from radicale.storage.multifilesystem.base import CollectionBase
 from radicale.storage.multifilesystem.cache import CollectionPartCache
 from radicale.storage.multifilesystem.get import CollectionPartGet
 from radicale.storage.multifilesystem.history import CollectionPartHistory
+from radicale.log import logger


 class CollectionPartUpload(CollectionPartGet, CollectionPartCache,

@@ -38,12 +39,14 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
             ) -> radicale_item.Item:
         if not pathutils.is_safe_filesystem_path_component(href):
             raise pathutils.UnsafePathError(href)
+        path = pathutils.path_to_filesystem(self._filesystem_path, href)
         try:
-            self._store_item_cache(href, item)
+            cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
+            logger.debug("Store cache for: %r with hash %r", path, cache_hash)
+            self._store_item_cache(href, item, cache_hash)
         except Exception as e:
             raise ValueError("Failed to store item %r in collection %r: %s" %
                              (href, self.path, e)) from e
-        path = pathutils.path_to_filesystem(self._filesystem_path, href)
         # TODO: better fix for "mypy"
         with self._atomic_write(path, newline="") as fo:  # type: ignore
             f = cast(TextIO, fo)

@@ -80,6 +83,7 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
         self._storage._makedirs_synced(cache_folder)
         for item in items:
             uid = item.uid
+            logger.debug("Store item from list with uid: '%s'" % uid)
             try:
                 cache_content = self._item_cache_content(item)
             except Exception as e:

@@ -105,6 +109,8 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
                 f.flush()
                 self._storage._fsync(f)
             with open(os.path.join(cache_folder, href), "wb") as fb:
+                cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
+                logger.debug("Store cache for: %r with hash %r", fb.name, cache_hash)
                 pickle.dump(cache_content, fb)
                 fb.flush()
                 self._storage._fsync(fb)
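
The store side mirrors the lookup: the hash is computed once from the serialized item, logged when the option is enabled, and written alongside the cache entry so later lookups can validate it without re-serializing. A compact store-side sketch, reusing the hypothetical helpers from the lookup sketch above:

```python
def store_item(item_text: str, item_file: Path, cache_file: Path) -> None:
    raw = item_text.encode()
    cache_hash = item_cache_hash(raw)
    if DEBUG_CACHE_ACTIONS:
        logger.debug("Store cache for: %r with hash %r", str(item_file), cache_hash)
    with cache_file.open("wb") as fb:  # cache entry keyed by the content hash
        pickle.dump((cache_hash, item_text), fb)
    item_file.write_bytes(raw)  # then write the item itself
```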