
Add: option [debug] storage_cache_action for conditional logging

Peter Bieringer 2024-12-14 16:49:54 +01:00
parent 05b8172f8f
commit a7ce8f032c
9 changed files with 33 additions and 5 deletions
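At its core, the commit gates new per-item cache log lines behind a boolean option read from the `[logging]` section (the commit message says `[debug]`, but the diffs below read the option from `[logging]`). A minimal sketch of that pattern, using only the standard library; the class and method names merely mirror the diff and are not Radicale's actual code:

```python
import logging

logger = logging.getLogger("radicale")


class Storage:
    """Illustrative stand-in for Radicale's multifilesystem storage."""

    def __init__(self, debug_cache_actions: bool) -> None:
        # In the real diff this value comes from
        # configuration.get("logging", "storage_cache_actions").
        self._debug_cache_actions = debug_cache_actions

    def check_cache(self, path: str, cache_hash: str) -> None:
        # The log line is only emitted when the option is enabled, so
        # ordinary debug logs stay free of high-volume cache chatter.
        if self._debug_cache_actions:
            logger.debug("Check cache for: %r with hash %r", path, cache_hash)


storage = Storage(debug_cache_actions=True)
storage.check_cache("/user/calendar/event.ics", "deadbeef")
```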

CHANGELOG.md

@@ -13,6 +13,7 @@
 * Improve: log important module versions on startup
 * Improve: auth.ldap config shown on startup, terminate in case no password is supplied for bind user
 * Add: option [auth] uc_username for uppercase conversion (similar to existing lc_username)
+* Add: option [debug] storage_cache_action for conditional logging

 ## 3.3.1

DOCUMENTATION.md

@@ -1158,18 +1158,24 @@ Log request on level=debug
 Default: `False`

-##### response_content_on_debug = True
+##### response_content_on_debug
 Log response on level=debug

 Default: `False`

-##### rights_rule_doesnt_match_on_debug = True
+##### rights_rule_doesnt_match_on_debug
 Log rights rule which doesn't match on level=debug

 Default: `False`

+##### storage_cache_actions
+Log storage cache actions
+
+Default: `False`

 #### headers

 In this section additional HTTP headers that are sent to clients can be

config

@@ -226,6 +226,8 @@
 # Log rights rule which doesn't match on level=debug
 #rights_rule_doesnt_match_on_debug = False
+# Log storage cache actions
+#storage_cache_actions = False

 [headers]
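The new lines ship commented out, so the option stays at its `False` default until a user uncomments it. A quick configparser sketch of what enabling it looks like (hypothetical inline config; Radicale's real loader lives in radicale/config.py):

```python
import configparser

# Hypothetical user config with the shipped comment removed and the
# value flipped to True; commented-out lines are invisible to the parser.
parser = configparser.ConfigParser()
parser.read_string("""
[logging]
storage_cache_actions = True
""")
assert parser.getboolean("logging", "storage_cache_actions") is True
```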

radicale/config.py

@@ -376,6 +376,10 @@ DEFAULT_CONFIG_SCHEMA: types.CONFIG_SCHEMA = OrderedDict([
             "value": "False",
             "help": "log rights rules which doesn't match on level=debug",
             "type": bool}),
+        ("storage_cache_actions", {
+            "value": "False",
+            "help": "log storage cache action on level=debug",
+            "type": bool}),
         ("mask_passwords", {
             "value": "True",
             "help": "mask passwords in logs",

radicale/storage/multifilesystem/__init__.py

@@ -97,6 +97,7 @@ class Storage(
         logger.info("storage cache subfolder usage for 'item': %s", self._use_cache_subfolder_for_item)
         logger.info("storage cache subfolder usage for 'history': %s", self._use_cache_subfolder_for_history)
         logger.info("storage cache subfolder usage for 'sync-token': %s", self._use_cache_subfolder_for_synctoken)
+        logger.debug("storage cache action logging: %s", self._debug_cache_actions)
         if self._use_cache_subfolder_for_item is True or self._use_cache_subfolder_for_history is True or self._use_cache_subfolder_for_synctoken is True:
             logger.info("storage cache subfolder: %r", self._get_collection_cache_folder())
             self._makedirs_synced(self._get_collection_cache_folder())

radicale/storage/multifilesystem/base.py

@@ -74,6 +74,7 @@ class StorageBase(storage.BaseStorage):
     _use_cache_subfolder_for_item: bool
     _use_cache_subfolder_for_history: bool
     _use_cache_subfolder_for_synctoken: bool
+    _debug_cache_actions: bool
     _folder_umask: str
     _config_umask: int
@@ -93,6 +94,8 @@ class StorageBase(storage.BaseStorage):
             "storage", "use_cache_subfolder_for_synctoken")
         self._folder_umask = configuration.get(
             "storage", "folder_umask")
+        self._debug_cache_actions = configuration.get(
+            "logging", "storage_cache_actions")

     def _get_collection_root_folder(self) -> str:
         return os.path.join(self._filesystem_folder, "collection-root")
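The pattern above, a class-level type annotation plus a one-time `configuration.get(...)` in the constructor, means the hot paths only ever test an attribute instead of re-reading the configuration. A trimmed sketch with a hypothetical `Configuration` stand-in:

```python
class Configuration:
    """Hypothetical stand-in returning already-typed option values."""

    def __init__(self, values: dict) -> None:
        self._values = values

    def get(self, section: str, option: str):
        return self._values[(section, option)]


class StorageBase:
    _debug_cache_actions: bool  # class-level annotation, as in the diff

    def __init__(self, configuration: Configuration) -> None:
        # Read once at startup; per-request code just checks the flag.
        self._debug_cache_actions = configuration.get(
            "logging", "storage_cache_actions")


storage = StorageBase(Configuration({("logging", "storage_cache_actions"): True}))
assert storage._debug_cache_actions is True
```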

radicale/storage/multifilesystem/create_collection.py

@@ -1,7 +1,8 @@
 # This file is part of Radicale - CalDAV and CardDAV server
 # Copyright © 2014 Jean-Marc Martins
 # Copyright © 2012-2017 Guillaume Ayoub
-# Copyright © 2017-2018 Unrud <unrud@outlook.com>
+# Copyright © 2017-2021 Unrud <unrud@outlook.com>
+# Copyright © 2024-2024 Peter Bieringer <pb@bieringer.de>
 #
 # This library is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -24,6 +25,7 @@ import radicale.item as radicale_item
 from radicale import pathutils
 from radicale.storage import multifilesystem
 from radicale.storage.multifilesystem.base import StorageBase
+from radicale.log import logger


 class StoragePartCreateCollection(StorageBase):
@@ -36,6 +38,7 @@ class StoragePartCreateCollection(StorageBase):
         # Path should already be sanitized
         sane_path = pathutils.strip_path(href)
         filesystem_path = pathutils.path_to_filesystem(folder, sane_path)
+        logger.debug("Create collection: %r" % filesystem_path)

         if not props:
             self._makedirs_synced(filesystem_path)
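One stylistic note on the added line: it formats eagerly with `%` (`"Create collection: %r" % filesystem_path`), whereas the cache lines elsewhere in this commit pass arguments to `logger.debug` and let the logging module format lazily, only when a DEBUG record will actually be emitted. A small sketch of the difference; both produce the same output at DEBUG level:

```python
import logging

logger = logging.getLogger("radicale")
filesystem_path = "/var/lib/radicale/collection-root/user/calendar"

# Eager: the message string is built even if DEBUG is disabled.
logger.debug("Create collection: %r" % filesystem_path)

# Lazy: formatting is deferred until the logging module knows the
# record passes the level and filter checks.
logger.debug("Create collection: %r", filesystem_path)
```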

radicale/storage/multifilesystem/get.py

@@ -81,6 +81,8 @@ class CollectionPartGet(CollectionPartCache, CollectionPartLock,
             # The hash of the component in the file system. This is used to check,
             # if the entry in the cache is still valid.
             cache_hash = self._item_cache_hash(raw_text)
+            if self._storage._debug_cache_actions is True:
+                logger.debug("Check cache for: %r with hash %r", path, cache_hash)
             cache_content = self._load_item_cache(href, cache_hash)
             if cache_content is None:
                 with self._acquire_cache_lock("item"):

radicale/storage/multifilesystem/upload.py

@@ -29,6 +29,7 @@ from radicale.storage.multifilesystem.base import CollectionBase
 from radicale.storage.multifilesystem.cache import CollectionPartCache
 from radicale.storage.multifilesystem.get import CollectionPartGet
 from radicale.storage.multifilesystem.history import CollectionPartHistory
+from radicale.log import logger


 class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
@@ -38,12 +39,14 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
                ) -> radicale_item.Item:
         if not pathutils.is_safe_filesystem_path_component(href):
             raise pathutils.UnsafePathError(href)
+        path = pathutils.path_to_filesystem(self._filesystem_path, href)
         try:
-            self._store_item_cache(href, item)
+            cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
+            logger.debug("Store cache for: %r with hash %r", path, cache_hash)
+            self._store_item_cache(href, item, cache_hash)
         except Exception as e:
             raise ValueError("Failed to store item %r in collection %r: %s" %
                              (href, self.path, e)) from e
-        path = pathutils.path_to_filesystem(self._filesystem_path, href)
         # TODO: better fix for "mypy"
         with self._atomic_write(path, newline="") as fo:  # type: ignore
             f = cast(TextIO, fo)
@@ -80,6 +83,7 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
             self._storage._makedirs_synced(cache_folder)
         for item in items:
             uid = item.uid
+            logger.debug("Store item from list with uid: '%s'" % uid)
             try:
                 cache_content = self._item_cache_content(item)
             except Exception as e:
@@ -105,6 +109,8 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
                 f.flush()
                 self._storage._fsync(f)
             with open(os.path.join(cache_folder, href), "wb") as fb:
+                cache_hash = self._item_cache_hash(item.serialize().encode(self._encoding))
+                logger.debug("Store cache for: %r with hash %r", fb.name, cache_hash)
                 pickle.dump(cache_content, fb)
                 fb.flush()
                 self._storage._fsync(fb)
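`_item_cache_hash` itself is not part of this diff; judging by the call sites, which pass `item.serialize().encode(self._encoding)`, it digests the serialized item bytes. A plausible stand-in (the sha256 choice is an assumption, not confirmed by the commit):

```python
import hashlib


def item_cache_hash(raw_text: bytes) -> str:
    # Digest of the serialized iCalendar/vCard bytes: any edit to the
    # item changes the hash and so invalidates the cached entry.
    return hashlib.sha256(raw_text).hexdigest()


# The value that ends up in "Store cache for: %r with hash %r".
print(item_cache_hash(b"BEGIN:VCALENDAR\r\nEND:VCALENDAR\r\n"))
```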