Mirror of https://github.com/Kozea/Radicale.git, synced 2025-09-15 20:36:55 +00:00

Commit 50140a54f5: resolved conflicts

27 changed files with 383 additions and 80 deletions
@@ -45,8 +45,8 @@ def propose_filename(collection: storage.BaseCollection) -> str:
 
 class ApplicationPartGet(ApplicationBase):
 
-    def _content_disposition_attachement(self, filename: str) -> str:
-        value = "attachement"
+    def _content_disposition_attachment(self, filename: str) -> str:
+        value = "attachment"
         try:
             encoded_filename = quote(filename, encoding=self._encoding)
         except UnicodeEncodeError:
@@ -91,7 +91,7 @@ class ApplicationPartGet(ApplicationBase):
                 return (httputils.NOT_ALLOWED if limited_access else
                         httputils.DIRECTORY_LISTING)
             content_type = xmlutils.MIMETYPES[item.tag]
-            content_disposition = self._content_disposition_attachement(
+            content_disposition = self._content_disposition_attachment(
                 propose_filename(item))
         elif limited_access:
             return httputils.NOT_ALLOWED

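For context, the renamed helper builds a Content-Disposition value like the one sketched below. The full header construction is not visible in this hunk, so the filename* form here is only an assumed illustration based on RFC 6266/8187, not Radicale's actual code:

    # Hypothetical sketch; Radicale's real header construction is not shown in this hunk.
    from urllib.parse import quote

    def content_disposition_attachment(filename: str, encoding: str = "utf-8") -> str:
        value = "attachment"
        try:
            encoded_filename = quote(filename, encoding=encoding)
        except UnicodeEncodeError:
            # Fall back to a bare disposition if the name cannot be encoded.
            return value
        # Extended parameter syntax (assumed): filename*=<charset>''<percent-encoded>
        return "%s; filename*=%s''%s" % (value, encoding, encoded_filename)

    print(content_disposition_attachment("événement.ics"))
    # attachment; filename*=utf-8''%C3%A9v%C3%A9nement.ics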
@@ -18,6 +18,7 @@
 # along with Radicale. If not, see <http://www.gnu.org/licenses/>.
 
 import posixpath
+import re
 from http import client
 from urllib.parse import urlparse
 
@@ -26,6 +27,22 @@ from radicale.app.base import Access, ApplicationBase
 from radicale.log import logger
 
 
+def get_server_netloc(environ: types.WSGIEnviron, force_port: bool = False):
+    if environ.get("HTTP_X_FORWARDED_HOST"):
+        host = environ["HTTP_X_FORWARDED_HOST"]
+        proto = environ.get("HTTP_X_FORWARDED_PROTO") or "http"
+        port = "443" if proto == "https" else "80"
+        port = environ["HTTP_X_FORWARDED_PORT"] or port
+    else:
+        host = environ.get("HTTP_HOST") or environ["SERVER_NAME"]
+        proto = environ["wsgi.url_scheme"]
+        port = environ["SERVER_PORT"]
+    if (not force_port and port == ("443" if proto == "https" else "80") or
+            re.search(r":\d+$", host)):
+        return host
+    return host + ":" + port
+
+
 class ApplicationPartMove(ApplicationBase):
 
     def do_MOVE(self, environ: types.WSGIEnviron, base_prefix: str,
@@ -33,7 +50,11 @@ class ApplicationPartMove(ApplicationBase):
         """Manage MOVE request."""
         raw_dest = environ.get("HTTP_DESTINATION", "")
         to_url = urlparse(raw_dest)
-        if to_url.netloc != environ["HTTP_HOST"]:
+        to_netloc_with_port = to_url.netloc
+        if to_url.port is None:
+            to_netloc_with_port += (":443" if to_url.scheme == "https"
+                                    else ":80")
+        if to_netloc_with_port != get_server_netloc(environ, force_port=True):
             logger.info("Unsupported destination address: %r", raw_dest)
             # Remote destination server, not supported
             return httputils.REMOTE_DESTINATION

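To make the effect of the new helper concrete, here is a self-contained sketch; the function body is restated from the hunk above, and the sample environ dicts are invented for illustration:

    import re

    # Restated from the hunk above, for illustration only.
    def get_server_netloc(environ, force_port=False):
        if environ.get("HTTP_X_FORWARDED_HOST"):
            host = environ["HTTP_X_FORWARDED_HOST"]
            proto = environ.get("HTTP_X_FORWARDED_PROTO") or "http"
            port = "443" if proto == "https" else "80"
            port = environ["HTTP_X_FORWARDED_PORT"] or port
        else:
            host = environ.get("HTTP_HOST") or environ["SERVER_NAME"]
            proto = environ["wsgi.url_scheme"]
            port = environ["SERVER_PORT"]
        if (not force_port and port == ("443" if proto == "https" else "80") or
                re.search(r":\d+$", host)):
            return host
        return host + ":" + port

    # Direct WSGI environ: host and port come from the server itself.
    direct = {"HTTP_HOST": "127.0.0.1", "SERVER_NAME": "127.0.0.1",
              "SERVER_PORT": "5232", "wsgi.url_scheme": "http"}
    print(get_server_netloc(direct))                    # 127.0.0.1:5232

    # Behind a reverse proxy: the X-Forwarded-* headers take precedence.
    proxied = dict(direct, HTTP_X_FORWARDED_HOST="cal.example.org",
                   HTTP_X_FORWARDED_PROTO="https", HTTP_X_FORWARDED_PORT="")
    print(get_server_netloc(proxied))                   # cal.example.org
    print(get_server_netloc(proxied, force_port=True))  # cal.example.org:443

This is why the reworked MOVE handler can accept an absolute Destination URL that names the proxy's public host rather than the backend's own HTTP_HOST.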
@@ -164,7 +164,7 @@ def check_and_sanitize_items(
                 ref_value_param = component.dtstart.params.get("VALUE")
                 for dates in chain(component.contents.get("exdate", []),
                                    component.contents.get("rdate", [])):
-                    if all(type(d) == type(ref_date) for d in dates.value):
+                    if all(type(d) is type(ref_date) for d in dates.value):
                         continue
                     for i, date in enumerate(dates.value):
                         dates.value[i] = ref_date.replace(
@@ -225,6 +225,7 @@ def visit_time_ranges(vobject_item: vobject.base.Component, child_name: str,
     def get_children(components: Iterable[vobject.base.Component]) -> Iterator[
             Tuple[vobject.base.Component, bool, List[date]]]:
         main = None
+        rec_main = None
         recurrences = []
         for comp in components:
             if hasattr(comp, "recurrence_id") and comp.recurrence_id.value:
@@ -232,11 +233,14 @@ def visit_time_ranges(vobject_item: vobject.base.Component, child_name: str,
                 if comp.rruleset:
                     # Prevent possible infinite loop
                     raise ValueError("Overwritten recurrence with RRULESET")
+                rec_main = comp
                 yield comp, True, []
             else:
                 if main is not None:
                     raise ValueError("Multiple main components")
                 main = comp
+        if main is None and len(recurrences) == 1:
+            main = rec_main
         if main is None:
             raise ValueError("Main component missing")
         yield main, False, recurrences
@@ -468,7 +472,15 @@ def text_match(vobject_item: vobject.base.Component,
             match(attrib) for child in children
             for attrib in child.params.get(attrib_name, []))
     else:
-        condition = any(match(child.value) for child in children)
+        res = []
+        for child in children:
+            # Some filters such as CATEGORIES provide a list in child.value
+            if type(child.value) is list:
+                for value in child.value:
+                    res.append(match(value))
+            else:
+                res.append(match(child.value))
+        condition = any(res)
     if filter_.get("negate-condition") == "yes":
         return not condition
     return condition

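The motivation for the per-value loop is that vobject hands CATEGORIES back as a list rather than a single string, so a text-match has to look at every entry. A quick sketch, assuming the vobject package is installed:

    import vobject

    ICS = (
        "BEGIN:VCALENDAR\r\n"
        "VERSION:2.0\r\n"
        "BEGIN:VEVENT\r\n"
        "UID:event1\r\n"
        "SUMMARY:Event\r\n"
        "CATEGORIES:some_category1,another_category2\r\n"
        "END:VEVENT\r\n"
        "END:VCALENDAR\r\n"
    )

    event = vobject.readOne(ICS).vevent
    # child.value is a list here, not a plain string:
    print(event.categories.value)  # expected: ['some_category1', 'another_category2']
    # so a text-match for "some_category1" has to test each list entry.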
radicale/log.py: 132 changed lines
@@ -25,16 +25,25 @@ Log messages are sent to the first available target of:
 
 """
 
 import contextlib
 import io
 import logging
 import os
 import socket
+import struct
 import sys
 import threading
-from typing import Any, Callable, ClassVar, Dict, Iterator, Union
+import time
+from typing import (Any, Callable, ClassVar, Dict, Iterator, Mapping, Optional,
+                    Tuple, Union, cast)
 
 from radicale import types
 
 LOGGER_NAME: str = "radicale"
-LOGGER_FORMAT: str = "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s"
+LOGGER_FORMATS: Mapping[str, str] = {
+    "verbose": "[%(asctime)s] [%(ident)s] [%(levelname)s] %(message)s",
+    "journal": "[%(ident)s] [%(levelname)s] %(message)s",
+}
 DATE_FORMAT: str = "%Y-%m-%d %H:%M:%S %z"
 
 logger: logging.Logger = logging.getLogger(LOGGER_NAME)
@@ -59,12 +68,17 @@ class IdentLogRecordFactory:
 
     def __call__(self, *args: Any, **kwargs: Any) -> logging.LogRecord:
         record = self._upstream_factory(*args, **kwargs)
-        ident = "%d" % os.getpid()
-        main_thread = threading.main_thread()
-        current_thread = threading.current_thread()
-        if current_thread.name and main_thread != current_thread:
-            ident += "/%s" % current_thread.name
+        ident = ("%d" % record.process if record.process is not None
+                 else record.processName or "unknown")
+        tid = None
+        if record.thread is not None:
+            if record.thread != threading.main_thread().ident:
+                ident += "/%s" % (record.threadName or "unknown")
+            if (sys.version_info >= (3, 8) and
+                    record.thread == threading.get_ident()):
+                tid = threading.get_native_id()
         record.ident = ident  # type:ignore[attr-defined]
+        record.tid = tid  # type:ignore[attr-defined]
         return record
@@ -75,19 +89,102 @@ class ThreadedStreamHandler(logging.Handler):
     terminator: ClassVar[str] = "\n"
 
     _streams: Dict[int, types.ErrorStream]
+    _journal_stream_id: Optional[Tuple[int, int]]
+    _journal_socket: Optional[socket.socket]
+    _journal_socket_failed: bool
+    _formatters: Mapping[str, logging.Formatter]
+    _formatter: Optional[logging.Formatter]
 
-    def __init__(self) -> None:
+    def __init__(self, format_name: Optional[str] = None) -> None:
         super().__init__()
         self._streams = {}
+        self._journal_stream_id = None
+        with contextlib.suppress(TypeError, ValueError):
+            dev, inode = os.environ.get("JOURNAL_STREAM", "").split(":", 1)
+            self._journal_stream_id = (int(dev), int(inode))
+        self._journal_socket = None
+        self._journal_socket_failed = False
+        self._formatters = {name: logging.Formatter(fmt, DATE_FORMAT)
+                            for name, fmt in LOGGER_FORMATS.items()}
+        self._formatter = (self._formatters[format_name]
+                           if format_name is not None else None)
+
+    def _get_formatter(self, default_format_name: str) -> logging.Formatter:
+        return self._formatter or self._formatters[default_format_name]
+
+    def _detect_journal(self, stream: types.ErrorStream) -> bool:
+        if not self._journal_stream_id or not isinstance(stream, io.IOBase):
+            return False
+        try:
+            stat = os.fstat(stream.fileno())
+        except OSError:
+            return False
+        return self._journal_stream_id == (stat.st_dev, stat.st_ino)
+
+    @staticmethod
+    def _encode_journal(data: Mapping[str, Optional[Union[str, int]]]
+                        ) -> bytes:
+        msg = b""
+        for key, value in data.items():
+            if value is None:
+                continue
+            keyb = key.encode()
+            valueb = str(value).encode()
+            if b"\n" in valueb:
+                msg += (keyb + b"\n" +
+                        struct.pack("<Q", len(valueb)) + valueb + b"\n")
+            else:
+                msg += keyb + b"=" + valueb + b"\n"
+        return msg
+
+    def _try_emit_journal(self, record: logging.LogRecord) -> bool:
+        if not self._journal_socket:
+            # Try to connect to systemd journal socket
+            if self._journal_socket_failed or not hasattr(socket, "AF_UNIX"):
+                return False
+            journal_socket = None
+            try:
+                journal_socket = socket.socket(
+                    socket.AF_UNIX, socket.SOCK_DGRAM)
+                journal_socket.connect("/run/systemd/journal/socket")
+            except OSError as e:
+                self._journal_socket_failed = True
+                if journal_socket:
+                    journal_socket.close()
+                # Log after setting `_journal_socket_failed` to prevent loop!
+                logger.error("Failed to connect to systemd journal: %s",
+                             e, exc_info=True)
+                return False
+            self._journal_socket = journal_socket
+
+        priority = {"DEBUG": 7,
+                    "INFO": 6,
+                    "WARNING": 4,
+                    "ERROR": 3,
+                    "CRITICAL": 2}.get(record.levelname, 4)
+        timestamp = time.strftime("%Y-%m-%dT%H:%M:%S.%%03dZ",
+                                  time.gmtime(record.created)) % record.msecs
+        data = {"PRIORITY": priority,
+                "TID": cast(Optional[int], getattr(record, "tid", None)),
+                "SYSLOG_IDENTIFIER": record.name,
+                "SYSLOG_FACILITY": 1,
+                "SYSLOG_PID": record.process,
+                "SYSLOG_TIMESTAMP": timestamp,
+                "CODE_FILE": record.pathname,
+                "CODE_LINE": record.lineno,
+                "CODE_FUNC": record.funcName,
+                "MESSAGE": self._get_formatter("journal").format(record)}
+        self._journal_socket.sendall(self._encode_journal(data))
+        return True
 
     def emit(self, record: logging.LogRecord) -> None:
         try:
             stream = self._streams.get(threading.get_ident(), sys.stderr)
-            msg = self.format(record)
-            stream.write(msg)
-            stream.write(self.terminator)
-            if hasattr(stream, "flush"):
-                stream.flush()
+            if self._detect_journal(stream) and self._try_emit_journal(record):
+                return
+            msg = self._get_formatter("verbose").format(record)
+            stream.write(msg + self.terminator)
+            stream.flush()
         except Exception:
             self.handleError(record)

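For context, _encode_journal follows systemd's native journal protocol for datagrams sent to /run/systemd/journal/socket: single-line values are framed as KEY=value followed by a newline, while values containing a newline are framed as KEY, a newline, a little-endian 64-bit length, the raw bytes and a trailing newline. A minimal, self-contained sketch of that framing with hypothetical field values (the encoder below restates the logic from the hunk):

    import struct

    def encode_journal(data):
        # Same framing as _encode_journal above: KEY=value, or a
        # length-prefixed frame for values that contain a newline.
        msg = b""
        for key, value in data.items():
            if value is None:
                continue
            keyb = key.encode()
            valueb = str(value).encode()
            if b"\n" in valueb:
                msg += keyb + b"\n" + struct.pack("<Q", len(valueb)) + valueb + b"\n"
            else:
                msg += keyb + b"=" + valueb + b"\n"
        return msg

    frame = encode_journal({"PRIORITY": 6,
                            "SYSLOG_IDENTIFIER": "radicale",
                            "MESSAGE": "first line\nsecond line"})
    print(frame)
    # b'PRIORITY=6\nSYSLOG_IDENTIFIER=radicale\nMESSAGE\n\x16\x00\x00\x00\x00\x00\x00\x00first line\nsecond line\n'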
@@ -111,13 +208,16 @@ def register_stream(stream: types.ErrorStream) -> Iterator[None]:
 def setup() -> None:
     """Set global logging up."""
     global register_stream
-    handler = ThreadedStreamHandler()
-    logging.basicConfig(format=LOGGER_FORMAT, datefmt=DATE_FORMAT,
-                        handlers=[handler])
+    format_name = os.environ.get("RADICALE_LOG_FORMAT") or None
+    sane_format_name = format_name if format_name in LOGGER_FORMATS else None
+    handler = ThreadedStreamHandler(sane_format_name)
+    logging.basicConfig(handlers=[handler])
     register_stream = handler.register_stream
     log_record_factory = IdentLogRecordFactory(logging.getLogRecordFactory())
     logging.setLogRecordFactory(log_record_factory)
     set_level(logging.WARNING)
+    if format_name != sane_format_name:
+        logger.error("Invalid RADICALE_LOG_FORMAT: %r", format_name)
 
 
 def set_level(level: Union[int, str]) -> None:

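In terms of usage, the new RADICALE_LOG_FORMAT environment variable selects one of the keys of LOGGER_FORMATS before setup() runs. A rough sketch, assuming the radicale package is importable:

    import os

    # Force the "journal" format (no timestamp; journald adds its own).
    # With the variable unset, the handler picks a format per destination,
    # and any unknown value is reported via "Invalid RADICALE_LOG_FORMAT".
    os.environ["RADICALE_LOG_FORMAT"] = "journal"

    from radicale import log
    log.setup()
    log.logger.warning("logging initialised")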
@@ -58,11 +58,16 @@ elif sys.platform == "win32":
 
 
 # IPv4 (host, port) and IPv6 (host, port, flowinfo, scopeid)
-ADDRESS_TYPE = Union[Tuple[str, int], Tuple[str, int, int, int]]
+ADDRESS_TYPE = Union[Tuple[Union[str, bytes, bytearray], int],
+                     Tuple[str, int, int, int]]
 
 
 def format_address(address: ADDRESS_TYPE) -> str:
-    return "[%s]:%d" % address[:2]
+    host, port, *_ = address
+    if not isinstance(host, str):
+        raise NotImplementedError("Unsupported address format: %r" %
+                                  (address,))
+    return "[%s]:%d" % (host, port)
 
 
 class ParallelHTTPServer(socketserver.ThreadingMixIn,

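A quick sketch of what the reworked helper now accepts (the function is restated from the hunk above): IPv4 two-tuples and IPv6 four-tuples both format to a bracketed host:port, while the non-string hosts permitted by the widened ADDRESS_TYPE are rejected explicitly instead of being formatted incorrectly:

    from typing import Tuple, Union

    ADDRESS_TYPE = Union[Tuple[Union[str, bytes, bytearray], int],
                         Tuple[str, int, int, int]]

    # Restated from the hunk above, for illustration only.
    def format_address(address: ADDRESS_TYPE) -> str:
        host, port, *_ = address
        if not isinstance(host, str):
            raise NotImplementedError("Unsupported address format: %r" % (address,))
        return "[%s]:%d" % (host, port)

    print(format_address(("127.0.0.1", 5232)))   # [127.0.0.1]:5232
    print(format_address(("::1", 5232, 0, 0)))   # [::1]:5232
    try:
        format_address((b"example", 80))         # non-str host
    except NotImplementedError as e:
        print(e)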
@@ -44,7 +44,8 @@ class CollectionBase(storage.BaseCollection):
         filesystem_path = pathutils.path_to_filesystem(folder, self.path)
         self._filesystem_path = filesystem_path
 
-    @types.contextmanager
+    # TODO: better fix for "mypy"
+    @types.contextmanager  # type: ignore
     def _atomic_write(self, path: str, mode: str = "w",
                       newline: Optional[str] = None) -> Iterator[IO[AnyStr]]:
         # TODO: Overload with Literal when dropping support for Python < 3.8
@@ -86,7 +86,8 @@ class CollectionPartCache(CollectionBase):
         content = self._item_cache_content(item)
         self._storage._makedirs_synced(cache_folder)
         # Race: Other processes might have created and locked the file.
-        with contextlib.suppress(PermissionError), self._atomic_write(
+        # TODO: better fix for "mypy"
+        with contextlib.suppress(PermissionError), self._atomic_write(  # type: ignore
                 os.path.join(cache_folder, href), "wb") as fo:
             fb = cast(BinaryIO, fo)
             pickle.dump((cache_hash, *content), fb)
@@ -61,6 +61,7 @@ class CollectionPartMeta(CollectionBase):
         return self._meta_cache if key is None else self._meta_cache.get(key)
 
     def set_meta(self, props: Mapping[str, str]) -> None:
-        with self._atomic_write(self._props_path, "w") as fo:
+        # TODO: better fix for "mypy"
+        with self._atomic_write(self._props_path, "w") as fo:  # type: ignore
             f = cast(TextIO, fo)
             json.dump(props, f, sort_keys=True)
@@ -95,7 +95,8 @@ class CollectionPartSync(CollectionPartCache, CollectionPartHistory,
             self._storage._makedirs_synced(token_folder)
             try:
                 # Race: Other processes might have created and locked the file.
-                with self._atomic_write(token_path, "wb") as fo:
+                # TODO: better fix for "mypy"
+                with self._atomic_write(token_path, "wb") as fo:  # type: ignore
                     fb = cast(BinaryIO, fo)
                     pickle.dump(state, fb)
             except PermissionError:
@@ -43,7 +43,8 @@ class CollectionPartUpload(CollectionPartGet, CollectionPartCache,
             raise ValueError("Failed to store item %r in collection %r: %s" %
                              (href, self.path, e)) from e
         path = pathutils.path_to_filesystem(self._filesystem_path, href)
-        with self._atomic_write(path, newline="") as fo:
+        # TODO: better fix for "mypy"
+        with self._atomic_write(path, newline="") as fo:  # type: ignore
             f = cast(TextIO, fo)
             f.write(item.serialize())
         # Clean the cache after the actual item is stored, or the cache entry
@@ -25,6 +25,7 @@ import logging
 import shutil
+import sys
 import tempfile
 import wsgiref.util
 import xml.etree.ElementTree as ET
 from io import BytesIO
 from typing import Any, Dict, List, Optional, Tuple, Union
@@ -83,11 +84,12 @@ class BaseTest:
                 login.encode(encoding)).decode()
         environ["REQUEST_METHOD"] = method.upper()
         environ["PATH_INFO"] = path
-        if data:
+        if data is not None:
             data_bytes = data.encode(encoding)
             environ["wsgi.input"] = BytesIO(data_bytes)
             environ["CONTENT_LENGTH"] = str(len(data_bytes))
+        environ["wsgi.errors"] = sys.stderr
         wsgiref.util.setup_testing_defaults(environ)
         status = headers = None
 
         def start_response(status_: str, headers_: List[Tuple[str, str]]
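The switch from `if data:` to `if data is not None:` matters for requests whose body is the empty string: an explicitly empty body should still produce wsgi.input and CONTENT_LENGTH of "0", but the old truthiness test skipped that branch. A tiny illustration:

    data = ""                 # intentionally empty request body
    print(bool(data))         # False -> the old "if data:" skipped the branch
    print(data is not None)   # True  -> the body is still set up, with CONTENT_LENGTH "0"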
@@ -137,8 +139,8 @@ class BaseTest:
         status, _, answer = self.request("GET", path, check=check, **kwargs)
         return status, answer
 
-    def post(self, path: str, data: str = None, check: Optional[int] = 200,
-             **kwargs) -> Tuple[int, str]:
+    def post(self, path: str, data: Optional[str] = None,
+             check: Optional[int] = 200, **kwargs) -> Tuple[int, str]:
         status, _, answer = self.request("POST", path, data, check=check,
                                          **kwargs)
         return status, answer
@@ -25,6 +25,7 @@ LAST-MODIFIED:20130902T150158Z
 DTSTAMP:20130902T150158Z
 UID:event1
 SUMMARY:Event
+CATEGORIES:some_category1,another_category2
 ORGANIZER:mailto:unclesam@example.com
 ATTENDEE;ROLE=REQ-PARTICIPANT;PARTSTAT=TENTATIVE;CN=Jane Doe:MAILTO:janedoe@example.com
 ATTENDEE;ROLE=REQ-PARTICIPANT;DELEGATED-FROM="MAILTO:bob@host.com";PARTSTAT=ACCEPTED;CN=John Doe:MAILTO:johndoe@example.com
@@ -355,7 +355,7 @@ permissions: RrWw""")
         path2 = "/calendar.ics/event2.ics"
         self.put(path1, event)
         self.request("MOVE", path1, check=201,
-                     HTTP_DESTINATION=path2, HTTP_HOST="")
+                     HTTP_DESTINATION="http://127.0.0.1/"+path2)
         self.get(path1, check=404)
         self.get(path2)
 
@@ -368,7 +368,7 @@ permissions: RrWw""")
         path2 = "/calendar2.ics/event2.ics"
         self.put(path1, event)
         self.request("MOVE", path1, check=201,
-                     HTTP_DESTINATION=path2, HTTP_HOST="")
+                     HTTP_DESTINATION="http://127.0.0.1/"+path2)
         self.get(path1, check=404)
         self.get(path2)
 
@@ -382,7 +382,7 @@ permissions: RrWw""")
         self.put(path1, event)
         self.put("/calendar2.ics/event1.ics", event)
         status, _, answer = self.request(
-            "MOVE", path1, HTTP_DESTINATION=path2, HTTP_HOST="")
+            "MOVE", path1, HTTP_DESTINATION="http://127.0.0.1/"+path2)
         assert status in (403, 409)
         xml = DefusedET.fromstring(answer)
         assert xml.tag == xmlutils.make_clark("D:error")
@@ -398,9 +398,9 @@ permissions: RrWw""")
         self.put(path1, event)
         self.put(path2, event)
         self.request("MOVE", path1, check=412,
-                     HTTP_DESTINATION=path2, HTTP_HOST="")
-        self.request("MOVE", path1, check=204,
-                     HTTP_DESTINATION=path2, HTTP_HOST="", HTTP_OVERWRITE="T")
+                     HTTP_DESTINATION="http://127.0.0.1/"+path2)
+        self.request("MOVE", path1, check=204, HTTP_OVERWRITE="T",
+                     HTTP_DESTINATION="http://127.0.0.1/"+path2)
 
     def test_move_between_colections_overwrite_uid_conflict(self) -> None:
         """Move a item to a collection which already contains the item with
@@ -413,8 +413,9 @@ permissions: RrWw""")
         path2 = "/calendar2.ics/event2.ics"
         self.put(path1, event1)
         self.put(path2, event2)
-        status, _, answer = self.request("MOVE", path1, HTTP_DESTINATION=path2,
-                                         HTTP_HOST="", HTTP_OVERWRITE="T")
+        status, _, answer = self.request(
+            "MOVE", path1, HTTP_OVERWRITE="T",
+            HTTP_DESTINATION="http://127.0.0.1/"+path2)
         assert status in (403, 409)
         xml = DefusedET.fromstring(answer)
         assert xml.tag == xmlutils.make_clark("D:error")
@@ -916,6 +917,22 @@ permissions: RrWw""")
       <C:text-match>event</C:text-match>
     </C:prop-filter>
   </C:comp-filter>
 </C:comp-filter>"""])
+        assert "/calendar.ics/event1.ics" in self._test_filter(["""\
+<C:comp-filter name="VCALENDAR">
+  <C:comp-filter name="VEVENT">
+    <C:prop-filter name="CATEGORIES">
+      <C:text-match>some_category1</C:text-match>
+    </C:prop-filter>
+  </C:comp-filter>
+</C:comp-filter>"""])
+        assert "/calendar.ics/event1.ics" in self._test_filter(["""\
+<C:comp-filter name="VCALENDAR">
+  <C:comp-filter name="VEVENT">
+    <C:prop-filter name="CATEGORIES">
+      <C:text-match collation="i;octet">some_category1</C:text-match>
+    </C:prop-filter>
+  </C:comp-filter>
+</C:comp-filter>"""])
         assert "/calendar.ics/event1.ics" not in self._test_filter(["""\
 <C:comp-filter name="VCALENDAR">
@@ -1471,7 +1488,7 @@ permissions: RrWw""")
         sync_token, responses = self._report_sync_token(calendar_path)
         assert len(responses) == 1 and responses[event1_path] == 200
         self.request("MOVE", event1_path, check=201,
-                     HTTP_DESTINATION=event2_path, HTTP_HOST="")
+                     HTTP_DESTINATION="http://127.0.0.1/"+event2_path)
         sync_token, responses = self._report_sync_token(
             calendar_path, sync_token)
         if not self.full_sync_token_support and not sync_token:
@@ -1490,9 +1507,9 @@ permissions: RrWw""")
         sync_token, responses = self._report_sync_token(calendar_path)
         assert len(responses) == 1 and responses[event1_path] == 200
         self.request("MOVE", event1_path, check=201,
-                     HTTP_DESTINATION=event2_path, HTTP_HOST="")
+                     HTTP_DESTINATION="http://127.0.0.1/"+event2_path)
         self.request("MOVE", event2_path, check=201,
-                     HTTP_DESTINATION=event1_path, HTTP_HOST="")
+                     HTTP_DESTINATION="http://127.0.0.1/"+event1_path)
         sync_token, responses = self._report_sync_token(
             calendar_path, sync_token)
         if not self.full_sync_token_support and not sync_token:
@@ -60,8 +60,9 @@ class TestBaseServerRequests(BaseTest):
         with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
             # Find available port
             sock.bind(("127.0.0.1", 0))
+            self.sockfamily = socket.AF_INET
             self.sockname = sock.getsockname()
-        self.configure({"server": {"hosts": "[%s]:%d" % self.sockname},
+        self.configure({"server": {"hosts": "%s:%d" % self.sockname},
                         # Enable debugging for new processes
                         "logging": {"level": "debug"}})
         self.thread = threading.Thread(target=server.serve, args=(
@@ -105,8 +106,12 @@ class TestBaseServerRequests(BaseTest):
         data_bytes = None
         if data:
             data_bytes = data.encode(encoding)
+        if self.sockfamily == socket.AF_INET6:
+            req_host = ("[%s]" % self.sockname[0])
+        else:
+            req_host = self.sockname[0]
         req = request.Request(
-            "%s://[%s]:%d%s" % (scheme, *self.sockname, path),
+            "%s://%s:%d%s" % (scheme, req_host, self.sockname[1], path),
             data=data_bytes, headers=headers, method=method)
         while True:
             assert is_alive_fn()
@@ -161,6 +166,7 @@ class TestBaseServerRequests(BaseTest):
                     server.COMPAT_IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
                 # Find available port
                 sock.bind(("::1", 0))
+                self.sockfamily = socket.AF_INET6
                 self.sockname = sock.getsockname()[:2]
         except OSError as e:
             if e.errno in (errno.EADDRNOTAVAIL, errno.EAFNOSUPPORT,
@@ -50,8 +50,8 @@ if sys.version_info >= (3, 8):
 
     @runtime_checkable
     class ErrorStream(Protocol):
-        def flush(self) -> None: ...
-        def write(self, s: str) -> None: ...
+        def flush(self) -> object: ...
+        def write(self, s: str) -> object: ...
 else:
     ErrorStream = Any
     InputStream = Any

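Returning object instead of None in the protocol is what lets ordinary file objects qualify: their write() returns an int and flush() returns None, and a structural type that demands exactly None would reject them under mypy. Roughly:

    import sys
    from typing import Protocol, runtime_checkable  # Python >= 3.8

    @runtime_checkable
    class ErrorStream(Protocol):
        def flush(self) -> object: ...
        def write(self, s: str) -> object: ...

    # sys.stderr.write() returns the number of characters written (an int).
    # With "-> None" in the protocol, mypy flags that as an incompatible
    # return type; "-> object" accepts any return value.  The runtime check
    # below only verifies that the methods exist.
    print(isinstance(sys.stderr, ErrorStream))  # True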
@@ -1,23 +1,27 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <script src="fn.js"></script>
  <title>Radicale Web Interface</title>
  <link href="css/main.css" media="screen" rel="stylesheet">
  <link href="css/icon.png" type="image/png" rel="shortcut icon">
  <link href="css/icon.png" type="image/png" rel="icon">
  <style>
    .hidden {display:none;}
  </style>
</head>

<body>
  <nav>
    <ul>
      <li id="logoutview" class="hidden"><a href="" data-name="link">Logout [<span data-name="user" style="word-wrap:break-word;"></span>]</a></li>
    </ul>
  </nav>

  <main>
    <section id="loadingscene">
      <h1>Loading</h1>
      <p>Please wait...</p>
@@ -128,3 +132,7 @@
      <button type="button" data-name="cancel">No</button>
    </form>
  </section>
  </main>
</body>

</html>