Mirror of https://github.com/Kozea/Radicale.git, synced 2025-08-01 18:18:31 +00:00
Rework configuration
parent 63e6d091b9
commit b7590f8c84
19 changed files with 609 additions and 220 deletions
@@ -2,7 +2,7 @@
 # Copyright © 2008-2017 Guillaume Ayoub
 # Copyright © 2008 Nicolas Kandel
 # Copyright © 2008 Pascal Halter
-# Copyright © 2017-2018 Unrud <unrud@outlook.com>
+# Copyright © 2017-2019 Unrud <unrud@outlook.com>
 #
 # This library is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -27,9 +27,14 @@ Give a configparser-like interface to read and write configuration.
 import math
 import os
 from collections import OrderedDict
-from configparser import RawConfigParser as ConfigParser
+from configparser import RawConfigParser
 
 from radicale import auth, rights, storage, web
+from radicale.log import logger
+
+DEFAULT_CONFIG_PATH = os.pathsep.join([
+    "?/etc/radicale/config",
+    "?~/.config/radicale/config"])
 
 
 def positive_int(value):
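For orientation: the DEFAULT_CONFIG_PATH added above is a "compound path", several candidate locations joined by os.pathsep, where a leading "?" marks an entry that may be missing without raising an error (it is consumed by parse_compound_paths, introduced further down in this diff). A minimal, self-contained sketch of that convention:

import os

# Same construction as above: candidate locations joined by os.pathsep,
# "?" meaning "skip this entry silently if the file does not exist".
DEFAULT_CONFIG_PATH = os.pathsep.join([
    "?/etc/radicale/config",
    "?~/.config/radicale/config"])

for entry in DEFAULT_CONFIG_PATH.split(os.pathsep):
    optional = entry.startswith("?")
    print(entry.lstrip("?"), "(ignored if missing)" if optional else "(required)")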
@@ -52,18 +57,43 @@ def positive_float(value):
 
 def logging_level(value):
     if value not in ("debug", "info", "warning", "error", "critical"):
-        raise ValueError("unsupported level: %s" % value)
+        raise ValueError("unsupported level: %r" % value)
     return value
 
 
+def filepath(value):
+    if not value:
+        return ""
+    value = os.path.expanduser(value)
+    if os.name == "nt":
+        value = os.path.expandvars(value)
+    return os.path.abspath(value)
+
+
+def list_of_ip_address(value):
+    def ip_address(value):
+        try:
+            address, port = value.strip().rsplit(":", 1)
+            return address.strip("[] "), int(port)
+        except ValueError:
+            raise ValueError("malformed IP address: %r" % value)
+    return [ip_address(s.strip()) for s in value.split(",")]
+
+
+def _convert_to_bool(value):
+    if value.lower() not in RawConfigParser.BOOLEAN_STATES:
+        raise ValueError("Not a boolean: %r" % value)
+    return RawConfigParser.BOOLEAN_STATES[value.lower()]
+
+
 # Default configuration
-INITIAL_CONFIG = OrderedDict([
+DEFAULT_CONFIG_SCHEMA = OrderedDict([
     ("server", OrderedDict([
         ("hosts", {
             "value": "127.0.0.1:5232",
             "help": "set server hostnames including ports",
             "aliases": ["-H", "--hosts"],
-            "type": str}),
+            "type": list_of_ip_address}),
         ("max_connections", {
             "value": "8",
             "help": "maximum number of parallel connections",
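The converter functions added in this hunk are used as the "type" callables in the schema: they validate a raw option string and return a typed value, raising ValueError on bad input. A quick sketch, assuming the module shown here remains importable as radicale.config after this commit; the input strings are illustrative:

from radicale.config import _convert_to_bool, list_of_ip_address, logging_level

print(list_of_ip_address("127.0.0.1:5232, [::1]:5232"))
# [('127.0.0.1', 5232), ('::1', 5232)]

print(logging_level("warning"))    # 'warning' (anything else raises ValueError)
print(_convert_to_bool("yes"))     # True, via RawConfigParser.BOOLEAN_STATES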
@@ -86,17 +116,17 @@ INITIAL_CONFIG = OrderedDict([
             "value": "/etc/ssl/radicale.cert.pem",
             "help": "set certificate file",
             "aliases": ["-c", "--certificate"],
-            "type": str}),
+            "type": filepath}),
         ("key", {
             "value": "/etc/ssl/radicale.key.pem",
             "help": "set private key file",
             "aliases": ["-k", "--key"],
-            "type": str}),
+            "type": filepath}),
         ("certificate_authority", {
             "value": "",
             "help": "set CA certificate for validating clients",
             "aliases": ["--certificate-authority"],
-            "type": str}),
+            "type": filepath}),
         ("protocol", {
             "value": "PROTOCOL_TLSv1_2",
             "help": "SSL protocol used",
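Switching the TLS options from str to filepath means the configured paths are normalised when the value is converted: the user directory is expanded, environment variables are expanded on Windows, and the result is made absolute. A short sketch, again assuming the module is importable as radicale.config:

from radicale.config import filepath

print(filepath("~/ssl/radicale.cert.pem"))
# e.g. '/home/user/ssl/radicale.cert.pem' (expanded and made absolute)
print(filepath(""))
# ''  -- the empty default for certificate_authority is passed through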
@@ -127,7 +157,7 @@ INITIAL_CONFIG = OrderedDict([
         ("htpasswd_filename", {
             "value": "/etc/radicale/users",
             "help": "htpasswd filename",
-            "type": str}),
+            "type": filepath}),
         ("htpasswd_encryption", {
             "value": "bcrypt",
             "help": "htpasswd encryption method",
@@ -149,7 +179,7 @@ INITIAL_CONFIG = OrderedDict([
         ("file", {
             "value": "/etc/radicale/rights",
             "help": "file for rights management from_file",
-            "type": str})])),
+            "type": filepath})])),
     ("storage", OrderedDict([
         ("type", {
             "value": "multifilesystem",
@@ -157,14 +187,13 @@ INITIAL_CONFIG = OrderedDict([
             "type": str,
             "internal": storage.INTERNAL_TYPES}),
         ("filesystem_folder", {
-            "value": os.path.expanduser(
-                "/var/lib/radicale/collections"),
+            "value": "/var/lib/radicale/collections",
             "help": "path where collections are stored",
-            "type": str}),
+            "type": filepath}),
         ("max_sync_token_age", {
             "value": "2592000",  # 30 days
             "help": "delete sync token that are older",
-            "type": int}),
+            "type": positive_int}),
         ("hook", {
             "value": "",
             "help": "command that is run after changes to storage",
@@ -183,62 +212,208 @@ INITIAL_CONFIG = OrderedDict([
         ("mask_passwords", {
             "value": "True",
             "help": "mask passwords in logs",
-            "type": bool})]))])
-
-# Default configuration for "internal" settings
-INTERNAL_CONFIG = OrderedDict([
-    ("filesystem_fsync", {
-        "value": "True",
-        "help": "sync all changes to filesystem during requests",
-        "type": bool}),
-    ("internal_server", {
-        "value": "False",
-        "help": "the internal server is used",
-        "type": bool})])
-
-
-def load(paths=(), ignore_missing_paths=True):
-    config = ConfigParser()
-    for section, values in INITIAL_CONFIG.items():
-        config.add_section(section)
-        for key, data in values.items():
-            config.set(section, key, data["value"])
-    for path in paths:
-        if path or not ignore_missing_paths:
-            try:
-                if not config.read(path) and not ignore_missing_paths:
-                    raise RuntimeError("No such file: %r" % path)
-            except Exception as e:
-                raise RuntimeError(
-                    "Failed to load config file %r: %s" % (path, e)) from e
-    # Check the configuration
-    for section in config.sections():
-        if section == "headers":
-            continue
-        if section not in INITIAL_CONFIG:
-            raise RuntimeError("Invalid section %r in config" % section)
-        allow_extra_options = ("type" in INITIAL_CONFIG[section] and
-                               config.get(section, "type") not in
-                               INITIAL_CONFIG[section]["type"].get("internal",
-                                                                   ()))
-        for option in config[section]:
-            if option not in INITIAL_CONFIG[section]:
-                if allow_extra_options:
-                    continue
-                raise RuntimeError("Invalid option %r in section %r in "
-                                   "config" % (option, section))
-            type_ = INITIAL_CONFIG[section][option]["type"]
-            try:
-                if type_ == bool:
-                    config.getboolean(section, option)
-                else:
-                    type_(config.get(section, option))
-            except Exception as e:
-                raise RuntimeError(
-                    "Invalid %s value for option %r in section %r in config: "
-                    "%r" % (type_.__name__, option, section,
-                            config.get(section, option))) from e
-    # Add internal configuration
-    config.add_section("internal")
-    for key, data in INTERNAL_CONFIG.items():
-        config.set("internal", key, data["value"])
-    return config
+            "type": bool})])),
+    ("headers", OrderedDict([
+        ("_allow_extra", True)])),
+    ("internal", OrderedDict([
+        ("_internal", True),
+        ("filesystem_fsync", {
+            "value": "True",
+            "help": "sync all changes to filesystem during requests",
+            "type": bool}),
+        ("internal_server", {
+            "value": "False",
+            "help": "the internal server is used",
+            "type": bool})]))])
+
+
+def parse_compound_paths(*compound_paths):
+    """Parse a compound path and return the individual paths.
+
+    Paths in a compound path are joined by ``os.pathsep``. If a path starts
+    with ``?`` the return value ``IGNORE_IF_MISSING`` is set.
+
+    When multiple ``compound_paths`` are passed, the last argument that is
+    not ``None`` is used.
+
+    Returns a dict of the format ``[(PATH, IGNORE_IF_MISSING), ...]``
+
+    """
+    compound_path = ""
+    for p in compound_paths:
+        if p is not None:
+            compound_path = p
+    paths = []
+    for path in compound_path.split(os.pathsep):
+        ignore_if_missing = path.startswith("?")
+        if ignore_if_missing:
+            path = path[1:]
+        path = filepath(path)
+        if path:
+            paths.append((path, ignore_if_missing))
+    return paths
+
+
+def load(paths=()):
+    """Load configuration from files.
+
+    ``paths`` a list of the format ``[(PATH, IGNORE_IF_MISSING), ...]``.
+
+    """
+    configuration = Configuration(DEFAULT_CONFIG_SCHEMA)
+    for path, ignore_if_missing in paths:
+        parser = RawConfigParser()
+        config_source = "config file %r" % path
+        try:
+            if not parser.read(path):
+                config = Configuration.SOURCE_MISSING
+                if not ignore_if_missing:
+                    raise RuntimeError("No such file: %r" % path)
+            else:
+                config = {s: {o: parser[s][o] for o in parser.options(s)}
+                          for s in parser.sections()}
+        except Exception as e:
+            raise RuntimeError(
+                "Failed to load %s: %s" % (config_source, e)) from e
+        configuration.update(config, config_source, internal=False)
+    return configuration
+
+
+class Configuration:
+    SOURCE_MISSING = {}
+
+    def __init__(self, schema):
+        """Initialize configuration.
+
+        ``schema`` a dict that describes the configuration format.
+        See ``DEFAULT_CONFIG_SCHEMA``.
+
+        """
+        self._schema = schema
+        self._values = {}
+        self._configs = []
+        values = {}
+        for section in schema:
+            values[section] = {}
+            for option in schema[section]:
+                if option.startswith("_"):
+                    continue
+                values[section][option] = schema[section][option]["value"]
+        self.update(values, "default config")
+
+    def update(self, config, source, internal=True):
+        """Update the configuration.
+
+        ``config`` a dict of the format {SECTION: {OPTION: VALUE, ...}, ...}.
+        Set to ``Configuration.SOURCE_MISSING`` to indicate a missing
+        configuration source for inspection.
+
+        ``source`` a description of the configuration source
+
+        ``internal`` allows updating "_internal" sections and skips the source
+        during inspection.
+
+        """
+        new_values = {}
+        for section in config:
+            if (section not in self._schema or not internal and
+                    self._schema[section].get("_internal", False)):
+                raise RuntimeError(
+                    "Invalid section %r in %s" % (section, source))
+            new_values[section] = {}
+            if "_allow_extra" in self._schema[section]:
+                allow_extra_options = self._schema[section]["_allow_extra"]
+            elif "type" in self._schema[section]:
+                if "type" in config[section]:
+                    plugin_type = config[section]["type"]
+                else:
+                    plugin_type = self.get(section, "type")
+                allow_extra_options = plugin_type not in self._schema[section][
+                    "type"].get("internal", [])
+            else:
+                allow_extra_options = False
+            for option in config[section]:
+                if option in self._schema[section]:
+                    type_ = self._schema[section][option]["type"]
+                elif allow_extra_options:
+                    type_ = str
+                else:
+                    raise RuntimeError("Invalid option %r in section %r in "
+                                       "%s" % (option, section, source))
+                raw_value = config[section][option]
+                try:
+                    if type_ == bool:
+                        raw_value = _convert_to_bool(raw_value)
+                    new_values[section][option] = type_(raw_value)
+                except Exception as e:
+                    raise RuntimeError(
+                        "Invalid %s value for option %r in section %r in %s: "
+                        "%r" % (type_.__name__, option, section, source,
+                                raw_value)) from e
+        self._configs.append((config, source, internal))
+        for section in new_values:
+            if section not in self._values:
+                self._values[section] = {}
+            for option in new_values[section]:
+                self._values[section][option] = new_values[section][option]
+
+    def get(self, section, option):
+        """Get the value of ``option`` in ``section``."""
+        return self._values[section][option]
+
+    def get_raw(self, section, option):
+        """Get the raw value of ``option`` in ``section``."""
+        fconfig = self._configs[0]
+        for config, _, _ in reversed(self._configs):
+            if section in config and option in config[section]:
+                fconfig = config
+                break
+        return fconfig[section][option]
+
+    def sections(self):
+        """List all sections."""
+        return self._values.keys()
+
+    def options(self, section):
+        """List all options in ``section``"""
+        return self._values[section].keys()
+
+    def copy(self, plugin_schema=None):
+        """Create a copy of the configuration
+
+        ``plugin_schema`` is a optional dict that contains additional options
+        for usage with a plugin. See ``DEFAULT_CONFIG_SCHEMA``.
+
+        """
+        if plugin_schema is None:
+            schema = self._schema
+            skip = 1  # skip default config
+        else:
+            skip = 0
+            schema = self._schema.copy()
+            for section, options in plugin_schema.items():
+                if (section not in schema or "type" not in schema[section] or
+                        "internal" not in schema[section]["type"]):
+                    raise ValueError("not a plugin section: %r" % section)
+                schema[section] = schema[section].copy()
+                schema[section]["type"] = schema[section]["type"].copy()
+                schema[section]["type"]["internal"] = [
+                    self.get(section, "type")]
+                for option, value in options.items():
+                    if option in schema[section]:
+                        raise ValueError("option already exists in %r: %r" % (
+                            section, option))
+                    schema[section][option] = value
+        copy = self.__class__(schema)
+        for config, source, allow_internal in self._configs[skip:]:
+            copy.update(config, source, allow_internal)
+        return copy
+
+    def inspect(self):
+        """Inspect all external config sources and write problems to logger."""
+        for config, source, internal in self._configs:
+            if internal:
+                continue
+            if config is self.SOURCE_MISSING:
+                logger.info("Skipped missing %s", source)
+            else:
+                logger.info("Parsed %s", source)
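Taken together, the reworked module is meant to be driven roughly as follows. This is a sketch under the assumption that the module shown in this diff is importable as radicale.config; the resolved paths and the log output depend on the machine it runs on.

from radicale import config

# Split the compound default path; "?" entries may be missing without error.
paths = config.parse_compound_paths(config.DEFAULT_CONFIG_PATH)
# e.g. [('/etc/radicale/config', True), ('/home/user/.config/radicale/config', True)]

configuration = config.load(paths)
print(configuration.get("server", "hosts"))   # [('127.0.0.1', 5232)] by default
configuration.inspect()  # logs which sources were parsed and which were skipped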
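update() layers further sources on top of the defaults, converting raw strings through the schema's type callables, while get_raw() still returns the untouched string from the most recent source that set the option. A small sketch; the source label and option value are illustrative:

from radicale import config

configuration = config.load()  # built-in defaults only

configuration.update(
    {"server": {"hosts": "192.0.2.1:5232, [::1]:5232"}},
    "command line arguments", internal=False)

print(configuration.get("server", "hosts"))
# [('192.0.2.1', 5232), ('::1', 5232)]  -- converted by list_of_ip_address
print(configuration.get_raw("server", "hosts"))
# '192.0.2.1:5232, [::1]:5232'          -- raw value from the last source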
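copy(plugin_schema=...) lets a plugin declare extra options for its own section; the section must carry a "type" entry with an "internal" list, as the storage section does above. The plugin option and its value below are hypothetical:

from radicale import config

configuration = config.load()

# Hypothetical schema fragment contributed by a custom storage plugin.
plugin_schema = {"storage": {"database_url": {
    "value": "sqlite:///:memory:",
    "help": "connection string used by the plugin",
    "type": str}}}

plugin_configuration = configuration.copy(plugin_schema)
print(plugin_configuration.get("storage", "database_url"))  # the plugin default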