Merge pull request #417 from adamcik/feature/config-api-cleanup
Move towards stable config APIs.
commit 3c4dfc3777
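For orientation, a minimal sketch of the call shape this change moves towards; the config file path and empty override list are illustrative, and load() now returns the validated config together with per-section errors instead of exiting on failure:

import logging

from mopidy import config as config_lib
from mopidy import ext

installed_extensions = ext.load_extensions()
# One call folds in bundled defaults, extension defaults, config files,
# command line overrides and schema validation.
config, errors = config_lib.load(
    ['/etc/mopidy/mopidy.conf'], installed_extensions, [])
for section, section_errors in errors.items():
    for key, msg in section_errors.items():
        logging.error('Config value %s/%s %s', section, key, msg)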
@@ -42,35 +42,47 @@ def main():
config_files = options.config.split(':')
config_overrides = options.overrides

extensions = [] # Make sure it is defined before the finally block
enabled_extensions = [] # Make sure it is defined before the finally block

# TODO: figure out a way to make the boilerplate in this file reusable in
# scanner and other places we need it.

try:
create_file_structures()
# TODO: run raw logging config trough escape code etc, or just validate?
logging_config = config_lib.load(config_files, config_overrides)
# Initial config without extensions to bootstrap logging.
logging_config, _ = config_lib.load(config_files, [], config_overrides)

# TODO: setup_logging needs defaults in-case config values are None
log.setup_logging(
logging_config, options.verbosity_level, options.save_debug_log)

installed_extensions = ext.load_extensions()
extensions = ext.validate_extensions(installed_extensions)
raw_config = config_lib.load(config_files, config_overrides, extensions)
extensions = ext.filter_enabled_extensions(raw_config, extensions)
config = config_lib.validate(
raw_config, config_lib.core_schemas, extensions)
log.setup_log_levels(config)
check_old_locations()
ext.register_gstreamer_elements(extensions)

# TODO: wrap config in RO proxy.
config, config_errors = config_lib.load(
config_files, installed_extensions, config_overrides)

# Filter out disabled extensions and remove any config errors for them.
for extension in installed_extensions:
enabled = config[extension.ext_name]['enabled']
if ext.validate_extension(extension) and enabled:
enabled_extensions.append(extension)
elif extension.ext_name in config_errors:
del config_errors[extension.ext_name]

log_extension_info(installed_extensions, enabled_extensions)
check_config_errors(config_errors)

log.setup_log_levels(config)
create_file_structures()
check_old_locations()
ext.register_gstreamer_elements(enabled_extensions)

# Anything that wants to exit after this point must use
# mopidy.utils.process.exit_process as actors have been started.
audio = setup_audio(config)
backends = setup_backends(config, extensions, audio)
backends = setup_backends(config, enabled_extensions, audio)
core = setup_core(audio, backends)
setup_frontends(config, extensions, core)
setup_frontends(config, enabled_extensions, core)
loop.run()
except KeyboardInterrupt:
logger.info('Interrupted. Exiting...')
@@ -78,13 +90,32 @@ def main():
logger.exception(ex)
finally:
loop.quit()
stop_frontends(extensions)
stop_frontends(enabled_extensions)
stop_core()
stop_backends(extensions)
stop_backends(enabled_extensions)
stop_audio()
process.stop_remaining_actors()


def log_extension_info(all_extensions, enabled_extensions):
# TODO: distinguish disabled vs blocked by env?
enabled_names = set(e.ext_name for e in enabled_extensions)
disabled_names = set(e.ext_name for e in all_extensions) - enabled_names
logging.info(
'Enabled extensions: %s', ', '.join(enabled_names) or 'none')
logging.info(
'Disabled extensions: %s', ', '.join(disabled_names) or 'none')


def check_config_errors(errors):
if not errors:
return
for section in errors:
for key, msg in errors[section].items():
logger.error('Config value %s/%s %s', section, key, msg)
sys.exit(1)


def check_config_override(option, opt, override):
try:
return config_lib.parse_override(override)
@@ -142,31 +173,20 @@ def show_config_callback(option, opt, value, parser):
overrides = getattr(parser.values, 'overrides', [])

extensions = ext.load_extensions()
raw_config = config_lib.load(files, overrides, extensions)
enabled_extensions = ext.filter_enabled_extensions(raw_config, extensions)
config = config_lib.validate(
raw_config, config_lib.core_schemas, enabled_extensions)

# TODO: create mopidy.config.format?
output = []
for schema in config_lib.core_schemas:
options = config.get(schema.name, {})
if not options:
continue
output.append(schema.format(options))
config, errors = config_lib.load(files, extensions, overrides)

# Clear out any config for disabled extensions.
for extension in extensions:
schema = extension.get_config_schema()
if not ext.validate_extension(extension):
config[extension.ext_name] = {b'enabled': False}
errors[extension.ext_name] = {
b'enabled': b'extension disabled its self.'}
elif not config[extension.ext_name]['enabled']:
config[extension.ext_name] = {b'enabled': False}
errors[extension.ext_name] = {
b'enabled': b'extension disabled by config.'}

if extension in enabled_extensions:
options = config.get(schema.name, {})
output.append(schema.format(options))
else:
lines = ['[%s]' % schema.name, 'enabled = false',
'# Config hidden as extension is disabled']
output.append('\n'.join(lines))

print '\n\n'.join(output)
print config_lib.format(config, extensions, errors)
sys.exit(0)


@@ -4,7 +4,6 @@ import ConfigParser as configparser
import io
import logging
import os.path
import sys

from mopidy.config.schemas import *
from mopidy.config.types import *
@@ -32,7 +31,7 @@ _proxy_schema['password'] = Secret(optional=True)
# NOTE: if multiple outputs ever comes something like LogLevelConfigSchema
#_outputs_schema = config.AudioOutputConfigSchema()

core_schemas = [_logging_schema, _loglevels_schema, _audio_schema, _proxy_schema]
_schemas = [_logging_schema, _loglevels_schema, _audio_schema, _proxy_schema]


def read(config_file):
@@ -41,15 +40,27 @@ def read(config_file):
return filehandle.read()


def load(files, overrides, extensions=None):
def load(files, extensions, overrides):
# Helper to get configs, as the rest of our config system should not need
# to know about extensions.
config_dir = os.path.dirname(__file__)
defaults = [read(os.path.join(config_dir, 'default.conf'))]
if extensions:
defaults.extend(e.get_default_config() for e in extensions)
return _load(files, defaults, overrides)
defaults.extend(e.get_default_config() for e in extensions)
raw_config = _load(files, defaults, overrides)

schemas = _schemas[:]
schemas.extend(e.get_config_schema() for e in extensions)
return _validate(raw_config, schemas)


def format(config, extensions, comments=None, display=True):
# Helper to format configs, as the rest of our config system should not
# need to know about extensions.
schemas = _schemas[:]
schemas.extend(e.get_config_schema() for e in extensions)
return _format(config, comments or {}, schemas, display)
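As a rough illustration of the helper above (mirroring show_config_callback in the same change; the file path is made up), validation errors double as per-key comments in the printed output:

from mopidy import config as config_lib
from mopidy import ext

extensions = ext.load_extensions()
config, errors = config_lib.load(['/etc/mopidy/mopidy.conf'], extensions, [])
# Serialize the validated config back to INI-style text for display.
print config_lib.format(config, extensions, errors)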

# TODO: replace load() with this version of API.
def _load(files, defaults, overrides):
parser = configparser.RawConfigParser()

@@ -82,39 +93,36 @@ def _load(files, defaults, overrides):
return raw_config


def validate(raw_config, schemas, extensions=None):
# Collect config schemas to validate against
extension_schemas = [e.get_config_schema() for e in extensions or []]
config, errors = _validate(raw_config, schemas + extension_schemas)

if errors:
# TODO: raise error instead.
#raise exceptions.ConfigError(errors)
for error in errors:
logger.error(error)
sys.exit(1)

return config


# TODO: replace validate() with this version of API.
def _validate(raw_config, schemas):
# Get validated config
config = {}
errors = []
errors = {}
for schema in schemas:
try:
items = raw_config[schema.name].items()
config[schema.name] = schema.convert(items)
except KeyError:
errors.append('%s: section not found.' % schema.name)
except exceptions.ConfigError as error:
for key in error:
errors.append('%s/%s: %s' % (schema.name, key, error[key]))
# TODO: raise errors instead of return
values = raw_config.get(schema.name, {})
result, error = schema.deserialize(values)
if error:
errors[schema.name] = error
if result:
config[schema.name] = result
return config, errors
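A toy walk-through of the (config, errors) pair built here, assuming one hand-rolled schema; the section name, keys and values are made up:

from mopidy import config as config_lib

schema = config_lib.ConfigSchema('proxy')
schema['hostname'] = config_lib.Hostname(optional=True)

raw_config = {'proxy': {'hostname': 'localhost', 'pasword': 'secret'}}
config, errors = config_lib._validate(raw_config, [schema])
# config comes back as roughly {'proxy': {'hostname': 'localhost'}} while
# errors holds the per-key messages from ConfigSchema.deserialize, here
# roughly {'proxy': {'pasword': 'unknown config key.'}}.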

def _format(config, comments, schemas, display):
output = []
for schema in schemas:
serialized = schema.serialize(config.get(schema.name, {}), display=display)
output.append(b'[%s]' % schema.name)
for key, value in serialized.items():
comment = comments.get(schema.name, {}).get(key, b'')
output.append(b'%s =' % key)
if value is not None:
output[-1] += b' ' + value
if comment:
output[-1] += b' # ' + comment.capitalize()
output.append(b'')
return b'\n'.join(output)


def parse_override(override):
"""Parse ``section/key=value`` command line overrides"""
section, remainder = override.split('/', 1)

@@ -1,8 +1,12 @@
from __future__ import unicode_literals

from mopidy import exceptions
import collections

from mopidy.config import types

# TODO: 2.6 cleanup (#344).
ordered_dict = getattr(collections, 'OrderedDict', dict)


def _did_you_mean(name, choices):
"""Suggest most likely setting based on levenshtein."""
@@ -40,9 +44,11 @@ class ConfigSchema(object):
"""Logical group of config values that correspond to a config section.

Schemas are set up by assigning config keys with config values to
instances. Once setup :meth:`convert` can be called with a list of
``(key, value)`` tuples to process. For convienience we also support
:meth:`format` method that can used for printing out the converted values.
instances. Once setup :meth:`deserialize` can be called with a dict of
values to process. For convienience we also support :meth:`format` method
that can used for converting the values to a dict that can be printed and
:meth:`serialize` for converting the values to a form suitable for
persistence.
"""
# TODO: Use collections.OrderedDict once 2.6 support is gone (#344)
def __init__(self, name):
@@ -58,43 +64,38 @@ class ConfigSchema(object):
def __getitem__(self, key):
return self._schema[key]

def format(self, values):
"""Returns the schema as a config section with the given ``values``
filled in"""
# TODO: should the output be encoded utf-8 since we use that in
# serialize for strings?
lines = ['[%s]' % self.name]
for key in self._order:
value = values.get(key)
if value is not None:
lines.append('%s = %s' % (
key, self._schema[key].format(value)))
return '\n'.join(lines)
def deserialize(self, values):
"""Validates the given ``values`` using the config schema.

def convert(self, items):
"""Validates the given ``items`` using the config schema and returns
clean values"""
Returns a tuple with cleaned values and errors."""
errors = {}
values = {}
result = {}

for key, value in items:
for key, value in values.items():
try:
values[key] = self._schema[key].deserialize(value)
result[key] = self._schema[key].deserialize(value)
except KeyError: # not in our schema
errors[key] = 'unknown config key.'
suggestion = _did_you_mean(key, self._schema.keys())
if suggestion:
errors[key] += ' Did you mean %s?' % suggestion
except ValueError as e: # deserialization failed
result[key] = None
errors[key] = str(e)

for key in self._schema:
if key not in values and key not in errors:
if key not in result and key not in errors:
result[key] = None
errors[key] = 'config key not found.'

if errors:
raise exceptions.ConfigError(errors)
return values
return result, errors

def serialize(self, values, display=False):
result = ordered_dict() # TODO: 2.6 cleanup (#344).
for key in self._order:
if key in values:
result[key] = self._schema[key].serialize(values[key], display)
return result
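For reference, a small sketch of the new deserialize/serialize pair on a hand-built schema; the section and keys are made up:

from mopidy.config import schemas, types

schema = schemas.ConfigSchema('example')
schema['enabled'] = types.Boolean()
schema['timeout'] = types.Integer()

result, errors = schema.deserialize({'enabled': 'true', 'timeout': 'ten'})
# 'enabled' converts cleanly to True; 'timeout' fails, so result['timeout'] is
# None and errors['timeout'] carries the ValueError message.

serialized = schema.serialize(result)
# Ordered dict of byte strings suitable for writing back out, roughly
# {'enabled': b'true', 'timeout': b''}.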

class ExtensionConfigSchema(ConfigSchema):
@@ -106,6 +107,8 @@ class ExtensionConfigSchema(ConfigSchema):
super(ExtensionConfigSchema, self).__init__(name)
self['enabled'] = types.Boolean()

# TODO: override serialize to gate on enabled=true?


class LogLevelConfigSchema(object):
"""Special cased schema for handling a config section with loglevels.
@@ -118,25 +121,20 @@ class LogLevelConfigSchema(object):
self.name = name
self._config_value = types.LogLevel()

def format(self, values):
lines = ['[%s]' % self.name]
for key, value in sorted(values.items()):
if value is not None:
lines.append('%s = %s' % (
key, self._config_value.format(value)))
return '\n'.join(lines)

def convert(self, items):
def deserialize(self, values):
errors = {}
values = {}
result = {}

for key, value in items:
for key, value in values.items():
try:
if value.strip():
values[key] = self._config_value.deserialize(value)
result[key] = self._config_value.deserialize(value)
except ValueError as e: # deserialization failed
result[key] = None
errors[key] = str(e)
return result, errors

if errors:
raise exceptions.ConfigError(errors)
return values
def serialize(self, values, display=False):
result = ordered_dict() # TODO: 2.6 cleanup (#344)
for key in sorted(values.keys()):
result[key] = self._config_value.serialize(values[key], display)
return result

@@ -53,14 +53,12 @@ class ConfigValue(object):
"""Cast raw string to appropriate type."""
return value

def serialize(self, value):
def serialize(self, value, display=False):
"""Convert value back to string for saving."""
if value is None:
return b''
return bytes(value)

def format(self, value):
"""Format value for display."""
return self.serialize(value)


class String(ConfigValue):
"""String value.
@@ -79,24 +77,34 @@ class String(ConfigValue):
return None
return value

def serialize(self, value):
def serialize(self, value, display=False):
if value is None:
return b''
return encode(value)


class Secret(ConfigValue):
"""String value.
"""Secret value.

Masked when being displayed, and is not decoded.
Should be used for passwords, auth tokens etc. Deserializing will not
convert to unicode. Will mask value when being displayed.
"""
def __init__(self, optional=False, choices=None):
self._required = not optional

def deserialize(self, value):
value = value.strip()
validators.validate_required(value, self._required)
if not value:
return None
return value

def format(self, value):
return '********'
def serialize(self, value, display=False):
if value is None:
return b''
elif display:
return b'********'
return value
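A quick sketch of the display gating introduced here, in line with the tests further down:

from mopidy.config import types

secret = types.Secret()
secret.serialize('s3cret')                # value passed through for persistence
secret.serialize('s3cret', display=True)  # b'********', masked for display
secret.serialize(None, display=True)      # b''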

class Integer(ConfigValue):
@@ -134,11 +142,11 @@ class Boolean(ConfigValue):
return False
raise ValueError('invalid value for boolean: %r' % value)

def serialize(self, value):
def serialize(self, value, display=False):
if value:
return 'true'
return b'true'
else:
return 'false'
return b'false'


class List(ConfigValue):
@@ -160,7 +168,7 @@ class List(ConfigValue):
validators.validate_required(values, self._required)
return tuple(values)

def serialize(self, value):
def serialize(self, value, display=False):
return b'\n ' + b'\n '.join(encode(v) for v in value if v)


@@ -171,19 +179,22 @@ class LogLevel(ConfigValue):
with any casing.
"""
levels = {
'critical': logging.CRITICAL,
'error': logging.ERROR,
'warning': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
b'critical': logging.CRITICAL,
b'error': logging.ERROR,
b'warning': logging.WARNING,
b'info': logging.INFO,
b'debug': logging.DEBUG,
}

def deserialize(self, value):
validators.validate_choice(value.lower(), self.levels.keys())
return self.levels.get(value.lower())

def serialize(self, value):
return dict((v, k) for k, v in self.levels.items()).get(value)
def serialize(self, value, display=False):
lookup = dict((v, k) for k, v in self.levels.items())
if value in lookup:
return lookup[value]
return b''
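Illustratively, the lookup now falls back to an empty byte string instead of None for unknown levels:

import logging

from mopidy.config import types

level = types.LogLevel()
level.deserialize('DEBUG')      # logging.DEBUG
level.serialize(logging.DEBUG)  # b'debug'
level.serialize(1337)           # b'', no matching level name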

class Hostname(ConfigValue):
@@ -192,7 +203,7 @@ class Hostname(ConfigValue):
def __init__(self, optional=False):
self._required = not optional

def deserialize(self, value):
def deserialize(self, value, display=False):
validators.validate_required(value, self._required)
if not value.strip():
return None
@@ -243,7 +254,7 @@ class Path(ConfigValue):
return None
return ExpandedPath(value)

def serialize(self, value):
def serialize(self, value, display=False):
if isinstance(value, ExpandedPath):
return value.original
return value

@@ -16,23 +16,5 @@ class MopidyException(Exception):
self._message = message


class ConfigError(MopidyException):
def __init__(self, errors):
self._errors = errors

def __getitem__(self, key):
return self._errors[key]

def __iter__(self):
return self._errors.iterkeys()

@property
def message(self):
lines = []
for key, msg in self._errors.items():
lines.append('%s: %s' % (key, msg))
return '\n'.join(lines)


class ExtensionError(MopidyException):
pass

@@ -112,45 +112,38 @@ def load_extensions():
return installed_extensions


def validate_extensions(installed_extensions):
def validate_extension(extension):
"""Verify extension's dependencies and environment.

:param installed_extensions: list of installed extensions
:returns: list of valid extensions
:param extensions: an extension to check
:returns: if extension should be run
"""

valid_extensions = []
logger.debug('Validating extension: %s', extension.ext_name)

for extension in installed_extensions:
logger.debug('Validating extension: %s', extension.ext_name)
if extension.ext_name != extension.entry_point.name:
logger.warning(
'Disabled extension %(ep)s: entry point name (%(ep)s) '
'does not match extension name (%(ext)s)',
{'ep': extension.entry_point.name, 'ext': extension.ext_name})
return False

if extension.ext_name != extension.entry_point.name:
logger.warning(
'Disabled extension %(ep)s: entry point name (%(ep)s) '
'does not match extension name (%(ext)s)',
{'ep': extension.entry_point.name, 'ext': extension.ext_name})
continue
try:
extension.entry_point.require()
except pkg_resources.DistributionNotFound as ex:
logger.info(
'Disabled extension %s: Dependency %s not found',
extension.ext_name, ex)
return False

try:
extension.entry_point.require()
except pkg_resources.DistributionNotFound as ex:
logger.info(
'Disabled extension %s: Dependency %s not found',
extension.ext_name, ex)
continue
try:
extension.validate_environment()
except exceptions.ExtensionError as ex:
logger.info(
'Disabled extension %s: %s', extension.ext_name, ex.message)
return False

try:
extension.validate_environment()
except exceptions.ExtensionError as ex:
logger.info(
'Disabled extension %s: %s', extension.ext_name, ex.message)
continue

valid_extensions.append(extension)

names = (e.ext_name for e in valid_extensions)
logger.debug('Valid extensions: %s', ', '.join(names))
return valid_extensions
return True
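A sketch of how a caller is expected to combine this per-extension check with the enabled flag, following the pattern used in main() above; the config file path is illustrative:

from mopidy import config as config_lib
from mopidy import ext

installed_extensions = ext.load_extensions()
config, errors = config_lib.load(
    ['/etc/mopidy/mopidy.conf'], installed_extensions, [])

enabled_extensions = []
for extension in installed_extensions:
    # Keep extensions that pass their environment checks and are enabled.
    if ext.validate_extension(extension) and config[extension.ext_name]['enabled']:
        enabled_extensions.append(extension)
    elif extension.ext_name in errors:
        # Drop config errors for extensions that will not be run anyway.
        del errors[extension.ext_name]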

def register_gstreamer_elements(enabled_extensions):
@@ -163,25 +156,3 @@ def register_gstreamer_elements(enabled_extensions):
logger.debug(
'Registering GStreamer elements for: %s', extension.ext_name)
extension.register_gstreamer_elements()


def filter_enabled_extensions(raw_config, extensions):
boolean = config_lib.Boolean()
enabled_extensions = []
enabled_names = []
disabled_names = []

for extension in extensions:
# TODO: handle key and value errors.
enabled = raw_config[extension.ext_name]['enabled']
if boolean.deserialize(enabled):
enabled_extensions.append(extension)
enabled_names.append(extension.ext_name)
else:
disabled_names.append(extension.ext_name)

logging.info(
'Enabled extensions: %s', ', '.join(enabled_names) or 'none')
logging.info(
'Disabled extensions: %s', ', '.join(disabled_names) or 'none')
return enabled_extensions

@@ -48,17 +48,17 @@ def main():
config_overrides = []

# TODO: decide if we want to avoid this boilerplate some how.
logging_config = config_lib.load(config_files, config_overrides)
# Initial config without extensions to bootstrap logging.
logging_config, _ = config_lib.load(config_files, [], config_overrides)
log.setup_root_logger()
log.setup_console_logging(logging_config, options.verbosity_level)

extensions = ext.load_extensions()
raw_config = config_lib.load(config_files, config_overrides, extensions)
extensions = ext.filter_enabled_extensions(raw_config, extensions)
config = config_lib.validate(
raw_config, config_lib.core_schemas, extensions)
config, errors = config_lib.load(config_files, extensions, config_overrides)
log.setup_log_levels(config)

# TODO: missing error checking and other default setup code.

tracks = []

def store(data):

@@ -8,12 +8,8 @@ import threading
from pykka import ActorDeadError
from pykka.registry import ActorRegistry

from mopidy import exceptions


logger = logging.getLogger('mopidy.utils.process')


SIGNALS = dict(
(k, v) for v, k in signal.__dict__.iteritems()
if v.startswith('SIG') and not v.startswith('SIG_'))

@@ -2,7 +2,7 @@ from __future__ import unicode_literals

import mock

from mopidy import config, exceptions
from mopidy import config

from tests import unittest, path_to_data_dir

@@ -53,38 +53,38 @@ class LoadConfigTest(unittest.TestCase):

class ValidateTest(unittest.TestCase):
def setUp(self):
self.schema = mock.Mock()
self.schema.name = 'foo'
self.schema = config.ConfigSchema('foo')
self.schema['bar'] = config.ConfigValue()

def test_empty_config_no_schemas(self):
conf, errors = config._validate({}, [])
self.assertEqual({}, conf)
self.assertEqual([], errors)
self.assertEqual({}, errors)

def test_config_no_schemas(self):
raw_config = {'foo': {'bar': 'baz'}}
conf, errors = config._validate(raw_config, [])
self.assertEqual({}, conf)
self.assertEqual([], errors)
self.assertEqual({}, errors)

def test_empty_config_single_schema(self):
conf, errors = config._validate({}, [self.schema])
self.assertEqual({}, conf)
self.assertEqual(['foo: section not found.'], errors)
self.assertEqual({'foo': {'bar': None}}, conf)
self.assertEqual({'foo': {'bar': 'config key not found.'}}, errors)

def test_config_single_schema(self):
raw_config = {'foo': {'bar': 'baz'}}
self.schema.convert.return_value = {'baz': 'bar'}
conf, errors = config._validate(raw_config, [self.schema])
self.assertEqual({'foo': {'baz': 'bar'}}, conf)
self.assertEqual([], errors)
self.assertEqual({'foo': {'bar': 'baz'}}, conf)
self.assertEqual({}, errors)

def test_config_single_schema_config_error(self):
raw_config = {'foo': {'bar': 'baz'}}
self.schema.convert.side_effect = exceptions.ConfigError({'bar': 'bad'})
self.schema['bar'] = mock.Mock()
self.schema['bar'].deserialize.side_effect = ValueError('bad')
conf, errors = config._validate(raw_config, [self.schema])
self.assertEqual(['foo/bar: bad'], errors)
self.assertEqual({}, conf)
self.assertEqual({'foo': {'bar': None}}, conf)
self.assertEqual({'foo': {'bar': 'bad'}}, errors)

# TODO: add more tests


@@ -3,10 +3,9 @@ from __future__ import unicode_literals
import logging
import mock

from mopidy import exceptions
from mopidy.config import schemas, types

from tests import unittest
from tests import unittest, any_unicode


class ConfigSchemaTest(unittest.TestCase):
@@ -17,73 +16,65 @@ class ConfigSchemaTest(unittest.TestCase):
self.schema['baz'] = mock.Mock()
self.values = {'bar': '123', 'foo': '456', 'baz': '678'}

def test_format(self):
self.schema['foo'].format.return_value = 'qwe'
self.schema['bar'].format.return_value = 'asd'
self.schema['baz'].format.return_value = 'zxc'
def test_deserialize(self):
self.schema.deserialize(self.values)

expected = ['[test]', 'foo = qwe', 'bar = asd', 'baz = zxc']
result = self.schema.format(self.values)
self.assertEqual('\n'.join(expected), result)

def test_format_unkwown_value(self):
self.schema['foo'].format.return_value = 'qwe'
self.schema['bar'].format.return_value = 'asd'
self.schema['baz'].format.return_value = 'zxc'
self.values['unknown'] = 'rty'

result = self.schema.format(self.values)
self.assertNotIn('unknown = rty', result)

def test_convert(self):
self.schema.convert(self.values.items())

def test_convert_with_missing_value(self):
def test_deserialize_with_missing_value(self):
del self.values['foo']

with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertEqual({'foo': any_unicode}, errors)
self.assertIsNone(result.pop('foo'))
self.assertIsNotNone(result.pop('bar'))
self.assertIsNotNone(result.pop('baz'))
self.assertEqual({}, result)

self.assertIn('not found', cm.exception['foo'])

def test_convert_with_extra_value(self):
def test_deserialize_with_extra_value(self):
self.values['extra'] = '123'

with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertEqual({'extra': any_unicode}, errors)
self.assertIsNotNone(result.pop('foo'))
self.assertIsNotNone(result.pop('bar'))
self.assertIsNotNone(result.pop('baz'))
self.assertEqual({}, result)

self.assertIn('unknown', cm.exception['extra'])

def test_convert_with_deserialization_error(self):
def test_deserialize_with_deserialization_error(self):
self.schema['foo'].deserialize.side_effect = ValueError('failure')

with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertEqual({'foo': 'failure'}, errors)
self.assertIsNone(result.pop('foo'))
self.assertIsNotNone(result.pop('bar'))
self.assertIsNotNone(result.pop('baz'))
self.assertEqual({}, result)

self.assertIn('failure', cm.exception['foo'])

def test_convert_with_multiple_deserialization_errors(self):
def test_deserialize_with_multiple_deserialization_errors(self):
self.schema['foo'].deserialize.side_effect = ValueError('failure')
self.schema['bar'].deserialize.side_effect = ValueError('other')

with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertEqual({'foo': 'failure', 'bar': 'other'}, errors)
self.assertIsNone(result.pop('foo'))
self.assertIsNone(result.pop('bar'))
self.assertIsNotNone(result.pop('baz'))
self.assertEqual({}, result)

self.assertIn('failure', cm.exception['foo'])
self.assertIn('other', cm.exception['bar'])

def test_convert_deserialization_unknown_and_missing_errors(self):
def test_deserialize_deserialization_unknown_and_missing_errors(self):
self.values['extra'] = '123'
self.schema['bar'].deserialize.side_effect = ValueError('failure')
del self.values['baz']

with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertIn('unknown', errors['extra'])
self.assertNotIn('foo', errors)
self.assertIn('failure', errors['bar'])
self.assertIn('not found', errors['baz'])

self.assertIn('unknown', cm.exception['extra'])
self.assertNotIn('foo', cm.exception)
self.assertIn('failure', cm.exception['bar'])
self.assertIn('not found', cm.exception['baz'])
self.assertNotIn('unknown', result)
self.assertIn('foo', result)
self.assertIsNone(result['bar'])
self.assertIsNone(result['baz'])


class ExtensionConfigSchemaTest(unittest.TestCase):
@@ -95,18 +86,11 @@ class ExtensionConfigSchemaTest(unittest.TestCase):
class LogLevelConfigSchemaTest(unittest.TestCase):
def test_conversion(self):
schema = schemas.LogLevelConfigSchema('test')
result = schema.convert([('foo.bar', 'DEBUG'), ('baz', 'INFO')])
result, errors = schema.deserialize({'foo.bar': 'DEBUG', 'baz': 'INFO'})

self.assertEqual(logging.DEBUG, result['foo.bar'])
self.assertEqual(logging.INFO, result['baz'])

def test_format(self):
schema = schemas.LogLevelConfigSchema('test')
values = {'foo.bar': logging.DEBUG, 'baz': logging.INFO}
expected = ['[test]', 'baz = info', 'foo.bar = debug']
result = schema.format(values)
self.assertEqual('\n'.join(expected), result)


class DidYouMeanTest(unittest.TestCase):
def testSuggestoins(self):

@@ -23,10 +23,15 @@ class ConfigValueTest(unittest.TestCase):
value = types.ConfigValue()
self.assertIsInstance(value.serialize(object()), bytes)

def test_format_uses_serialize(self):
def test_serialize_none(self):
value = types.ConfigValue()
obj = object()
self.assertEqual(value.serialize(obj), value.format(obj))
result = value.serialize(None)
self.assertIsInstance(result, bytes)
self.assertEqual(b'', result)

def test_serialize_supports_display(self):
value = types.ConfigValue()
self.assertIsInstance(value.serialize(object(), display=True), bytes)


class StringTest(unittest.TestCase):
@@ -88,6 +93,12 @@ class StringTest(unittest.TestCase):
self.assertIsInstance(result, bytes)
self.assertEqual(r'a\n\tb'.encode('utf-8'), result)

def test_serialize_none(self):
value = types.String()
result = value.serialize(None)
self.assertIsInstance(result, bytes)
self.assertEqual(b'', result)


class SecretTest(unittest.TestCase):
def test_deserialize_passes_through(self):
@@ -100,13 +111,28 @@ class SecretTest(unittest.TestCase):
value = types.Secret()
self.assertRaises(ValueError, value.deserialize, b'')

def test_serialize_conversion_to_string(self):
value = types.Secret()
self.assertIsInstance(value.serialize(object()), bytes)
def test_deserialize_respects_optional(self):
value = types.Secret(optional=True)
self.assertIsNone(value.deserialize(b''))
self.assertIsNone(value.deserialize(b' '))

def test_format_masks_value(self):
def test_serialize_none(self):
value = types.Secret()
self.assertEqual('********', value.format('s3cret'))
result = value.serialize(None)
self.assertIsInstance(result, bytes)
self.assertEqual(b'', result)

def test_serialize_for_display_masks_value(self):
value = types.Secret()
result = value.serialize('s3cret', display=True)
self.assertIsInstance(result, bytes)
self.assertEqual(b'********', result)

def test_serialize_none_for_display(self):
value = types.Secret()
result = value.serialize(None, display=True)
self.assertIsInstance(result, bytes)
self.assertEqual(b'', result)


class IntegerTest(unittest.TestCase):
@@ -157,10 +183,19 @@ class BooleanTest(unittest.TestCase):
self.assertRaises(ValueError, value.deserialize, 'sure')
self.assertRaises(ValueError, value.deserialize, '')

def test_serialize(self):
def test_serialize_true(self):
value = types.Boolean()
self.assertEqual('true', value.serialize(True))
self.assertEqual('false', value.serialize(False))
result = value.serialize(True)
self.assertEqual(b'true', result)
self.assertIsInstance(result, bytes)

def test_serialize_false(self):
value = types.Boolean()
result = value.serialize(False)
self.assertEqual(b'false', result)
self.assertIsInstance(result, bytes)

# TODO: test None or other invalid values into serialize?


class ListTest(unittest.TestCase):
@@ -240,7 +275,7 @@ class LogLevelTest(unittest.TestCase):
value = types.LogLevel()
for name, level in self.levels.items():
self.assertEqual(name, value.serialize(level))
self.assertIsNone(value.serialize(1337))
self.assertEqual(b'', value.serialize(1337))


class HostnameTest(unittest.TestCase):

@@ -15,14 +15,3 @@ class ExceptionsTest(unittest.TestCase):
def test_extension_error_is_a_mopidy_exception(self):
self.assert_(issubclass(
exceptions.ExtensionError, exceptions.MopidyException))

def test_config_error_is_a_mopidy_exception(self):
self.assert_(issubclass(
exceptions.ConfigError, exceptions.MopidyException))

def test_config_error_provides_getitem(self):
exception = exceptions.ConfigError(
{'field1': 'msg1', 'field2': 'msg2'})
self.assertEqual('msg1', exception['field1'])
self.assertEqual('msg2', exception['field2'])
self.assertItemsEqual(['field1', 'field2'], exception)