Merge branch 'develop' into fix/mpd-disambiguated-playlist-not-found

Nick Steel 2013-04-17 21:00:58 +01:00
commit 28b00dbea1
36 changed files with 927 additions and 975 deletions

View File

@ -47,11 +47,17 @@ and improved.
as extensions, but they are still distributed together with Mopidy and are
enabled by default.
- The NAD mixer has been moved out of Mopidy core to its own project,
Mopidy-NAD. See :ref:`ext` for more information.
**Command line options**
- The command option :option:`mopidy --list-settings` is now named
:option:`mopidy --show-config`.
- The command option :option:`mopidy --list-deps` is now named
:option:`mopidy --show-deps`.
- What configuration files to use can now be specified through the command
option :option:`mopidy --config`.

View File

@ -28,9 +28,9 @@ the config values you want to change. If you want to keep the default for a
config value, you **should not** add it to ``~/.config/mopidy/mopidy.conf``.
To see the effective configuration for your Mopidy installation, you can
run ``mopidy --show-config``. It will print your full effective config with
passwords masked out so that you can safely share the output with others for
debugging.
run :option:`mopidy --show-config`. It will print your full effective config
with passwords masked out so that you can safely share the output with others
for debugging.
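For example, a minimal ``~/.config/mopidy/mopidy.conf`` that overrides just a
single value could look like this (the value shown is illustrative)::

    [mpd]
    password = topsecret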
You can find a description of all config values belonging to Mopidy's core
below, together with their default values. In addition, all :ref:`extensions
@ -95,13 +95,13 @@ Core configuration values
.. confval:: logging/debug_file
The file to dump debug log data to when Mopidy is run with the
:option:`--save-debug-log` option.
:option:`mopidy --save-debug-log` option.
.. confval:: logging.levels/*
.. confval:: loglevels/*
The ``logging.levels`` config section can be used to change the log level
for specific parts of Mopidy during development or debugging. Each key in
the config section should match the name of a logger. The value is the log
The ``loglevels`` config section can be used to change the log level for
specific parts of Mopidy during development or debugging. Each key in the
config section should match the name of a logger. The value is the log
level to use for that logger, one of ``debug``, ``info``, ``warning``,
``error``, or ``critical``.
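For example, to quiet Pykka down and get debug output from Mopidy's config
loader, the section could look like this (the logger names match loggers used
elsewhere in this changeset; the levels are illustrative)::

    [loglevels]
    pykka = warning
    mopidy.config = debug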

View File

@ -40,7 +40,8 @@ sends all requests to both, returning the primary response to the client and
then printing any diff between the two responses.
Note that this tool depends on ``gevent`` unlike the rest of Mopidy at the time
of writing. See ``--help`` for available options. Sample session::
of writing. See :option:`tools/debug-proxy.py --help` for available options.
Sample session::
[127.0.0.1]:59714
listallinfo

View File

@ -30,6 +30,21 @@ These extensions are maintained outside Mopidy's core, often by other
developers.
Mopidy-NAD
----------
Extension for controlling volume using an external NAD amplifier.
Author:
Stein Magnus Jodal
PyPI:
`Mopidy-NAD <https://pypi.python.org/pypi/Mopidy-NAD>`_
GitHub:
`mopidy/mopidy-nad <https://github.com/mopidy/mopidy-nad>`_
Issues:
https://github.com/mopidy/mopidy/issues
Mopidy-SoundCloud
-----------------
@ -42,3 +57,5 @@ PyPI:
`Mopidy-SoundCloud <https://pypi.python.org/pypi/Mopidy-SoundCloud>`_
GitHub:
`dz0ny/mopidy-soundcloud <https://github.com/dz0ny/mopidy-soundcloud>`_
Issues:
https://github.com/dz0ny/mopidy-soundcloud/issues

View File

@ -230,12 +230,12 @@ and ``password``.
version = __version__
def get_default_config(self):
return default_config
return bytes(default_config)
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['username'] = config.String(required=True)
schema['password'] = config.String(required=True, secret=True)
schema['username'] = config.String()
schema['password'] = config.Secret()
return schema
def validate_environment(self):
@ -365,4 +365,4 @@ Is much better than::
If you want to turn on debug logging for your own extension, but not for
everything else due to the amount of noise, see the docs for the
:confval:`logging.levels/*` config section.
:confval:`loglevels/*` config section.
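A rough sketch (not part of this changeset) of an extension written against
the updated config API shown above: the default config is returned as a
bytestring and the password field uses the new ``Secret`` type. The names
``myext`` and ``Mopidy-MyExt`` are illustrative::

    from mopidy import config, ext

    # Illustrative default config, returned as bytes by get_default_config().
    default_config = b'[myext]\nenabled = true\nusername =\npassword =\n'

    class Extension(ext.Extension):
        dist_name = 'Mopidy-MyExt'  # illustrative
        ext_name = 'myext'          # illustrative
        version = '0.1'             # illustrative

        def get_default_config(self):
            return bytes(default_config)

        def get_config_schema(self):
            # The base schema already provides the 'enabled' flag.
            schema = super(Extension, self).get_config_schema()
            schema['username'] = config.String()
            schema['password'] = config.Secret()
            return schema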

View File

@ -1,6 +0,0 @@
*********************************************
:mod:`mopidy.audio.mixers.nad` -- NAD mixer
*********************************************
.. automodule:: mopidy.audio.mixers.nad
:synopsis: Mixer element for controlling volume on NAD amplifiers

View File

@ -25,7 +25,48 @@ mopidy command
.. program:: mopidy
TODO: Document all command line options
.. cmdoption:: --version
Show Mopidy's version number and exit.
.. cmdoption:: -h, --help
Show help message and exit.
.. cmdoption:: -q, --quiet
Show less output: warning level and higher.
.. cmdoption:: -v, --verbose
Show more output: debug level and higher.
.. cmdoption:: --save-debug-log
Save debug log to the file specified in the :confval:`logging/debug_file`
config value, typically ``./mopidy.log``.
.. cmdoption:: --show-config
Show the current effective config. All configuration sources are merged
together to show the effective document. Secret values like passwords are
masked out. Config for disabled extensions is not included.
.. cmdoption:: --show-deps
Show dependencies, their versions and installation location.
.. cmdoption:: --config <file>
Specify config file to use. To use multiple config files, separate them
with colon. The later files override the earlier ones if there's a
conflict.
.. cmdoption:: -o <option>, --option <option>
Specify additional config values in the ``section/key=value`` format. Can
be provided multiple times.
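For example (the file paths and the override value are illustrative), the
``--config`` and ``-o`` options described above can be combined like this::

    mopidy --config /etc/mopidy/mopidy.conf:~/.config/mopidy/mopidy.conf \
        -o loglevels/pykka=debug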
mopidy-scan command
@ -33,4 +74,18 @@ mopidy-scan command
.. program:: mopidy-scan
TODO: Document all command line options
.. cmdoption:: --version
Show Mopidy's version number and exit.
.. cmdoption:: -h, --help
Show help message and exit.
.. cmdoption:: -q, --quiet
Show less output: warning level and higher.
.. cmdoption:: -v, --verbose
Show more output: debug level and higher.

View File

@ -27,7 +27,7 @@ with others for debugging.
Installed dependencies
======================
The command :option:`mopidy --list-deps` will list the paths to and versions of
The command :option:`mopidy --show-deps` will list the paths to and versions of
any dependency Mopidy or the extensions might need to work. This is very useful
data for checking that you're using the right versions, and that you're using
the right installation if you have multiple installations of a dependency on
@ -42,7 +42,7 @@ run :option:`mopidy --save-debug-log`, it will save the debug log to the file
``mopidy.log`` in the directory you ran the command from.
If you want to turn on more or less logging for some component, see the
docs for the :confval:`logging.levels/*` config section.
docs for the :confval:`loglevels/*` config section.
Debugging deadlocks
@ -56,3 +56,21 @@ system is deadlocking. If you have the ``pkill`` command installed, you can use
this by simply running::
pkill -SIGUSR1 mopidy
Debugging GStreamer
===================
If you really want to dig in and debug GStreamer behaviour, then check out the
`Debugging section
<http://gstreamer.freedesktop.org/data/doc/gstreamer/head/manual/html/section-checklist-debug.html>`_
of GStreamer's documentation for your options. Note that Mopidy does not
support the GStreamer command line options, like ``--gst-debug-level=3``, but
setting GStreamer environment variables, like :envvar:`GST_DEBUG`, works with
Mopidy. For example, to run Mopidy with debug logging and GStreamer logging at
level 3, you can run::
GST_DEBUG=3 mopidy -v
This will produce a lot of output, but given some GStreamer knowledge this is
very useful for debugging GStreamer pipeline issues.

View File

@ -42,29 +42,47 @@ def main():
config_files = options.config.split(':')
config_overrides = options.overrides
extensions = [] # Make sure it is defined before the finally block
enabled_extensions = [] # Make sure it is defined before the finally block
# TODO: figure out a way to make the boilerplate in this file reusable in
# scanner and other places we need it.
try:
create_file_structures()
logging_config = config_lib.load(config_files, config_overrides)
# Initial config without extensions to bootstrap logging.
logging_config, _ = config_lib.load(config_files, [], config_overrides)
# TODO: setup_logging needs defaults in-case config values are None
log.setup_logging(
logging_config, options.verbosity_level, options.save_debug_log)
extensions = ext.load_extensions()
raw_config = config_lib.load(config_files, config_overrides, extensions)
extensions = ext.filter_enabled_extensions(raw_config, extensions)
config = config_lib.validate(
raw_config, config_lib.core_schemas, extensions)
log.setup_log_levels(config)
check_old_locations()
installed_extensions = ext.load_extensions()
# TODO: wrap config in RO proxy.
config, config_errors = config_lib.load(
config_files, installed_extensions, config_overrides)
# Filter out disabled extensions and remove any config errors for them.
for extension in installed_extensions:
enabled = config[extension.ext_name]['enabled']
if ext.validate_extension(extension) and enabled:
enabled_extensions.append(extension)
elif extension.ext_name in config_errors:
del config_errors[extension.ext_name]
log_extension_info(installed_extensions, enabled_extensions)
check_config_errors(config_errors)
log.setup_log_levels(config)
create_file_structures()
check_old_locations()
ext.register_gstreamer_elements(enabled_extensions)
# Anything that wants to exit after this point must use
# mopidy.utils.process.exit_process as actors have been started.
audio = setup_audio(config)
backends = setup_backends(config, extensions, audio)
backends = setup_backends(config, enabled_extensions, audio)
core = setup_core(audio, backends)
setup_frontends(config, extensions, core)
setup_frontends(config, enabled_extensions, core)
loop.run()
except KeyboardInterrupt:
logger.info('Interrupted. Exiting...')
@ -72,13 +90,32 @@ def main():
logger.exception(ex)
finally:
loop.quit()
stop_frontends(extensions)
stop_frontends(enabled_extensions)
stop_core()
stop_backends(extensions)
stop_backends(enabled_extensions)
stop_audio()
process.stop_remaining_actors()
def log_extension_info(all_extensions, enabled_extensions):
# TODO: distinguish disabled vs blocked by env?
enabled_names = set(e.ext_name for e in enabled_extensions)
disabled_names = set(e.ext_name for e in all_extensions) - enabled_names
logging.info(
'Enabled extensions: %s', ', '.join(enabled_names) or 'none')
logging.info(
'Disabled extensions: %s', ', '.join(disabled_names) or 'none')
def check_config_errors(errors):
if not errors:
return
for section in errors:
for key, msg in errors[section].items():
logger.error('Config value %s/%s %s', section, key, msg)
sys.exit(1)
def check_config_override(option, opt, override):
try:
return config_lib.parse_override(override)
@ -114,9 +151,9 @@ def parse_options():
action='callback', callback=show_config_callback,
help='show current config')
parser.add_option(
b'--list-deps',
action='callback', callback=deps.list_deps_optparse_callback,
help='list dependencies and their versions')
b'--show-deps',
action='callback', callback=deps.show_deps_optparse_callback,
help='show dependencies and their versions')
parser.add_option(
b'--config',
action='store', dest='config',
@ -136,31 +173,20 @@ def show_config_callback(option, opt, value, parser):
overrides = getattr(parser.values, 'overrides', [])
extensions = ext.load_extensions()
raw_config = config_lib.load(files, overrides, extensions)
enabled_extensions = ext.filter_enabled_extensions(raw_config, extensions)
config = config_lib.validate(
raw_config, config_lib.core_schemas, enabled_extensions)
# TODO: create mopidy.config.format?
output = []
for schema in config_lib.core_schemas:
options = config.get(schema.name, {})
if not options:
continue
output.append(schema.format(options))
config, errors = config_lib.load(files, extensions, overrides)
# Clear out any config for disabled extensions.
for extension in extensions:
schema = extension.get_config_schema()
if not ext.validate_extension(extension):
config[extension.ext_name] = {b'enabled': False}
errors[extension.ext_name] = {
b'enabled': b'extension disabled itself.'}
elif not config[extension.ext_name]['enabled']:
config[extension.ext_name] = {b'enabled': False}
errors[extension.ext_name] = {
b'enabled': b'extension disabled by config.'}
if extension in enabled_extensions:
options = config.get(schema.name, {})
output.append(schema.format(options))
else:
lines = ['[%s]' % schema.name, 'enabled = false',
'# Config hidden as extension is disabled']
output.append('\n'.join(lines))
print '\n\n'.join(output)
print config_lib.format(config, extensions, errors)
sys.exit(0)

View File

@ -7,7 +7,6 @@ import gobject
from .auto import AutoAudioMixer
from .fake import FakeMixer
from .nad import NadMixer
def register_mixer(mixer_class):
@ -19,4 +18,3 @@ def register_mixer(mixer_class):
def register_mixers():
register_mixer(AutoAudioMixer)
register_mixer(FakeMixer)
register_mixer(NadMixer)

View File

@ -1,292 +0,0 @@
"""Mixer that controls volume using a NAD amplifier.
The NAD amplifier must be connected to the machine running Mopidy using a
serial cable.
Dependencies
============
.. literalinclude:: ../../../../requirements/external_mixers.txt
Configuration
=============
Set the :confval:`audio/mixer` config value to ``nadmixer`` to use it. You
probably also need to add some properties to the :confval:`audio/mixer` config
value.
Supported properties include:
``port``:
The serial device to use, defaults to ``/dev/ttyUSB0``. This must be
set correctly for the mixer to work.
``source``:
The source that should be selected on the amplifier, like ``aux``,
``disc``, ``tape``, ``tuner``, etc. Leave unset if you don't want the
mixer to change it for you.
``speakers-a``:
Set to ``on`` or ``off`` if you want the mixer to make sure that
speaker set A is turned on or off. Leave unset if you don't want the
mixer to change it for you.
``speakers-b``:
See ``speakers-a``.
Configuration examples::
# Minimum configuration, if the amplifier is available at /dev/ttyUSB0
mixer = nadmixer
# Minimum configuration, if the amplifier is available elsewhere
mixer = nadmixer port=/dev/ttyUSB3
# Full configuration
mixer = nadmixer port=/dev/ttyUSB0 source=aux speakers-a=on speakers-b=off
"""
from __future__ import unicode_literals
import logging
import pygst
pygst.require('0.10')
import gobject
import gst
try:
import serial
except ImportError:
serial = None # noqa
import pykka
from . import utils
logger = logging.getLogger('mopidy.audio.mixers.nad')
class NadMixer(gst.Element, gst.ImplementsInterface, gst.interfaces.Mixer):
__gstdetails__ = (
'NadMixer',
'Mixer',
'Mixer to control NAD amplifiers using a serial link',
'Mopidy')
port = gobject.property(type=str, default='/dev/ttyUSB0')
source = gobject.property(type=str)
speakers_a = gobject.property(type=str)
speakers_b = gobject.property(type=str)
_volume_cache = 0
_nad_talker = None
def list_tracks(self):
track = utils.create_track(
label='Master',
initial_volume=0,
min_volume=0,
max_volume=100,
num_channels=1,
flags=(
gst.interfaces.MIXER_TRACK_MASTER |
gst.interfaces.MIXER_TRACK_OUTPUT))
return [track]
def get_volume(self, track):
return [self._volume_cache]
def set_volume(self, track, volumes):
if len(volumes):
volume = volumes[0]
self._volume_cache = volume
self._nad_talker.set_volume(volume)
def set_mute(self, track, mute):
self._nad_talker.mute(mute)
def do_change_state(self, transition):
if transition == gst.STATE_CHANGE_NULL_TO_READY:
if serial is None:
logger.warning('nadmixer dependency python-serial not found')
return gst.STATE_CHANGE_FAILURE
self._start_nad_talker()
return gst.STATE_CHANGE_SUCCESS
def _start_nad_talker(self):
self._nad_talker = NadTalker.start(
port=self.port,
source=self.source or None,
speakers_a=self.speakers_a or None,
speakers_b=self.speakers_b or None
).proxy()
class NadTalker(pykka.ThreadingActor):
"""
Independent thread which does the communication with the NAD amplifier.
Since the communication is done in an independent thread, Mopidy won't
block other requests while doing rather time consuming work like
calibrating the NAD amplifier's volume.
"""
# Serial link config
BAUDRATE = 115200
BYTESIZE = 8
PARITY = 'N'
STOPBITS = 1
# Timeout in seconds used for read/write operations.
# If you set the timeout too low, the reads will never get complete
# confirmations and calibration will decrease volume forever. If you set
# the timeout too high, stuff takes more time. 0.2s seems like a good value
# for NAD C 355BEE.
TIMEOUT = 0.2
# Number of volume levels the amplifier supports. 40 for NAD C 355BEE.
VOLUME_LEVELS = 40
def __init__(self, port, source, speakers_a, speakers_b):
super(NadTalker, self).__init__()
self.port = port
self.source = source
self.speakers_a = speakers_a
self.speakers_b = speakers_b
# Volume in range 0..VOLUME_LEVELS. :class:`None` before calibration.
self._nad_volume = None
self._device = None
def on_start(self):
self._open_connection()
self._set_device_to_known_state()
def _open_connection(self):
logger.info('NAD amplifier: Connecting through "%s"', self.port)
self._device = serial.Serial(
port=self.port,
baudrate=self.BAUDRATE,
bytesize=self.BYTESIZE,
parity=self.PARITY,
stopbits=self.STOPBITS,
timeout=self.TIMEOUT)
self._get_device_model()
def _set_device_to_known_state(self):
self._power_device_on()
self._select_speakers()
self._select_input_source()
self.mute(False)
self.calibrate_volume()
def _get_device_model(self):
model = self._ask_device('Main.Model')
logger.info('NAD amplifier: Connected to model "%s"', model)
return model
def _power_device_on(self):
self._check_and_set('Main.Power', 'On')
def _select_speakers(self):
if self.speakers_a is not None:
self._check_and_set('Main.SpeakerA', self.speakers_a.title())
if self.speakers_b is not None:
self._check_and_set('Main.SpeakerB', self.speakers_b.title())
def _select_input_source(self):
if self.source is not None:
self._check_and_set('Main.Source', self.source.title())
def mute(self, mute):
if mute:
self._check_and_set('Main.Mute', 'On')
else:
self._check_and_set('Main.Mute', 'Off')
def calibrate_volume(self, current_nad_volume=None):
# The NAD C 355BEE amplifier has 40 different volume levels. We have no
# way of asking on which level we are. Thus, we must calibrate the
# mixer by decreasing the volume 39 times.
if current_nad_volume is None:
current_nad_volume = self.VOLUME_LEVELS
if current_nad_volume == self.VOLUME_LEVELS:
logger.info('NAD amplifier: Calibrating by setting volume to 0')
self._nad_volume = current_nad_volume
if self._decrease_volume():
current_nad_volume -= 1
if current_nad_volume == 0:
logger.info('NAD amplifier: Done calibrating')
else:
self.actor_ref.proxy().calibrate_volume(current_nad_volume)
def set_volume(self, volume):
# Increase or decrease the amplifier volume until it matches the given
# target volume.
logger.debug('Setting volume to %d' % volume)
target_nad_volume = int(round(volume * self.VOLUME_LEVELS / 100.0))
if self._nad_volume is None:
return # Calibration needed
while target_nad_volume > self._nad_volume:
if self._increase_volume():
self._nad_volume += 1
while target_nad_volume < self._nad_volume:
if self._decrease_volume():
self._nad_volume -= 1
def _increase_volume(self):
# Increase volume. Returns :class:`True` if confirmed by device.
self._write('Main.Volume+')
return self._readline() == 'Main.Volume+'
def _decrease_volume(self):
# Decrease volume. Returns :class:`True` if confirmed by device.
self._write('Main.Volume-')
return self._readline() == 'Main.Volume-'
def _check_and_set(self, key, value):
for attempt in range(1, 4):
if self._ask_device(key) == value:
return
logger.info(
'NAD amplifier: Setting "%s" to "%s" (attempt %d/3)',
key, value, attempt)
self._command_device(key, value)
if self._ask_device(key) != value:
logger.info(
'NAD amplifier: Gave up on setting "%s" to "%s"',
key, value)
def _ask_device(self, key):
self._write('%s?' % key)
return self._readline().replace('%s=' % key, '')
def _command_device(self, key, value):
if type(value) == unicode:
value = value.encode('utf-8')
self._write('%s=%s' % (key, value))
self._readline()
def _write(self, data):
# Write data to device. Prepends and appends a newline to the data, as
# recommended by the NAD documentation.
if not self._device.isOpen():
self._device.open()
self._device.write('\n%s\n' % data)
logger.debug('Write: %s', data)
def _readline(self):
# Read line from device. The result is stripped for leading and
# trailing whitespace.
if not self._device.isOpen():
self._device.open()
result = self._device.readline().strip()
if result:
logger.debug('Read: %s', result)
return result

View File

@ -19,7 +19,7 @@ class Extension(ext.Extension):
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.String(secret=True)
schema['password'] = config.Secret()
schema['bitrate'] = config.Integer(choices=(96, 160, 320))
schema['timeout'] = config.Integer(minimum=0)
schema['cache_dir'] = config.Path()

View File

@ -1,24 +1,22 @@
from __future__ import unicode_literals
import codecs
import ConfigParser as configparser
import io
import logging
import os.path
import sys
from mopidy.config.schemas import *
from mopidy.config.types import *
from mopidy.utils import path
logger = logging.getLogger('mopdiy.config')
logger = logging.getLogger('mopidy.config')
_logging_schema = ConfigSchema('logging')
_logging_schema['console_format'] = String()
_logging_schema['debug_format'] = String()
_logging_schema['debug_file'] = Path()
_loglevels_schema = LogLevelConfigSchema('logging.levels')
_loglevels_schema = LogLevelConfigSchema('loglevels')
_audio_schema = ConfigSchema('audio')
_audio_schema['mixer'] = String()
@ -28,12 +26,12 @@ _audio_schema['output'] = String()
_proxy_schema = ConfigSchema('proxy')
_proxy_schema['hostname'] = Hostname(optional=True)
_proxy_schema['username'] = String(optional=True)
_proxy_schema['password'] = String(optional=True, secret=True)
_proxy_schema['password'] = Secret(optional=True)
# NOTE: if multiple outputs are ever supported, something like LogLevelConfigSchema
#_outputs_schema = config.AudioOutputConfigSchema()
core_schemas = [_logging_schema, _loglevels_schema, _audio_schema, _proxy_schema]
_schemas = [_logging_schema, _loglevels_schema, _audio_schema, _proxy_schema]
def read(config_file):
@ -42,37 +40,48 @@ def read(config_file):
return filehandle.read()
def load(files, overrides, extensions=None):
def load(files, extensions, overrides):
# Helper to get configs, as the rest of our config system should not need
# to know about extensions.
config_dir = os.path.dirname(__file__)
defaults = [read(os.path.join(config_dir, 'default.conf'))]
if extensions:
defaults.extend(e.get_default_config() for e in extensions)
return _load(files, defaults, overrides)
defaults.extend(e.get_default_config() for e in extensions)
raw_config = _load(files, defaults, overrides)
schemas = _schemas[:]
schemas.extend(e.get_config_schema() for e in extensions)
return _validate(raw_config, schemas)
def format(config, extensions, comments=None, display=True):
# Helper to format configs, as the rest of our config system should not
# need to know about extensions.
schemas = _schemas[:]
schemas.extend(e.get_config_schema() for e in extensions)
return _format(config, comments or {}, schemas, display)
# TODO: replace load() with this version of API.
def _load(files, defaults, overrides):
parser = configparser.RawConfigParser()
files = [path.expand_path(f) for f in files]
sources = ['builtin-defaults'] + files + ['command-line']
logger.info('Loading config from: %s', ', '.join(sources))
for default in defaults: # TODO: remove decoding
parser.readfp(io.StringIO(default.decode('utf-8')))
# TODO: simply return path to config file for defaults so we can load it
# all in the same way?
for default in defaults:
parser.readfp(io.BytesIO(default))
# Load config from a series of config files
for filename in files:
# TODO: if this is the initial load of logging config we might not have
# a logger at this point, we might want to handle this better.
try:
with codecs.open(filename, encoding='utf-8') as filehandle:
with io.open(filename, 'rb') as filehandle:
parser.readfp(filehandle)
except IOError:
# TODO: if this is the initial load of logging config we might not
# have a logger at this point, we might want to handle this better.
logger.debug('Config file %s not found; skipping', filename)
continue
except UnicodeDecodeError:
logger.error('Config file %s is not UTF-8 encoded', filename)
sys.exit(1)
raw_config = {}
for section in parser.sections():
@ -84,39 +93,38 @@ def _load(files, defaults, overrides):
return raw_config
def validate(raw_config, schemas, extensions=None):
# Collect config schemas to validate against
extension_schemas = [e.get_config_schema() for e in extensions or []]
config, errors = _validate(raw_config, schemas + extension_schemas)
if errors:
# TODO: raise error instead.
#raise exceptions.ConfigError(errors)
for error in errors:
logger.error(error)
sys.exit(1)
return config
# TODO: replace validate() with this version of API.
def _validate(raw_config, schemas):
# Get validated config
config = {}
errors = []
errors = {}
for schema in schemas:
try:
items = raw_config[schema.name].items()
config[schema.name] = schema.convert(items)
except KeyError:
errors.append('%s: section not found.' % name)
except exceptions.ConfigError as error:
for key in error:
errors.append('%s/%s: %s' % (schema.name, key, error[key]))
# TODO: raise errors instead of return
values = raw_config.get(schema.name, {})
result, error = schema.deserialize(values)
if error:
errors[schema.name] = error
if result:
config[schema.name] = result
return config, errors
def _format(config, comments, schemas, display):
output = []
for schema in schemas:
serialized = schema.serialize(config.get(schema.name, {}), display=display)
if not serialized:
continue
output.append(b'[%s]' % bytes(schema.name))
for key, value in serialized.items():
comment = bytes(comments.get(schema.name, {}).get(key, ''))
output.append(b'%s =' % bytes(key))
if value is not None:
output[-1] += b' ' + value
if comment:
output[-1] += b' # ' + comment.capitalize()
output.append(b'')
return b'\n'.join(output)
def parse_override(override):
"""Parse ``section/key=value`` command line overrides"""
section, remainder = override.split('/', 1)

mopidy/config/convert.py (new file, 125 lines added)
View File

@ -0,0 +1,125 @@
from __future__ import unicode_literals
import io
import os.path
import sys
from mopidy import config as config_lib, ext
from mopidy.utils import path
def load():
settings_file = path.expand_path('$XDG_CONFIG_DIR/mopidy/settings.py')
print 'Checking %s' % settings_file
setting_globals = {}
try:
execfile(settings_file, setting_globals)
except Exception as e:
print 'Problem loading settings: %s' % e
return setting_globals
def convert(settings):
config = {}
def helper(confval, setting_name):
if settings.get(setting_name) is not None:
section, key = confval.split('/')
config.setdefault(section, {})[key] = settings[setting_name]
# Perform all the simple mappings using our helper:
helper('logging/console_format', 'CONSOLE_LOG_FORMAT')
helper('logging/debug_format', 'DEBUG_LOG_FORMAT')
helper('logging/debug_file', 'DEBUG_LOG_FILENAME')
helper('audio/mixer', 'MIXER')
helper('audio/mixer_track', 'MIXER_TRACK')
helper('audio/output', 'OUTPUT')
helper('proxy/hostname', 'SPOTIFY_PROXY_HOST')
helper('proxy/username', 'SPOTIFY_PROXY_USERNAME')
helper('proxy/password', 'SPOTIFY_PROXY_PASSWORD')
helper('local/media_dir', 'LOCAL_MUSIC_PATH')
helper('local/playlists_dir', 'LOCAL_PLAYLIST_PATH')
helper('local/tag_cache_file', 'LOCAL_TAG_CACHE_FILE')
helper('spotify/username', 'SPOTIFY_USERNAME')
helper('spotify/password', 'SPOTIFY_PASSWORD')
helper('spotify/bitrate', 'SPOTIFY_BITRATE')
helper('spotify/timeout', 'SPOTIFY_TIMEOUT')
helper('spotify/cache_dir', 'SPOTIFY_CACHE_PATH')
helper('stream/protocols', 'STREAM_PROTOCOLS')
helper('http/hostname', 'HTTP_SERVER_HOSTNAME')
helper('http/port', 'HTTP_SERVER_PORT')
helper('http/static_dir', 'HTTP_SERVER_STATIC_DIR')
helper('mpd/hostname', 'MPD_SERVER_HOSTNAME')
helper('mpd/port', 'MPD_SERVER_PORT')
helper('mpd/password', 'MPD_SERVER_PASSWORD')
helper('mpd/max_connections', 'MPD_SERVER_MAX_CONNECTIONS')
helper('mpd/connection_timeout', 'MPD_SERVER_CONNECTION_TIMEOUT')
helper('mpris/desktop_file', 'DESKTOP_FILE')
helper('scrobbler/username', 'LASTFM_USERNAME')
helper('scrobbler/password', 'LASTFM_PASSWORD')
# Assume FRONTENDS/BACKENDS = None implies all enabled, otherwise disable
# if our module path is missing from the setting.
frontends = settings.get('FRONTENDS')
if frontends is not None:
if 'mopidy.frontends.http.HttpFrontend' not in frontends:
config.setdefault('http', {})['enabled'] = False
if 'mopidy.frontends.mpd.MpdFrontend' not in frontends:
config.setdefault('mpd', {})['enabled'] = False
if 'mopidy.frontends.lastfm.LastfmFrontend' not in frontends:
config.setdefault('scrobbler', {})['enabled'] = False
if 'mopidy.frontends.mpris.MprisFrontend' not in frontends:
config.setdefault('mpris', {})['enabled'] = False
backends = settings.get('BACKENDS')
if backends is not None:
if 'mopidy.backends.local.LocalBackend' not in backends:
config.setdefault('local', {})['enabled'] = False
if 'mopidy.backends.spotify.SpotifyBackend' not in backends:
config.setdefault('spotify', {})['enabled'] = False
if 'mopidy.backends.stream.StreamBackend' not in backends:
config.setdefault('stream', {})['enabled'] = False
return config
def main():
settings = load()
if not settings:
return
config = convert(settings)
known = [
'spotify', 'scrobbler', 'mpd', 'mpris', 'local', 'stream', 'http']
extensions = [e for e in ext.load_extensions() if e.ext_name in known]
print b'Converted config:\n'
print config_lib.format(config, extensions)
conf_file = path.expand_path('$XDG_CONFIG_DIR/mopidy/mopidy.conf')
if os.path.exists(conf_file):
print '%s exists, exiting.' % conf_file
sys.exit(1)
print 'Write new config to %s? [yN]' % conf_file,
if raw_input() != 'y':
print 'Not saving, exiting.'
sys.exit(0)
serialized_config = config_lib.format(config, extensions, display=False)
with io.open(conf_file, 'wb') as filehandle:
filehandle.write(serialized_config)
print 'Done.'

View File

@ -3,7 +3,7 @@ console_format = %(levelname)-8s %(message)s
debug_format = %(levelname)-8s %(asctime)s [%(process)d:%(threadName)s] %(name)s\n %(message)s
debug_file = mopidy.log
[logging.levels]
[loglevels]
pykka = info
[audio]

View File

@ -1,8 +1,12 @@
from __future__ import unicode_literals
from mopidy import exceptions
import collections
from mopidy.config import types
# TODO: 2.6 cleanup (#344).
ordered_dict = getattr(collections, 'OrderedDict', dict)
def _did_you_mean(name, choices):
"""Suggest most likely setting based on levenshtein."""
@ -40,9 +44,11 @@ class ConfigSchema(object):
"""Logical group of config values that correspond to a config section.
Schemas are set up by assigning config keys with config values to
instances. Once set up, :meth:`convert` can be called with a list of
``(key, value)`` tuples to process. For convenience we also support a
:meth:`format` method that can be used for printing out the converted values.
instances. Once set up, :meth:`deserialize` can be called with a dict of
values to process. For convenience we also provide a :meth:`format` method
that can be used for converting the values to a dict that can be printed and
:meth:`serialize` for converting the values to a form suitable for
persistence.
"""
# TODO: Use collections.OrderedDict once 2.6 support is gone (#344)
def __init__(self, name):
@ -58,43 +64,38 @@ class ConfigSchema(object):
def __getitem__(self, key):
return self._schema[key]
def format(self, values):
"""Returns the schema as a config section with the given ``values``
filled in"""
# TODO: should the output be encoded utf-8 since we use that in
# serialize for strings?
lines = ['[%s]' % self.name]
for key in self._order:
value = values.get(key)
if value is not None:
lines.append('%s = %s' % (
key, self._schema[key].format(value)))
return '\n'.join(lines)
def deserialize(self, values):
"""Validates the given ``values`` using the config schema.
def convert(self, items):
"""Validates the given ``items`` using the config schema and returns
clean values"""
Returns a tuple with cleaned values and errors."""
errors = {}
values = {}
result = {}
for key, value in items:
for key, value in values.items():
try:
values[key] = self._schema[key].deserialize(value)
result[key] = self._schema[key].deserialize(value)
except KeyError: # not in our schema
errors[key] = 'unknown config key.'
suggestion = _did_you_mean(key, self._schema.keys())
if suggestion:
errors[key] += ' Did you mean %s?' % suggestion
except ValueError as e: # deserialization failed
result[key] = None
errors[key] = str(e)
for key in self._schema:
if key not in values and key not in errors:
if key not in result and key not in errors:
result[key] = None
errors[key] = 'config key not found.'
if errors:
raise exceptions.ConfigError(errors)
return values
return result, errors
def serialize(self, values, display=False):
result = ordered_dict() # TODO: 2.6 cleanup (#344).
for key in self._order:
if key in values:
result[key] = self._schema[key].serialize(values[key], display)
return result
class ExtensionConfigSchema(ConfigSchema):
@ -106,6 +107,8 @@ class ExtensionConfigSchema(ConfigSchema):
super(ExtensionConfigSchema, self).__init__(name)
self['enabled'] = types.Boolean()
# TODO: override serialize to gate on enabled=true?
class LogLevelConfigSchema(object):
"""Special cased schema for handling a config section with loglevels.
@ -118,25 +121,20 @@ class LogLevelConfigSchema(object):
self.name = name
self._config_value = types.LogLevel()
def format(self, values):
lines = ['[%s]' % self.name]
for key, value in sorted(values.items()):
if value is not None:
lines.append('%s = %s' % (
key, self._config_value.format(value)))
return '\n'.join(lines)
def convert(self, items):
def deserialize(self, values):
errors = {}
values = {}
result = {}
for key, value in items:
for key, value in values.items():
try:
if value.strip():
values[key] = self._config_value.deserialize(value)
result[key] = self._config_value.deserialize(value)
except ValueError as e: # deserialization failed
result[key] = None
errors[key] = str(e)
return result, errors
if errors:
raise exceptions.ConfigError(errors)
return values
def serialize(self, values, display=False):
result = ordered_dict() # TODO: 2.6 cleanup (#344)
for key in sorted(values.keys()):
result[key] = self._config_value.serialize(values[key], display)
return result
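A minimal usage sketch (assumptions, not code from this changeset) of the
reworked schema API above: ``deserialize()`` now takes a dict of raw values
and returns a ``(result, errors)`` tuple, while ``serialize()`` converts
values back to strings and can mask secrets for display. The section name
``example`` and the password value are illustrative::

    from mopidy import config

    schema = config.ConfigSchema('example')
    schema['enabled'] = config.Boolean()
    schema['password'] = config.Secret(optional=True)

    # Raw string values, e.g. as read from a config file by the parser.
    result, errors = schema.deserialize({'enabled': 'true', 'password': 's3cret'})
    assert result['enabled'] is True and not errors

    # display=True masks the secret as '********' for human consumption.
    print schema.serialize(result, display=True)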

View File

@ -8,6 +8,30 @@ from mopidy.utils import path
from mopidy.config import validators
def decode(value):
if isinstance(value, unicode):
return value
# TODO: only unescape \n \t and \\?
return value.decode('string-escape').decode('utf-8')
def encode(value):
if not isinstance(value, unicode):
return value
for char in ('\\', '\n', '\t'): # TODO: more escapes?
value = value.replace(char, char.encode('unicode-escape'))
return value.encode('utf-8')
class ExpandedPath(bytes):
def __new__(self, value):
expanded = path.expand_path(value)
return super(ExpandedPath, self).__new__(self, expanded)
def __init__(self, value):
self.original = value
class ConfigValue(object):
"""Represents a config key's value and how to handle it.
@ -25,95 +49,90 @@ class ConfigValue(object):
the code interacting with the config should simply skip None config values.
"""
choices = None
"""
Collection of valid choices for converted value. Must be combined with
:func:`~mopidy.config.validators.validate_choice` in :meth:`deserialize`
to do anything.
"""
minimum = None
"""
Minimum of converted value. Must be combined with
:func:`~mopidy.config.validators.validate_minimum` in :meth:`deserialize`
to do anything.
"""
maximum = None
"""
Maximum of converted value. Must be combined with
:func:`~mopidy.config.validators.validate_maximum` in :meth:`deserialize`
to do anything.
"""
optional = None
"""Indicate if this field is required."""
secret = None
"""Indicate if we should mask the when printing for human consumption."""
def __init__(self, **kwargs):
self.choices = kwargs.get('choices')
self.minimum = kwargs.get('minimum')
self.maximum = kwargs.get('maximum')
self.optional = kwargs.get('optional')
self.secret = kwargs.get('secret')
def deserialize(self, value):
"""Cast raw string to appropriate type."""
return value
def serialize(self, value):
def serialize(self, value, display=False):
"""Convert value back to string for saving."""
return str(value)
def format(self, value):
"""Format value for display."""
if self.secret and value is not None:
return '********'
return self.serialize(value)
if value is None:
return b''
return bytes(value)
class String(ConfigValue):
"""String value
"""String value.
Supported kwargs: ``optional``, ``choices``, and ``secret``.
Is decoded as utf-8 and \\n \\t escapes should work and be preserved.
"""
def __init__(self, optional=False, choices=None):
self._required = not optional
self._choices = choices
def deserialize(self, value):
value = value.strip()
validators.validate_required(value, not self.optional)
validators.validate_choice(value, self.choices)
value = decode(value).strip()
validators.validate_required(value, self._required)
validators.validate_choice(value, self._choices)
if not value:
return None
return value
def serialize(self, value):
return value.encode('utf-8').encode('string-escape')
def serialize(self, value, display=False):
if value is None:
return b''
return encode(value)
class Secret(ConfigValue):
"""Secret value.
Should be used for passwords, auth tokens etc. Deserializing will not
convert to unicode. Will mask value when being displayed.
"""
def __init__(self, optional=False, choices=None):
self._required = not optional
def deserialize(self, value):
value = value.strip()
validators.validate_required(value, self._required)
if not value:
return None
return value
def serialize(self, value, display=False):
if isinstance(value, unicode):
value = value.encode('utf-8')
if value is None:
return b''
elif display:
return b'********'
return value
class Integer(ConfigValue):
"""Integer value
"""Integer value."""
def __init__(self, minimum=None, maximum=None, choices=None):
self._minimum = minimum
self._maximum = maximum
self._choices = choices
Supported kwargs: ``choices``, ``minimum``, ``maximum``, and ``secret``
"""
def deserialize(self, value):
value = int(value)
validators.validate_choice(value, self.choices)
validators.validate_minimum(value, self.minimum)
validators.validate_maximum(value, self.maximum)
validators.validate_choice(value, self._choices)
validators.validate_minimum(value, self._minimum)
validators.validate_maximum(value, self._maximum)
return value
class Boolean(ConfigValue):
"""Boolean value
"""Boolean value.
Accepts ``1``, ``yes``, ``true``, and ``on`` with any casing as
:class:`True`.
Accepts ``0``, ``no``, ``false``, and ``off`` with any casing as
:class:`False`.
Supported kwargs: ``secret``
"""
true_values = ('1', 'yes', 'true', 'on')
false_values = ('0', 'no', 'false', 'off')
@ -123,66 +142,71 @@ class Boolean(ConfigValue):
return True
elif value.lower() in self.false_values:
return False
raise ValueError('invalid value for boolean: %r' % value)
def serialize(self, value):
def serialize(self, value, display=False):
if value:
return 'true'
return b'true'
else:
return 'false'
return b'false'
class List(ConfigValue):
"""List value
"""List value.
Supports elements split by commas or newlines.
Supported kwargs: ``optional`` and ``secret``
Supports elements split by commas or newlines. Newlines take precedence, and
empty list items will be filtered out.
"""
def deserialize(self, value):
validators.validate_required(value, not self.optional)
if '\n' in value:
values = re.split(r'\s*\n\s*', value.strip())
else:
values = re.split(r'\s*,\s*', value.strip())
return tuple([v for v in values if v])
def __init__(self, optional=False):
self._required = not optional
def serialize(self, value):
return '\n ' + '\n '.join(v.encode('utf-8') for v in value)
def deserialize(self, value):
if b'\n' in value:
values = re.split(r'\s*\n\s*', value)
else:
values = re.split(r'\s*,\s*', value)
values = (decode(v).strip() for v in values)
values = filter(None, values)
validators.validate_required(values, self._required)
return tuple(values)
def serialize(self, value, display=False):
return b'\n ' + b'\n '.join(encode(v) for v in value if v)
class LogLevel(ConfigValue):
"""Log level value
"""Log level value.
Expects one of ``critical``, ``error``, ``warning``, ``info``, ``debug``
with any casing.
Supported kwargs: ``secret``
"""
levels = {
'critical': logging.CRITICAL,
'error': logging.ERROR,
'warning': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
b'critical': logging.CRITICAL,
b'error': logging.ERROR,
b'warning': logging.WARNING,
b'info': logging.INFO,
b'debug': logging.DEBUG,
}
def deserialize(self, value):
validators.validate_choice(value.lower(), self.levels.keys())
return self.levels.get(value.lower())
def serialize(self, value):
return dict((v, k) for k, v in self.levels.items()).get(value)
def serialize(self, value, display=False):
lookup = dict((v, k) for k, v in self.levels.items())
if value in lookup:
return lookup[value]
return b''
class Hostname(ConfigValue):
"""Hostname value
"""Network hostname value."""
Supported kwargs: ``optional`` and ``secret``
"""
def deserialize(self, value):
validators.validate_required(value, not self.optional)
def __init__(self, optional=False):
self._required = not optional
def deserialize(self, value, display=False):
validators.validate_required(value, self._required)
if not value.strip():
return None
try:
@ -193,26 +217,14 @@ class Hostname(ConfigValue):
class Port(Integer):
"""Port value
"""Network port value.
Expects integer in the range 1-65535
Supported kwargs: ``choices`` and ``secret``
Expects an integer in the range 0-65535; zero tells the kernel to simply
allocate a port for us.
"""
# TODO: consider probing if port is free or not?
def __init__(self, **kwargs):
super(Port, self).__init__(**kwargs)
self.minimum = 1
self.maximum = 2 ** 16 - 1
class ExpandedPath(bytes):
def __new__(self, value):
expanded = path.expand_path(value)
return super(ExpandedPath, self).__new__(self, expanded)
def __init__(self, value):
self.original = value
def __init__(self, choices=None):
super(Port, self).__init__(minimum=0, maximum=2**16-1, choices=choices)
class Path(ConfigValue):
@ -232,15 +244,19 @@ class Path(ConfigValue):
Supported kwargs: ``optional``, ``choices``, and ``secret``
"""
def __init__(self, optional=False, choices=None):
self._required = not optional
self._choices = choices
def deserialize(self, value):
value = value.strip()
validators.validate_required(value, not self.optional)
validators.validate_choice(value, self.choices)
validators.validate_required(value, self._required)
validators.validate_choice(value, self._choices)
if not value:
return None
return ExpandedPath(value)
def serialize(self, value):
def serialize(self, value, display=False):
if isinstance(value, ExpandedPath):
return value.original
return value
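Another rough sketch (not from this changeset) exercising a few of the value
types above, showing the new ``deserialize()``/``serialize(value, display)``
split; the input strings are illustrative::

    from mopidy.config import types

    types.List().deserialize(b'foo, bar, , baz')      # -> (u'foo', u'bar', u'baz')
    types.LogLevel().deserialize(b'WARNING')           # -> logging.WARNING
    types.Secret().serialize('s3cret', display=True)   # -> b'********'
    types.Path().deserialize(b'$XDG_CONFIG_DIR/mopidy').original  # unexpanded input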

View File

@ -9,7 +9,7 @@ def validate_required(value, required):
Normally called in :meth:`~mopidy.config.types.ConfigValue.deserialize` on
the raw string, _not_ the converted value.
"""
if required and not value.strip():
if required and not value:
raise ValueError('must be set.')

View File

@ -16,23 +16,5 @@ class MopidyException(Exception):
self._message = message
class ConfigError(MopidyException):
def __init__(self, errors):
self._errors = errors
def __getitem__(self, key):
return self._errors[key]
def __iter__(self):
return self._errors.iterkeys()
@property
def message(self):
lines = []
for key, msg in self._errors.items():
lines.append('%s: %s' % (key, msg))
return '\n'.join(lines)
class ExtensionError(MopidyException):
pass

View File

@ -34,9 +34,9 @@ class Extension(object):
"""
def get_default_config(self):
"""The extension's default config as a string
"""The extension's default config as a bytestring
:returns: string
:returns: bytes
"""
raise NotImplementedError(
'Add at least a config section with "enabled = true"')
@ -91,61 +91,68 @@ class Extension(object):
def load_extensions():
extensions = []
"""Find all installed extensions.
:returns: list of installed extensions
"""
installed_extensions = []
for entry_point in pkg_resources.iter_entry_points('mopidy.ext'):
logger.debug('Loading entry point: %s', entry_point)
try:
extension_class = entry_point.load()
except pkg_resources.DistributionNotFound as ex:
logger.info(
'Disabled extension %s: Dependency %s not found',
entry_point.name, ex)
continue
extension_class = entry_point.load(require=False)
extension = extension_class()
extension.entry_point = entry_point
installed_extensions.append(extension)
logger.debug(
'Loaded extension: %s %s', extension.dist_name, extension.version)
if entry_point.name != extension.ext_name:
logger.warning(
'Disabled extension %(ep)s: entry point name (%(ep)s) '
'does not match extension name (%(ext)s)',
{'ep': entry_point.name, 'ext': extension.ext_name})
continue
try:
extension.validate_environment()
except exceptions.ExtensionError as ex:
logger.info(
'Disabled extension %s: %s', entry_point.name, ex.message)
continue
extensions.append(extension)
names = (e.ext_name for e in extensions)
names = (e.ext_name for e in installed_extensions)
logging.debug('Discovered extensions: %s', ', '.join(names))
return extensions
return installed_extensions
def filter_enabled_extensions(raw_config, extensions):
boolean = config_lib.Boolean()
enabled_extensions = []
enabled_names = []
disabled_names = []
def validate_extension(extension):
"""Verify extension's dependencies and environment.
for extension in extensions:
# TODO: handle key and value errors.
enabled = raw_config[extension.ext_name]['enabled']
if boolean.deserialize(enabled):
enabled_extensions.append(extension)
enabled_names.append(extension.ext_name)
else:
disabled_names.append(extension.ext_name)
:param extension: an extension to check
:returns: whether the extension should be run
"""
logging.info(
'Enabled extensions: %s', ', '.join(enabled_names) or 'none')
logging.info(
'Disabled extensions: %s', ', '.join(disabled_names) or 'none')
return enabled_extensions
logger.debug('Validating extension: %s', extension.ext_name)
if extension.ext_name != extension.entry_point.name:
logger.warning(
'Disabled extension %(ep)s: entry point name (%(ep)s) '
'does not match extension name (%(ext)s)',
{'ep': extension.entry_point.name, 'ext': extension.ext_name})
return False
try:
extension.entry_point.require()
except pkg_resources.DistributionNotFound as ex:
logger.info(
'Disabled extension %s: Dependency %s not found',
extension.ext_name, ex)
return False
try:
extension.validate_environment()
except exceptions.ExtensionError as ex:
logger.info(
'Disabled extension %s: %s', extension.ext_name, ex.message)
return False
return True
def register_gstreamer_elements(enabled_extensions):
"""Registers custom GStreamer elements from extensions.
:param enabled_extensions: list of enabled extensions
"""
for extension in enabled_extensions:
logger.debug(
'Registering GStreamer elements for: %s', extension.ext_name)
extension.register_gstreamer_elements()

View File

@ -4,5 +4,5 @@ hostname = 127.0.0.1
port = 6680
static_dir =
[logging.levels]
[loglevels]
cherrypy = warning

View File

@ -20,7 +20,7 @@ class Extension(ext.Extension):
schema = super(Extension, self).get_config_schema()
schema['hostname'] = config.Hostname()
schema['port'] = config.Port()
schema['password'] = config.String(optional=True, secret=True)
schema['password'] = config.Secret(optional=True)
schema['max_connections'] = config.Integer(minimum=1)
schema['connection_timeout'] = config.Integer(minimum=1)
return schema

View File

@ -19,7 +19,7 @@ class Extension(ext.Extension):
def get_config_schema(self):
schema = super(Extension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.String(secret=True)
schema['password'] = config.Secret()
return schema
def validate_environment(self):

View File

@ -48,17 +48,17 @@ def main():
config_overrides = []
# TODO: decide if we want to avoid this boilerplate somehow.
logging_config = config_lib.load(config_files, config_overrides)
# Initial config without extensions to bootstrap logging.
logging_config, _ = config_lib.load(config_files, [], config_overrides)
log.setup_root_logger()
log.setup_console_logging(logging_config, options.verbosity_level)
extensions = ext.load_extensions()
raw_config = config_lib.load(config_files, config_overrides, extensions)
extensions = ext.filter_enabled_extensions(raw_config, extensions)
config = config_lib.validate(
raw_config, config_lib.core_schemas, extensions)
config, errors = config_lib.load(config_files, extensions, config_overrides)
log.setup_log_levels(config)
# TODO: missing error checking and other default setup code.
tracks = []
def store(data):

View File

@ -1,5 +1,6 @@
from __future__ import unicode_literals
import functools
import os
import platform
import sys
@ -8,16 +9,16 @@ import pygst
pygst.require('0.10')
import gst
import pykka
import pkg_resources
from . import formatting
def list_deps_optparse_callback(*args):
def show_deps_optparse_callback(*args):
"""
Prints a list of all dependencies.
Called by optparse when Mopidy is run with the :option:`--list-deps`
Called by optparse when Mopidy is run with the :option:`--show-deps`
option.
"""
print format_dependency_list()
@ -26,32 +27,47 @@ def list_deps_optparse_callback(*args):
def format_dependency_list(adapters=None):
if adapters is None:
dist_names = set([
ep.dist.project_name for ep in
pkg_resources.iter_entry_points('mopidy.ext')
if ep.dist.project_name != 'Mopidy'])
dist_infos = [
functools.partial(pkg_info, dist_name)
for dist_name in dist_names]
adapters = [
platform_info,
python_info,
functools.partial(pkg_info, 'Mopidy', True)
] + dist_infos + [
gstreamer_info,
pykka_info,
pyspotify_info,
pylast_info,
dbus_info,
serial_info,
cherrypy_info,
ws4py_info,
]
return '\n'.join([_format_dependency(a()) for a in adapters])
def _format_dependency(dep_info):
lines = []
for adapter in adapters:
dep_info = adapter()
lines.append('%(name)s: %(version)s' % {
'name': dep_info['name'],
'version': dep_info.get('version', 'not found'),
})
if 'path' in dep_info:
lines.append(' Imported from: %s' % (
os.path.dirname(dep_info['path'])))
if 'other' in dep_info:
lines.append(' Other: %s' % (
formatting.indent(dep_info['other'])),)
if 'version' not in dep_info:
lines.append('%s: not found' % dep_info['name'])
else:
lines.append('%s: %s from %s' % (
dep_info['name'],
dep_info['version'],
os.path.dirname(dep_info.get('path', 'none')),
))
if 'other' in dep_info:
lines.append(' Detailed information: %s' % (
formatting.indent(dep_info['other'], places=4)),)
if dep_info.get('dependencies', []):
for sub_dep_info in dep_info['dependencies']:
sub_dep_lines = _format_dependency(sub_dep_info)
lines.append(
formatting.indent(sub_dep_lines, places=2, singles=True))
return '\n'.join(lines)
@ -71,13 +87,46 @@ def python_info():
}
def pkg_info(project_name=None, include_extras=False):
if project_name is None:
project_name = 'Mopidy'
distribution = pkg_resources.get_distribution(project_name)
extras = include_extras and distribution.extras or []
dependencies = [
pkg_info(d) for d in distribution.requires(extras)]
return {
'name': distribution.project_name,
'version': distribution.version,
'path': distribution.location,
'dependencies': dependencies,
}
def gstreamer_info():
other = []
other.append('Python wrapper: gst-python %s' % (
'.'.join(map(str, gst.get_pygst_version()))))
other.append('Relevant elements:')
found_elements = []
missing_elements = []
for name, status in _gstreamer_check_elements():
other.append(' %s: %s' % (name, 'OK' if status else 'not found'))
if status:
found_elements.append(name)
else:
missing_elements.append(name)
other.append('Relevant elements:')
other.append(' Found:')
for element in found_elements:
other.append(' %s' % element)
if not found_elements:
other.append(' none')
other.append(' Not found:')
for element in missing_elements:
other.append(' %s' % element)
if not missing_elements:
other.append(' none')
return {
'name': 'GStreamer',
'version': '.'.join(map(str, gst.get_gst_version())),
@ -134,82 +183,3 @@ def _gstreamer_check_elements():
gst.registry_get_default().get_feature_list(gst.TYPE_ELEMENT_FACTORY)]
return [
(element, element in known_elements) for element in elements_to_check]
def pykka_info():
return {
'name': 'Pykka',
'version': pykka.__version__,
'path': pykka.__file__,
}
def pyspotify_info():
dep_info = {'name': 'pyspotify'}
try:
import spotify
if hasattr(spotify, '__version__'):
dep_info['version'] = spotify.__version__
else:
dep_info['version'] = '< 1.3'
dep_info['path'] = spotify.__file__
dep_info['other'] = 'Built for libspotify API version %d' % (
spotify.api_version,)
except ImportError:
pass
return dep_info
def pylast_info():
dep_info = {'name': 'pylast'}
try:
import pylast
dep_info['version'] = pylast.__version__
dep_info['path'] = pylast.__file__
except ImportError:
pass
return dep_info
def dbus_info():
dep_info = {'name': 'dbus-python'}
try:
import dbus
dep_info['version'] = dbus.__version__
dep_info['path'] = dbus.__file__
except ImportError:
pass
return dep_info
def serial_info():
dep_info = {'name': 'pyserial'}
try:
import serial
dep_info['version'] = serial.VERSION
dep_info['path'] = serial.__file__
except ImportError:
pass
return dep_info
def cherrypy_info():
dep_info = {'name': 'cherrypy'}
try:
import cherrypy
dep_info['version'] = cherrypy.__version__
dep_info['path'] = cherrypy.__file__
except ImportError:
pass
return dep_info
def ws4py_info():
dep_info = {'name': 'ws4py'}
try:
import ws4py
dep_info['version'] = ws4py.__version__
dep_info['path'] = ws4py.__file__
except ImportError:
pass
return dep_info

View File

@ -4,13 +4,15 @@ import re
import unicodedata
def indent(string, places=4, linebreak='\n'):
def indent(string, places=4, linebreak='\n', singles=False):
lines = string.split(linebreak)
if len(lines) == 1:
if not singles and len(lines) == 1:
return string
result = ''
for line in lines:
result += linebreak + ' ' * places + line
for i, line in enumerate(lines):
lines[i] = ' ' * places + line
result = linebreak.join(lines)
if not singles:
result = linebreak + result
return result
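A quick sketch (not from this changeset) of how the updated helper behaves
with and without the new ``singles`` flag; the input strings are
illustrative::

    from mopidy.utils import formatting

    formatting.indent('a\nb', places=2)          # -> '\n  a\n  b'
    formatting.indent('one line')                 # single lines are returned as-is
    formatting.indent('one line', singles=True)   # -> '    one line'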

View File

@ -20,7 +20,7 @@ def setup_logging(config, verbosity_level, save_debug_log):
def setup_log_levels(config):
for name, level in config['logging.levels'].items():
for name, level in config['loglevels'].items():
logging.getLogger(name).setLevel(level)

View File

@ -8,12 +8,8 @@ import threading
from pykka import ActorDeadError
from pykka.registry import ActorRegistry
from mopidy import exceptions
logger = logging.getLogger('mopidy.utils.process')
SIGNALS = dict(
(k, v) for v, k in signal.__dict__.iteritems()
if v.startswith('SIG') and not v.startswith('SIG_'))

View File

@ -31,7 +31,6 @@ setup(
'spotify': ['pyspotify >= 1.9, < 1.11'],
'scrobbler': ['pylast >= 0.5.7'],
'http': ['cherrypy >= 3.2.2', 'ws4py >= 0.2.3'],
'external_mixers': ['pyserial'],
},
test_suite='nose.collector',
tests_require=[
@ -43,6 +42,7 @@ setup(
'console_scripts': [
'mopidy = mopidy.__main__:main',
'mopidy-scan = mopidy.scanner:main',
'mopidy-convert-config = mopidy.config.convert:main',
],
'mopidy.ext': [
'http = mopidy.frontends.http:Extension [http]',

tests/config/__init__.py (new empty file)
View File

View File

@ -2,7 +2,7 @@ from __future__ import unicode_literals
import mock
from mopidy import config, exceptions
from mopidy import config
from tests import unittest, path_to_data_dir
@ -12,14 +12,14 @@ class LoadConfigTest(unittest.TestCase):
self.assertEqual({}, config._load([], [], []))
def test_load_single_default(self):
default = '[foo]\nbar = baz'
default = b'[foo]\nbar = baz'
expected = {'foo': {'bar': 'baz'}}
result = config._load([], [default], [])
self.assertEqual(expected, result)
def test_load_defaults(self):
default1 = '[foo]\nbar = baz'
default2 = '[foo2]\n'
default1 = b'[foo]\nbar = baz'
default2 = b'[foo2]\n'
expected = {'foo': {'bar': 'baz'}, 'foo2': {}}
result = config._load([], [default1, default2], [])
self.assertEqual(expected, result)
@ -53,38 +53,38 @@ class LoadConfigTest(unittest.TestCase):
class ValidateTest(unittest.TestCase):
def setUp(self):
self.schema = mock.Mock()
self.schema.name = 'foo'
self.schema = config.ConfigSchema('foo')
self.schema['bar'] = config.ConfigValue()
def test_empty_config_no_schemas(self):
conf, errors = config._validate({}, [])
self.assertEqual({}, conf)
self.assertEqual([], errors)
self.assertEqual({}, errors)
def test_config_no_schemas(self):
raw_config = {'foo': {'bar': 'baz'}}
conf, errors = config._validate(raw_config, [])
self.assertEqual({}, conf)
self.assertEqual([], errors)
self.assertEqual({}, errors)
def test_empty_config_single_schema(self):
conf, errors = config._validate({}, [self.schema])
self.assertEqual({}, conf)
self.assertEqual(['foo: section not found.'], errors)
self.assertEqual({'foo': {'bar': None}}, conf)
self.assertEqual({'foo': {'bar': 'config key not found.'}}, errors)
def test_config_single_schema(self):
raw_config = {'foo': {'bar': 'baz'}}
self.schema.convert.return_value = {'baz': 'bar'}
conf, errors = config._validate(raw_config, [self.schema])
self.assertEqual({'foo': {'baz': 'bar'}}, conf)
self.assertEqual([], errors)
self.assertEqual({'foo': {'bar': 'baz'}}, conf)
self.assertEqual({}, errors)
def test_config_single_schema_config_error(self):
raw_config = {'foo': {'bar': 'baz'}}
self.schema.convert.side_effect = exceptions.ConfigError({'bar': 'bad'})
self.schema['bar'] = mock.Mock()
self.schema['bar'].deserialize.side_effect = ValueError('bad')
conf, errors = config._validate(raw_config, [self.schema])
self.assertEqual(['foo/bar: bad'], errors)
self.assertEqual({}, conf)
self.assertEqual({'foo': {'bar': None}}, conf)
self.assertEqual({'foo': {'bar': 'bad'}}, errors)
# TODO: add more tests
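
As the updated tests show, config._validate now returns a (config, errors) pair of nested dicts keyed by section name instead of a flat error list. A minimal sketch consistent with the assertions above (not the actual implementation):

def _validate(raw_config, schemas):
    # Each schema deserializes its own section; per-key errors are collected
    # per section instead of being raised as a ConfigError.
    config, errors = {}, {}
    for schema in schemas:
        result, error = schema.deserialize(raw_config.get(schema.name, {}))
        if result:
            config[schema.name] = result
        if error:
            errors[schema.name] = error
    return config, errors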

View File

@@ -3,10 +3,9 @@ from __future__ import unicode_literals
import logging
import mock
from mopidy import exceptions
from mopidy.config import schemas, types
from tests import unittest
from tests import unittest, any_unicode
class ConfigSchemaTest(unittest.TestCase):
@@ -17,73 +16,65 @@ class ConfigSchemaTest(unittest.TestCase):
self.schema['baz'] = mock.Mock()
self.values = {'bar': '123', 'foo': '456', 'baz': '678'}
def test_format(self):
self.schema['foo'].format.return_value = 'qwe'
self.schema['bar'].format.return_value = 'asd'
self.schema['baz'].format.return_value = 'zxc'
def test_deserialize(self):
self.schema.deserialize(self.values)
expected = ['[test]', 'foo = qwe', 'bar = asd', 'baz = zxc']
result = self.schema.format(self.values)
self.assertEqual('\n'.join(expected), result)
def test_format_unkwown_value(self):
self.schema['foo'].format.return_value = 'qwe'
self.schema['bar'].format.return_value = 'asd'
self.schema['baz'].format.return_value = 'zxc'
self.values['unknown'] = 'rty'
result = self.schema.format(self.values)
self.assertNotIn('unknown = rty', result)
def test_convert(self):
self.schema.convert(self.values.items())
def test_convert_with_missing_value(self):
def test_deserialize_with_missing_value(self):
del self.values['foo']
with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertEqual({'foo': any_unicode}, errors)
self.assertIsNone(result.pop('foo'))
self.assertIsNotNone(result.pop('bar'))
self.assertIsNotNone(result.pop('baz'))
self.assertEqual({}, result)
self.assertIn('not found', cm.exception['foo'])
def test_convert_with_extra_value(self):
def test_deserialize_with_extra_value(self):
self.values['extra'] = '123'
with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertEqual({'extra': any_unicode}, errors)
self.assertIsNotNone(result.pop('foo'))
self.assertIsNotNone(result.pop('bar'))
self.assertIsNotNone(result.pop('baz'))
self.assertEqual({}, result)
self.assertIn('unknown', cm.exception['extra'])
def test_convert_with_deserialization_error(self):
def test_deserialize_with_deserialization_error(self):
self.schema['foo'].deserialize.side_effect = ValueError('failure')
with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertEqual({'foo': 'failure'}, errors)
self.assertIsNone(result.pop('foo'))
self.assertIsNotNone(result.pop('bar'))
self.assertIsNotNone(result.pop('baz'))
self.assertEqual({}, result)
self.assertIn('failure', cm.exception['foo'])
def test_convert_with_multiple_deserialization_errors(self):
def test_deserialize_with_multiple_deserialization_errors(self):
self.schema['foo'].deserialize.side_effect = ValueError('failure')
self.schema['bar'].deserialize.side_effect = ValueError('other')
with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertEqual({'foo': 'failure', 'bar': 'other'}, errors)
self.assertIsNone(result.pop('foo'))
self.assertIsNone(result.pop('bar'))
self.assertIsNotNone(result.pop('baz'))
self.assertEqual({}, result)
self.assertIn('failure', cm.exception['foo'])
self.assertIn('other', cm.exception['bar'])
def test_convert_deserialization_unknown_and_missing_errors(self):
def test_deserialize_deserialization_unknown_and_missing_errors(self):
self.values['extra'] = '123'
self.schema['bar'].deserialize.side_effect = ValueError('failure')
del self.values['baz']
with self.assertRaises(exceptions.ConfigError) as cm:
self.schema.convert(self.values.items())
result, errors = self.schema.deserialize(self.values)
self.assertIn('unknown', errors['extra'])
self.assertNotIn('foo', errors)
self.assertIn('failure', errors['bar'])
self.assertIn('not found', errors['baz'])
self.assertIn('unknown', cm.exception['extra'])
self.assertNotIn('foo', cm.exception)
self.assertIn('failure', cm.exception['bar'])
self.assertIn('not found', cm.exception['baz'])
self.assertNotIn('unknown', result)
self.assertIn('foo', result)
self.assertIsNone(result['bar'])
self.assertIsNone(result['baz'])
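
A small usage sketch of the deserialize() contract exercised above, combining ConfigSchema with the value types tested later in this commit (section and key names are illustrative):

from mopidy import config
from mopidy.config import types

schema = config.ConfigSchema('mpd')
schema['hostname'] = types.Hostname()
schema['port'] = types.Port()

result, errors = schema.deserialize({'hostname': '127.0.0.1', 'port': '6600'})
# result == {'hostname': '127.0.0.1', 'port': 6600}, errors == {}

result, errors = schema.deserialize({'hostname': '127.0.0.1'})
# the missing 'port' key ends up as None in result and as a
# "not found" message in errors, instead of raising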
class ExtensionConfigSchemaTest(unittest.TestCase):
@@ -94,19 +85,12 @@ class ExtensionConfigSchemaTest(unittest.TestCase):
class LogLevelConfigSchemaTest(unittest.TestCase):
def test_conversion(self):
schema = schemas.LogLevelConfigSchema()
result = schema.convert([('foo.bar', 'DEBUG'), ('baz', 'INFO')])
schema = schemas.LogLevelConfigSchema('test')
result, errors = schema.deserialize({'foo.bar': 'DEBUG', 'baz': 'INFO'})
self.assertEqual(logging.DEBUG, result['foo.bar'])
self.assertEqual(logging.INFO, result['baz'])
def test_format(self):
schema = schemas.LogLevelConfigSchema('test')
values = {'foo.bar': logging.DEBUG, 'baz': logging.INFO}
expected = ['[test]', 'baz = info', 'foo.bar = debug']
result = schema.format(values)
self.assertEqual('\n'.join(expected), result)
class DidYouMeanTest(unittest.TestCase):
def testSuggestoins(self):

View File

@@ -1,3 +1,5 @@
# encoding: utf-8
from __future__ import unicode_literals
import logging
@@ -8,72 +10,129 @@ from mopidy.config import types
from tests import unittest
# TODO: DecodeTest and EncodeTest
class ConfigValueTest(unittest.TestCase):
def test_init(self):
value = types.ConfigValue()
self.assertIsNone(value.choices)
self.assertIsNone(value.maximum)
self.assertIsNone(value.minimum)
self.assertIsNone(value.optional)
self.assertIsNone(value.secret)
def test_init_with_params(self):
kwargs = {'choices': ['foo'], 'minimum': 0, 'maximum': 10,
'secret': True, 'optional': True}
value = types.ConfigValue(**kwargs)
self.assertEqual(['foo'], value.choices)
self.assertEqual(0, value.minimum)
self.assertEqual(10, value.maximum)
self.assertEqual(True, value.optional)
self.assertEqual(True, value.secret)
def test_deserialize_passes_through(self):
value = types.ConfigValue()
obj = object()
self.assertEqual(obj, value.deserialize(obj))
sentinel = object()
self.assertEqual(sentinel, value.deserialize(sentinel))
def test_serialize_conversion_to_string(self):
value = types.ConfigValue()
self.assertIsInstance(value.serialize(object()), basestring)
self.assertIsInstance(value.serialize(object()), bytes)
def test_format_uses_serialize(self):
def test_serialize_none(self):
value = types.ConfigValue()
obj = object()
self.assertEqual(value.serialize(obj), value.format(obj))
result = value.serialize(None)
self.assertIsInstance(result, bytes)
self.assertEqual(b'', result)
def test_format_masks_secrets(self):
value = types.ConfigValue(secret=True)
self.assertEqual('********', value.format(object()))
def test_serialize_supports_display(self):
value = types.ConfigValue()
self.assertIsInstance(value.serialize(object(), display=True), bytes)
class StringTest(unittest.TestCase):
def test_deserialize_conversion_success(self):
value = types.String()
self.assertEqual('foo', value.deserialize(' foo '))
self.assertEqual('foo', value.deserialize(b' foo '))
self.assertIsInstance(value.deserialize(b'foo'), unicode)
def test_deserialize_decodes_utf8(self):
value = types.String()
result = value.deserialize('æøå'.encode('utf-8'))
self.assertEqual('æøå', result)
def test_deserialize_does_not_double_encode_unicode(self):
value = types.String()
result = value.deserialize('æøå')
self.assertEqual('æøå', result)
def test_deserialize_handles_escapes(self):
value = types.String(optional=True)
result = value.deserialize(b'a\\t\\nb')
self.assertEqual('a\t\nb', result)
def test_deserialize_enforces_choices(self):
value = types.String(choices=['foo', 'bar', 'baz'])
self.assertEqual('foo', value.deserialize('foo'))
self.assertRaises(ValueError, value.deserialize, 'foobar')
self.assertEqual('foo', value.deserialize(b'foo'))
self.assertRaises(ValueError, value.deserialize, b'foobar')
def test_deserialize_enforces_required(self):
value = types.String()
self.assertRaises(ValueError, value.deserialize, '')
self.assertRaises(ValueError, value.deserialize, ' ')
self.assertRaises(ValueError, value.deserialize, b'')
def test_deserialize_respects_optional(self):
value = types.String(optional=True)
self.assertIsNone(value.deserialize(''))
self.assertIsNone(value.deserialize(' '))
self.assertIsNone(value.deserialize(b''))
self.assertIsNone(value.deserialize(b' '))
def test_serialize_string_escapes(self):
def test_deserialize_decode_failure(self):
value = types.String()
self.assertEqual(r'\r\n\t', value.serialize('\r\n\t'))
incorrectly_encoded_bytes = u'æøå'.encode('iso-8859-1')
self.assertRaises(
ValueError, value.deserialize, incorrectly_encoded_bytes)
def test_format_masks_secrets(self):
value = types.String(secret=True)
self.assertEqual('********', value.format('s3cret'))
def test_serialize_encodes_utf8(self):
value = types.String()
result = value.serialize('æøå')
self.assertIsInstance(result, bytes)
self.assertEqual('æøå'.encode('utf-8'), result)
def test_serialize_does_not_encode_bytes(self):
value = types.String()
result = value.serialize('æøå'.encode('utf-8'))
self.assertIsInstance(result, bytes)
self.assertEqual('æøå'.encode('utf-8'), result)
def test_serialize_handles_escapes(self):
value = types.String()
result = value.serialize('a\n\tb')
self.assertIsInstance(result, bytes)
self.assertEqual(r'a\n\tb'.encode('utf-8'), result)
def test_serialize_none(self):
value = types.String()
result = value.serialize(None)
self.assertIsInstance(result, bytes)
self.assertEqual(b'', result)
class SecretTest(unittest.TestCase):
def test_deserialize_passes_through(self):
value = types.Secret()
result = value.deserialize(b'foo')
self.assertIsInstance(result, bytes)
self.assertEqual(b'foo', result)
def test_deserialize_enforces_required(self):
value = types.Secret()
self.assertRaises(ValueError, value.deserialize, b'')
def test_deserialize_respects_optional(self):
value = types.Secret(optional=True)
self.assertIsNone(value.deserialize(b''))
self.assertIsNone(value.deserialize(b' '))
def test_serialize_none(self):
value = types.Secret()
result = value.serialize(None)
self.assertIsInstance(result, bytes)
self.assertEqual(b'', result)
def test_serialize_for_display_masks_value(self):
value = types.Secret()
result = value.serialize('s3cret', display=True)
self.assertIsInstance(result, bytes)
self.assertEqual(b'********', result)
def test_serialize_none_for_display(self):
value = types.Secret()
result = value.serialize(None, display=True)
self.assertIsInstance(result, bytes)
self.assertEqual(b'', result)
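
Summed up as a usage sketch based only on the assertions above: deserialize takes bytes and returns unicode (or None for blank optional values), while serialize always returns bytes and Secret is masked only for display:

from mopidy.config import types

required = types.String()
required.deserialize(b' foo ')   # -> u'foo' (stripped unicode)
required.serialize(u'a\n\tb')    # -> b'a\\n\\tb' (escaped, encoded bytes)

optional = types.String(optional=True)
optional.deserialize(b'')        # -> None

secret = types.Secret()
secret.serialize(None)                     # -> b''
secret.serialize(u's3cret', display=True)  # -> b'********'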
class IntegerTest(unittest.TestCase):
@@ -105,10 +164,6 @@ class IntegerTest(unittest.TestCase):
self.assertEqual(5, value.deserialize('5'))
self.assertRaises(ValueError, value.deserialize, '15')
def test_format_masks_secrets(self):
value = types.Integer(secret=True)
self.assertEqual('********', value.format('1337'))
class BooleanTest(unittest.TestCase):
def test_deserialize_conversion_success(self):
@@ -128,39 +183,70 @@ class BooleanTest(unittest.TestCase):
self.assertRaises(ValueError, value.deserialize, 'sure')
self.assertRaises(ValueError, value.deserialize, '')
def test_serialize(self):
def test_serialize_true(self):
value = types.Boolean()
self.assertEqual('true', value.serialize(True))
self.assertEqual('false', value.serialize(False))
result = value.serialize(True)
self.assertEqual(b'true', result)
self.assertIsInstance(result, bytes)
def test_format_masks_secrets(self):
value = types.Boolean(secret=True)
self.assertEqual('********', value.format('true'))
def test_serialize_false(self):
value = types.Boolean()
result = value.serialize(False)
self.assertEqual(b'false', result)
self.assertIsInstance(result, bytes)
# TODO: test None or other invalid values passed to serialize?
class ListTest(unittest.TestCase):
# TODO: add test_deserialize_ignores_blank
# TODO: add test_serialize_ignores_blank
# TODO: add test_deserialize_handles_escapes
def test_deserialize_conversion_success(self):
value = types.List()
expected = ('foo', 'bar', 'baz')
self.assertEqual(expected, value.deserialize('foo, bar ,baz '))
self.assertEqual(expected, value.deserialize(b'foo, bar ,baz '))
expected = ('foo,bar', 'bar', 'baz')
self.assertEqual(expected, value.deserialize(' foo,bar\nbar\nbaz'))
self.assertEqual(expected, value.deserialize(b' foo,bar\nbar\nbaz'))
def test_deserialize_creates_tuples(self):
value = types.List(optional=True)
self.assertIsInstance(value.deserialize(b'foo,bar,baz'), tuple)
self.assertIsInstance(value.deserialize(b''), tuple)
def test_deserialize_decodes_utf8(self):
value = types.List()
result = value.deserialize('æ, ø, å'.encode('utf-8'))
self.assertEqual(('æ', 'ø', 'å'), result)
result = value.deserialize('æ\nø\nå'.encode('utf-8'))
self.assertEqual(('æ', 'ø', 'å'), result)
def test_deserialize_does_not_double_encode_unicode(self):
value = types.List()
result = value.deserialize('æ, ø, å')
self.assertEqual(('æ', 'ø', 'å'), result)
result = value.deserialize('æ\nø\nå')
self.assertEqual(('æ', 'ø', 'å'), result)
def test_deserialize_enforces_required(self):
value = types.List()
self.assertRaises(ValueError, value.deserialize, '')
self.assertRaises(ValueError, value.deserialize, ' ')
self.assertRaises(ValueError, value.deserialize, b'')
def test_deserialize_respects_optional(self):
value = types.List(optional=True)
self.assertEqual(tuple(), value.deserialize(''))
self.assertEqual(tuple(), value.deserialize(' '))
self.assertEqual(tuple(), value.deserialize(b''))
def test_serialize(self):
value = types.List()
result = value.serialize(('foo', 'bar', 'baz'))
self.assertIsInstance(result, bytes)
self.assertRegexpMatches(result, r'foo\n\s*bar\n\s*baz')
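
Likewise for List, the behaviour pinned down above amounts to (a usage sketch, not full documentation):

from mopidy.config import types

value = types.List()
value.deserialize(b'foo, bar ,baz ')      # -> (u'foo', u'bar', u'baz')
value.deserialize(b' foo,bar\nbar\nbaz')  # -> (u'foo,bar', u'bar', u'baz')

types.List(optional=True).deserialize(b'')  # -> ()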
@@ -189,7 +275,7 @@ class LogLevelTest(unittest.TestCase):
value = types.LogLevel()
for name, level in self.levels.items():
self.assertEqual(name, value.serialize(level))
self.assertIsNone(value.serialize(1337))
self.assertEqual(b'', value.serialize(1337))
class HostnameTest(unittest.TestCase):
@@ -209,7 +295,6 @@ class HostnameTest(unittest.TestCase):
def test_deserialize_enforces_required(self, getaddrinfo_mock):
value = types.Hostname()
self.assertRaises(ValueError, value.deserialize, '')
self.assertRaises(ValueError, value.deserialize, ' ')
self.assertEqual(0, getaddrinfo_mock.call_count)
@mock.patch('socket.getaddrinfo')
@@ -223,6 +308,7 @@ class HostnameTest(unittest.TestCase):
class PortTest(unittest.TestCase):
def test_valid_ports(self):
value = types.Port()
self.assertEqual(0, value.deserialize('0'))
self.assertEqual(1, value.deserialize('1'))
self.assertEqual(80, value.deserialize('80'))
self.assertEqual(6600, value.deserialize('6600'))
@@ -232,7 +318,6 @@ class PortTest(unittest.TestCase):
value = types.Port()
self.assertRaises(ValueError, value.deserialize, '65536')
self.assertRaises(ValueError, value.deserialize, '100000')
self.assertRaises(ValueError, value.deserialize, '0')
self.assertRaises(ValueError, value.deserialize, '-1')
self.assertRaises(ValueError, value.deserialize, '')
@@ -266,7 +351,6 @@ class PathTest(unittest.TestCase):
def test_deserialize_enforces_required(self):
value = types.Path()
self.assertRaises(ValueError, value.deserialize, '')
self.assertRaises(ValueError, value.deserialize, ' ')
def test_deserialize_respects_optional(self):
value = types.Path(optional=True)

View File

@@ -57,11 +57,13 @@ class ValidateRequiredTest(unittest.TestCase):
validators.validate_required('foo', False)
validators.validate_required('', False)
validators.validate_required(' ', False)
validators.validate_required([], False)
def test_passes_when_required_and_set(self):
validators.validate_required('foo', True)
validators.validate_required(' foo ', True)
validators.validate_required([1], True)
def test_blocks_when_required_and_emtpy(self):
self.assertRaises(ValueError, validators.validate_required, '', True)
self.assertRaises(ValueError, validators.validate_required, ' ', True)
self.assertRaises(ValueError, validators.validate_required, [], True)
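
A minimal sketch of what validate_required has to do to satisfy the new list-aware tests above (the error message and the real implementation in mopidy.config.validators may differ):

def validate_required(value, required):
    # Only enforce anything when the value is marked as required;
    # whitespace-only strings and empty lists both count as "not set".
    if not required:
        return
    if isinstance(value, basestring):
        value = value.strip()
    if not value:
        raise ValueError('must be set.')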

View File

@@ -15,14 +15,3 @@ class ExceptionsTest(unittest.TestCase):
def test_extension_error_is_a_mopidy_exception(self):
self.assert_(issubclass(
exceptions.ExtensionError, exceptions.MopidyException))
def test_config_error_is_a_mopidy_exception(self):
self.assert_(issubclass(
exceptions.ConfigError, exceptions.MopidyException))
def test_config_error_provides_getitem(self):
exception = exceptions.ConfigError(
{'field1': 'msg1', 'field2': 'msg2'})
self.assertEqual('msg1', exception['field1'])
self.assertEqual('msg2', exception['field2'])
self.assertItemsEqual(['field1', 'field2'], exception)

View File

@@ -5,37 +5,8 @@ import platform
import pygst
pygst.require('0.10')
import gst
import pykka
try:
import dbus
except ImportError:
dbus = False
try:
import pylast
except ImportError:
pylast = False
try:
import serial
except ImportError:
serial = False
try:
import spotify
except ImportError:
spotify = False
try:
import cherrypy
except ImportError:
cherrypy = False
try:
import ws4py
except ImportError:
ws4py = False
import mock
from mopidy.utils import deps
@@ -47,17 +18,32 @@ class DepsTest(unittest.TestCase):
adapters = [
lambda: dict(name='Python', version='FooPython 2.7.3'),
lambda: dict(name='Platform', version='Loonix 4.0.1'),
lambda: dict(name='Pykka', path='/foo/bar/baz.py', other='Quux')
lambda: dict(
name='Pykka', version='1.1',
path='/foo/bar/baz.py', other='Quux'),
lambda: dict(name='Foo'),
lambda: dict(name='Mopidy', version='0.13', dependencies=[
dict(name='pylast', version='0.5', dependencies=[
dict(name='setuptools', version='0.6')
])
])
]
result = deps.format_dependency_list(adapters)
self.assertIn('Python: FooPython 2.7.3', result)
self.assertIn('Platform: Loonix 4.0.1', result)
self.assertIn('Pykka: not found', result)
self.assertIn('Imported from: /foo/bar', result)
self.assertIn('Pykka: 1.1 from /foo/bar', result)
self.assertNotIn('/baz.py', result)
self.assertIn('Quux', result)
self.assertIn('Detailed information: Quux', result)
self.assertIn('Foo: not found', result)
self.assertIn('Mopidy: 0.13', result)
self.assertIn(' pylast: 0.5', result)
self.assertIn(' setuptools: 0.6', result)
def test_platform_info(self):
result = deps.platform_info()
@@ -85,59 +71,39 @@ class DepsTest(unittest.TestCase):
'.'.join(map(str, gst.get_pygst_version())), result['other'])
self.assertIn('Relevant elements:', result['other'])
def test_pykka_info(self):
result = deps.pykka_info()
@mock.patch('pkg_resources.get_distribution')
def test_pkg_info(self, get_distribution_mock):
dist_mopidy = mock.Mock()
dist_mopidy.project_name = 'Mopidy'
dist_mopidy.version = '0.13'
dist_mopidy.location = '/tmp/example/mopidy'
dist_mopidy.requires.return_value = ['Pykka']
self.assertEquals('Pykka', result['name'])
self.assertEquals(pykka.__version__, result['version'])
self.assertIn('pykka', result['path'])
dist_pykka = mock.Mock()
dist_pykka.project_name = 'Pykka'
dist_pykka.version = '1.1'
dist_pykka.location = '/tmp/example/pykka'
dist_pykka.requires.return_value = ['setuptools']
@unittest.skipUnless(spotify, 'pyspotify not found')
def test_pyspotify_info(self):
result = deps.pyspotify_info()
dist_setuptools = mock.Mock()
dist_setuptools.project_name = 'setuptools'
dist_setuptools.version = '0.6'
dist_setuptools.location = '/tmp/example/setuptools'
dist_setuptools.requires.return_value = []
self.assertEquals('pyspotify', result['name'])
self.assertEquals(spotify.__version__, result['version'])
self.assertIn('spotify', result['path'])
self.assertIn('Built for libspotify API version', result['other'])
self.assertIn(str(spotify.api_version), result['other'])
get_distribution_mock.side_effect = [
dist_mopidy, dist_pykka, dist_setuptools]
@unittest.skipUnless(pylast, 'pylast not found')
def test_pylast_info(self):
result = deps.pylast_info()
result = deps.pkg_info()
self.assertEquals('pylast', result['name'])
self.assertEquals(pylast.__version__, result['version'])
self.assertIn('pylast', result['path'])
self.assertEquals('Mopidy', result['name'])
self.assertEquals('0.13', result['version'])
self.assertIn('mopidy', result['path'])
@unittest.skipUnless(dbus, 'dbus not found')
def test_dbus_info(self):
result = deps.dbus_info()
dep_info_pykka = result['dependencies'][0]
self.assertEquals('Pykka', dep_info_pykka['name'])
self.assertEquals('1.1', dep_info_pykka['version'])
self.assertEquals('dbus-python', result['name'])
self.assertEquals(dbus.__version__, result['version'])
self.assertIn('dbus', result['path'])
@unittest.skipUnless(serial, 'serial not found')
def test_serial_info(self):
result = deps.serial_info()
self.assertEquals('pyserial', result['name'])
self.assertEquals(serial.VERSION, result['version'])
self.assertIn('serial', result['path'])
@unittest.skipUnless(cherrypy, 'cherrypy not found')
def test_cherrypy_info(self):
result = deps.cherrypy_info()
self.assertEquals('cherrypy', result['name'])
self.assertEquals(cherrypy.__version__, result['version'])
self.assertIn('cherrypy', result['path'])
@unittest.skipUnless(ws4py, 'ws4py not found')
def test_ws4py_info(self):
result = deps.ws4py_info()
self.assertEquals('ws4py', result['name'])
self.assertEquals(ws4py.__version__, result['version'])
self.assertIn('ws4py', result['path'])
dep_info_setuptools = dep_info_pykka['dependencies'][0]
self.assertEquals('setuptools', dep_info_setuptools['name'])
self.assertEquals('0.6', dep_info_setuptools['version'])
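
The new pkg_info test describes a dependency walker built on pkg_resources; roughly, the helper under test has to behave like this hedged reconstruction (assumptions: real Requirement objects from requires(), and no guard against cyclic requirements):

import pkg_resources

def pkg_info(project_name='Mopidy'):
    # Resolve the installed distribution, then recurse into its requirements
    # so nested dependencies end up under 'dependencies'.
    distribution = pkg_resources.get_distribution(project_name)
    return {
        'name': project_name,
        'version': distribution.version,
        'path': distribution.location,
        'dependencies': [
            pkg_info(requirement.project_name)
            for requirement in distribution.requires()],
    }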