Merge branch 'develop' into feature/implement-gapless
This commit is contained in:
commit
e12ea662da
2
.mailmap
2
.mailmap
@ -24,4 +24,6 @@ Christopher Schirner <christopher@hackerspace-bamberg.de> <schinken@hackerspace-
|
||||
Laura Barber <laura.c.barber@gmail.com> <artzii.laura@gmail.com>
|
||||
John Cass <john.cass77@gmail.com>
|
||||
Ronald Zielaznicki <zielaznickizm@g.cofc.edu> <zielaznickiz@g.cofc.edu>
|
||||
Kyle Heyne <kyleheyne@gmail.com>
|
||||
Tom Roth <rawdlite@googlemail.com>
|
||||
Eric Jahn <ejahn@newstore.com>
|
||||
|
||||
8
AUTHORS
8
AUTHORS
@ -57,5 +57,13 @@
|
||||
- Camilo Nova <camilo.nova@gmail.com>
|
||||
- Dražen Lučanin <kermit666@gmail.com>
|
||||
- Naglis Jonaitis <njonaitis@gmail.com>
|
||||
- Kyle Heyne <kyleheyne@gmail.com>
|
||||
- Tom Roth <rawdlite@googlemail.com>
|
||||
- Mark Greenwood <fatgerman@gmail.com>
|
||||
- Stein Karlsen <karlsen.stein@gmail.com>
|
||||
- Dejan Prokić <dejanp@nordeus.eu>
|
||||
- Eric Jahn <ejahn@newstore.com>
|
||||
- Mikhail Golubev <qsolo825@gmail.com>
|
||||
- Danilo Bargen <mail@dbrgn.ch>
|
||||
- Bjørnar Snoksrud <bjornar@snoksrud.no>
|
||||
- Giorgos Logiotatidis <seadog@sealabs.net>
|
||||
|
||||
@ -10,9 +10,11 @@ flake8-import-order
|
||||
|
||||
# Mock dependencies in tests
|
||||
mock
|
||||
responses
|
||||
|
||||
# Test runners
|
||||
pytest
|
||||
pytest-capturelog
|
||||
pytest-cov
|
||||
pytest-xdist
|
||||
tox
|
||||
|
||||
@ -256,7 +256,7 @@ chain. The function will be called with the error object as the only argument:
|
||||
.. code-block:: js
|
||||
|
||||
mopidy.playback.getCurrentTrack()
|
||||
.catch(console.error.bind(console));
|
||||
.catch(console.error.bind(console))
|
||||
.done(printCurrentTrack);
|
||||
|
||||
You can also register the error handler at the end of the promise chain by
|
||||
|
||||
@ -4,19 +4,154 @@ Changelog
|
||||
|
||||
This changelog is used to track all major changes to Mopidy.
|
||||
|
||||
v1.1.0 (UNRELEASED)
|
||||
|
||||
v1.2.0 (UNRELEASED)
|
||||
===================
|
||||
|
||||
Feature release.
|
||||
|
||||
Local
|
||||
-----
|
||||
|
||||
- Made :confval:`local/data_dir` really deprecated. This change breaks older
|
||||
versions of Mopidy-Local-SQLite and Mopidy-Local-Images.
|
||||
|
||||
Cleanups
|
||||
--------
|
||||
|
||||
- Removed warning if :file:`~/.mopidy` exists. We stopped using this location
|
||||
in 0.6, released in October 2011.
|
||||
|
||||
- Removed warning if :file:`~/.config/mopidy/settings.py` exists. We stopped
|
||||
using this settings file in 0.14, released in April 2013.
|
||||
|
||||
|
||||
v1.1.1 (UNRELEASED)
|
||||
===================
|
||||
|
||||
Bug fix release.
|
||||
|
||||
- Core: Make :meth:`mopidy.core.LibraryController.refresh` work for all
|
||||
backends with a library provider. Previously, it wrongly worked for all
|
||||
backends with a playlists provider. (Fixes: :issue:`1257`)
|
||||
|
||||
- Core: Respect :confval:`core/cache_dir` and :confval:`core/data_dir` config
|
||||
values added in 1.1.0 when creating the dirs Mopidy need to store data. This
|
||||
should not change the behavior for desktop users running Mopidy. When running
|
||||
Mopidy as a system service installed from a package which sets the core dir
|
||||
configs properly (e.g. Debian and Arch packages), this fix avoids the
|
||||
creation of a couple of directories that should not be used, typically
|
||||
:file:`/var/lib/mopidy/.local` and :file:`/var/lib/mopidy/.cache`. (Fixes:
|
||||
:issue:`1259`, PR: :issue:`1266`)
|
||||
|
||||
- Core: Fix error in :meth:`~mopidy.core.TracklistController.get_eot_tlid`
|
||||
docstring. (Fixes: :issue:`1269`)
|
||||
|
||||
- Local: Deprecate :confval:`local/data_dir` and respect
|
||||
:confval:`core/data_dir` instead. This does not change the defaults for
|
||||
desktop users, only system services installed from packages that properly set
|
||||
:confval:`core/data_dir`, like the Debian and Arch packages. (Fixes:
|
||||
:issue:`1259`, PR: :issue:`1266`)
|
||||
|
||||
- Local: Change default value of :confval:`local/scan_flush_threshold` from
|
||||
1000 to 100 to shorten the time Mopidy-Local-SQLite blocks incoming requests
|
||||
while scanning the local library.
|
||||
|
||||
- M3U: Changed default for the :confval:`m3u/playlists_dir` from
|
||||
``$XDG_DATA_DIR/mopidy/m3u`` to unset, which now means the extension's data
|
||||
dir. This does not change the defaults for desktop users, only system
|
||||
services installed from packages that properly set :confval:`core/data_dir`,
|
||||
like the Debian and Arch packages. (Fixes: :issue:`1259`, PR: :issue:`1266`)
|
||||
|
||||
- Stream: If "file" is present in the :confval:`stream/protocols` config value
|
||||
and the :ref:`ext-file` extension is enabled, we exited with an error because
|
||||
two extensions claimed the same URI scheme. We now log a warning recommending
|
||||
to remove "file" from the :confval:`stream/protocols` config, and then
|
||||
proceed startup. (Fixes: :issue:`1248`, PR: :issue:`1254`)
|
||||
|
||||
- Stream: Fix bug in new playlist parser. A non-ASCII char in an urilist
|
||||
comment would cause a crash while parsing due to comparison of a non-ASCII
|
||||
bytestring with a Unicode string. (Fixes: :issue:`1265`)
|
||||
|
||||
- File: Adjust log levels when failing to expand ``$XDG_MUSIC_DIR`` into a real
|
||||
path. This usually happens when running Mopidy as a system service, and thus
|
||||
with a limited set of environment variables. (Fixes: :issue:`1249`, PR:
|
||||
:issue:`1255`)
|
||||
|
||||
- File: When browsing files, we no longer scan the files to check if they're
|
||||
playable. This makes browsing of the file hierarchy instant for HTTP clients,
|
||||
which do no scanning of the files' metadata, and a bit faster for MPD
|
||||
clients, which no longer scan the files twice. (Fixes: :issue:`1260`, PR:
|
||||
:issue:`1261`)
|
||||
|
||||
- File: Allow looking up metadata about any ``file://`` URI, just like we did
|
||||
in Mopidy 1.0.x, where Mopidy-Stream handled ``file://`` URIs. In Mopidy
|
||||
1.1.0, Mopidy-File did not allow one to lookup files outside the directories
|
||||
listed in :confval:`file/media_dir`. This broke Mopidy-Local-SQLite when the
|
||||
:confval:`local/media_dir` directory was not within one of the
|
||||
:confval:`file/media_dirs` directories. For browsing of files, we still limit
|
||||
access to files inside the :confval:`file/media_dir` directories. For lookup,
|
||||
you can now read metadata for any file you know the path of. (Fixes:
|
||||
:issue:`1268`, PR: :issue:`1273`)
|
||||
|
||||
- Audio: Fix timeout handling in scanner. This regression caused timeouts to
|
||||
expire before it should, causing scans to fail.
|
||||
|
||||
- Audio: Update scanner to emit MIME type instead of an error when missing a
|
||||
plugin.
|
||||
|
||||
|
||||
v1.1.0 (2015-08-09)
|
||||
===================
|
||||
|
||||
Mopidy 1.1 is here!
|
||||
|
||||
Since the release of 1.0, we've closed or merged approximately 65 issues and
|
||||
pull requests through about 400 commits by a record high 20 extraordinary
|
||||
people, including 14 newcomers. That's fewer issues and commits than in the 1.0
|
||||
release, but even more contributors, and a doubling of the number of newcomers.
|
||||
Thanks to :ref:`everyone <authors>` who has :ref:`contributed <contributing>`,
|
||||
especially those that joined the sprint at EuroPython 2015 in Bilbao, Spain a
|
||||
couple of weeks ago!
|
||||
|
||||
As we promised with the release of Mopidy 1.0, any extension working with
|
||||
Mopidy 1.0 should continue working with all Mopidy 1.x releases. However, this
|
||||
release brings a lot stronger enforcement of our documented APIs. If an
|
||||
extension doesn't use the APIs properly, it may no longer work. The advantage
|
||||
of this change is that Mopidy is now more robust against errors in extensions,
|
||||
and also provides vastly better error messages when extensions misbehave. This
|
||||
should make it easier to create quality extensions.
|
||||
|
||||
The major features of Mopidy 1.1 are:
|
||||
|
||||
- Validation of the arguments to all core API methods, as well as all responses
|
||||
from backends and all data model attributes.
|
||||
|
||||
- New bundled backend, Mopidy-File. It is similar to Mopidy-Local, but allows
|
||||
you to browse and play music from local disk without running a scan to index
|
||||
the music first. The drawback is that it doesn't support searching.
|
||||
|
||||
- The Mopidy-MPD server should now be up to date with the 0.19 version of the
|
||||
MPD protocol.
|
||||
|
||||
Dependencies
|
||||
------------
|
||||
|
||||
- Mopidy now requires Requests.
|
||||
|
||||
- Heads up: Porting from GStreamer 0.10 to 1.x and support for running Mopidy
|
||||
with Python 3.4+ is not far off on our roadmap.
|
||||
|
||||
Core API
|
||||
--------
|
||||
|
||||
- Calling the following methods with ``kwargs`` is being deprecated.
|
||||
(PR: :issue:`1090`)
|
||||
- **Deprecated:** Calling the following methods with ``kwargs`` is being
|
||||
deprecated. (PR: :issue:`1090`)
|
||||
|
||||
- :meth:`mopidy.core.library.LibraryController.search`
|
||||
- :meth:`mopidy.core.library.PlaylistsController.filter`
|
||||
- :meth:`mopidy.core.library.TracklistController.filter`
|
||||
- :meth:`mopidy.core.library.TracklistController.remove`
|
||||
- :meth:`mopidy.core.LibraryController.search`
|
||||
- :meth:`mopidy.core.PlaylistsController.filter`
|
||||
- :meth:`mopidy.core.TracklistController.filter`
|
||||
- :meth:`mopidy.core.TracklistController.remove`
|
||||
|
||||
- Updated core controllers to handle backend exceptions in all calls that rely
|
||||
on multiple backends. (Issue: :issue:`667`)
|
||||
@ -25,16 +160,18 @@ Core API
|
||||
|
||||
- Add ``tlid`` alternatives to methods that take ``tl_track`` and also add
|
||||
``get_{eot,next,previous}_tlid`` methods as light weight alternatives to the
|
||||
``tl_track`` versions of the calls. (Fixes: :issue:`1131` PR: :issue:`1136`,
|
||||
``tl_track`` versions of the calls. (Fixes: :issue:`1131`, PR: :issue:`1136`,
|
||||
:issue:`1140`)
|
||||
|
||||
- Add :meth:`mopidy.core.playback.PlaybackController.get_current_tlid`.
|
||||
- Add :meth:`mopidy.core.PlaybackController.get_current_tlid`.
|
||||
(Part of: :issue:`1137`)
|
||||
|
||||
- Update core to handle backend crashes and bad data. (Fixes: :issue:`1161`)
|
||||
|
||||
- Add `max_tracklist_length` config and limitation. (Fixes: :issue:`997`
|
||||
PR: :issue:`1225`)
|
||||
- Add :confval:`core/max_tracklist_length` config and limitation. (Fixes:
|
||||
:issue:`997` PR: :issue:`1225`)
|
||||
|
||||
- Added ``playlist_deleted`` event. (Fixes: :issue:`996`)
|
||||
|
||||
Models
|
||||
------
|
||||
@ -43,9 +180,85 @@ Models
|
||||
serialization. (Fixes: :issue:`865`)
|
||||
|
||||
- Memory usage for models has been greatly improved. We now have a lower
|
||||
overhead per instance by using slots, intern identifiers and automatically
|
||||
overhead per instance by using slots, interned identifiers and automatically
|
||||
reuse instances. For the test data set this was developed against, a library
|
||||
of ~14000 tracks, went from needing ~75MB to ~17MB. (Fixes: :issue:`348`)
|
||||
of ~14.000 tracks, went from needing ~75MB to ~17MB. (Fixes: :issue:`348`)
|
||||
|
||||
- Added :attr:`mopidy.models.Artist.sortname` field that is mapped to
|
||||
``musicbrainz-sortname`` tag. (Fixes: :issue:`940`)
|
||||
|
||||
Configuration
|
||||
-------------
|
||||
|
||||
- Add new configurations to set base directories to be used by Mopidy and
|
||||
Mopidy extensions: :confval:`core/cache_dir`, :confval:`core/config_dir`, and
|
||||
:confval:`core/data_dir`. (Fixes: :issue:`843`, PR: :issue:`1232`)
|
||||
|
||||
Extension support
|
||||
-----------------
|
||||
|
||||
- Add new methods to :class:`~mopidy.ext.Extension` class for getting cache,
|
||||
config and data directories specific to your extension:
|
||||
|
||||
- :meth:`mopidy.ext.Extension.get_cache_dir`
|
||||
- :meth:`mopidy.ext.Extension.get_config_dir`
|
||||
- :meth:`mopidy.ext.Extension.get_data_dir`
|
||||
|
||||
Extensions should use these methods so that the correct directories are used
|
||||
both when Mopidy is run by a regular user and when run as a system service.
|
||||
(Fixes: :issue:`843`, PR: :issue:`1232`)
|
||||
|
||||
- Add :func:`mopidy.httpclient.format_proxy` and
|
||||
:func:`mopidy.httpclient.format_user_agent`. (Part of: :issue:`1156`)
|
||||
|
||||
- It is now possible to import :mod:`mopidy.backends` without having GObject or
|
||||
GStreamer installed. In other words, a lot of backend extensions should now
|
||||
be able to run tests in a virtualenv with global site-packages disabled. This
|
||||
removes a lot of potential error sources. (Fixes: :issue:`1068`, PR:
|
||||
:issue:`1115`)
|
||||
|
||||
Local backend
|
||||
-------------
|
||||
|
||||
- Filter out :class:`None` from
|
||||
:meth:`~mopidy.backend.LibraryProvider.get_distinct` results. All returned
|
||||
results should be strings. (Fixes: :issue:`1202`)
|
||||
|
||||
Stream backend
|
||||
--------------
|
||||
|
||||
- Move stream playlist parsing from GStreamer to the stream backend. (Fixes:
|
||||
:issue:`671`)
|
||||
|
||||
File backend
|
||||
------------
|
||||
|
||||
The :ref:`Mopidy-File <ext-file>` backend is a new bundled backend. It is
|
||||
similar to Mopidy-Local since it works with local files, but it differs in a
|
||||
few key ways:
|
||||
|
||||
- Mopidy-File lets you browse your media files by their file hierarchy.
|
||||
|
||||
- It supports multiple media directories, all exposed under the "Files"
|
||||
directory when you browse your library with e.g. an MPD client.
|
||||
|
||||
- There is no index of the media files, like the JSON or SQLite files used by
|
||||
Mopidy-Local. Thus no need to scan the music collection before starting
|
||||
Mopidy. Everything is read from the file system when needed and changes to
|
||||
the file system is thus immediately visible in Mopidy clients.
|
||||
|
||||
- Because there is no index, there is no support for search.
|
||||
|
||||
Our long term plan is to keep this very simple file backend in Mopidy, as it
|
||||
has a well defined and limited scope, while splitting the more feature rich
|
||||
Mopidy-Local extension out to an independent project. (Fixes: :issue:`1004`,
|
||||
PR: :issue:`1207`)
|
||||
|
||||
M3U backend
|
||||
-----------
|
||||
|
||||
- Support loading UTF-8 encoded M3U files with the ``.m3u8`` file extension.
|
||||
(PR: :issue:`1193`)
|
||||
|
||||
MPD frontend
|
||||
------------
|
||||
@ -80,42 +293,21 @@ MPD frontend
|
||||
- Track data now include the ``Last-Modified`` field if set on the track model.
|
||||
(Fixes: :issue:`1218`, PR: :issue:`1219`)
|
||||
|
||||
Local backend
|
||||
-------------
|
||||
- Implement ``tagtypes`` MPD command. (PR: :issue:`1235`)
|
||||
|
||||
- Filter out :class:`None` from
|
||||
:meth:`~mopidy.backend.LibraryProvider.get_distinct` results. All returned
|
||||
results should be strings. (Fixes: :issue:`1202`)
|
||||
- Exclude empty tags fields from metadata output. (Fixes: :issue:`1045`, PR:
|
||||
:issue:`1235`)
|
||||
|
||||
File backend
|
||||
------------
|
||||
- Implement protocol extensions to output Album URIs and Album Images when
|
||||
outputting track data to clients. (PR: :issue:`1230`)
|
||||
|
||||
The :ref:`Mopidy-File <ext-file>` backend is a new bundled backend. It is
|
||||
similar to Mopidy-Local since it works with local files, but it differs in a
|
||||
few key ways:
|
||||
|
||||
- Mopidy-File lets you browse your media files by their file hierarchy.
|
||||
|
||||
- It supports multiple media directories, all exposed under the "Files"
|
||||
directory when you browse your library with e.g. an MPD client.
|
||||
|
||||
- There is no index of the media files, like the JSON or SQLite files used by
|
||||
Mopidy-Local. Thus no need to scan the music collection before starting
|
||||
Mopidy. Everything is read from the file system when needed and changes to
|
||||
the file system is thus immediately visible in Mopidy clients.
|
||||
|
||||
- Because there is no index, there is no support for search.
|
||||
|
||||
Our long term plan is to keep this very simple file backend in Mopidy, as it
|
||||
has a well defined and limited scope, while splitting the more feature rich
|
||||
Mopidy-Local extension out to an independent project. (Fixes: :issue:`1004`,
|
||||
PR: :issue:`1207`)
|
||||
|
||||
Utils
|
||||
-----
|
||||
|
||||
- Add :func:`mopidy.httpclient.format_proxy` and
|
||||
:func:`mopidy.httpclient.format_user_agent`. (Part of: :issue:`1156`)
|
||||
- The MPD commands ``lsinfo`` and ``listplaylists`` are now implemented using
|
||||
the :meth:`~mopidy.core.PlaylistsController.as_list` method, which retrieves
|
||||
a lot less data and is thus much faster than the deprecated
|
||||
:meth:`~mopidy.core.PlaylistsController.get_playlists`. The drawback is that
|
||||
the ``Last-Modified`` timestamp is not available through this method, and the
|
||||
timestamps in the MPD command responses are now always set to the current
|
||||
time.
|
||||
|
||||
Internal changes
|
||||
----------------
|
||||
@ -123,12 +315,6 @@ Internal changes
|
||||
- Tests have been cleaned up to stop using deprecated APIs where feasible.
|
||||
(Partial fix: :issue:`1083`, PR: :issue:`1090`)
|
||||
|
||||
- It is now possible to import :mod:`mopidy.backends` without having GObject or
|
||||
GStreamer installed. In other words, a lot of backend extensions should now
|
||||
be able to run tests in a virtualenv with global site-packages disabled. This
|
||||
removes a lot of potential error sources. (Fixes: :issue:`1068`, PR:
|
||||
:issue:`1115`)
|
||||
|
||||
|
||||
v1.0.8 (2015-07-22)
|
||||
===================
|
||||
|
||||
@ -78,7 +78,7 @@ def setup(app):
|
||||
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
needs_sphinx = '1.0'
|
||||
needs_sphinx = '1.3'
|
||||
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
|
||||
@ -57,6 +57,60 @@ Core configuration values
|
||||
|
||||
Mopidy's core has the following configuration values that you can change.
|
||||
|
||||
|
||||
Core configuration
|
||||
------------------
|
||||
|
||||
.. confval:: core/cache_dir
|
||||
|
||||
Path to base directory for storing cached data.
|
||||
|
||||
Mopidy and extensions will use this path to cache data that can safely be
|
||||
thrown away.
|
||||
|
||||
If your system is running from an SD card, it can help avoid wear and
|
||||
corruption of your SD card by pointing this config to another location. If
|
||||
you have enough RAM, a tmpfs might be a good choice.
|
||||
|
||||
When running Mopidy as a regular user, this should usually be
|
||||
``$XDG_CACHE_DIR/mopidy``, i.e. :file:`~/.cache/mopidy`.
|
||||
|
||||
When running Mopidy as a system service, this should usually be
|
||||
:file:`/var/cache/mopidy`.
|
||||
|
||||
.. confval:: core/config_dir
|
||||
|
||||
Path to base directory for config files.
|
||||
|
||||
When running Mopidy as a regular user, this should usually be
|
||||
``$XDG_CONFIG_DIR/mopidy``, i.e. :file:`~/.config/mopidy`.
|
||||
|
||||
When running Mopidy as a system service, this should usually be
|
||||
:file:`/etc/mopidy`.
|
||||
|
||||
.. confval:: core/data_dir
|
||||
|
||||
Path to base directory for persistent data files.
|
||||
|
||||
Mopidy and extensions will use this path to store data that cannot be
|
||||
thrown away and reproduced without some effort. Examples include
|
||||
Mopidy-Local's index of your media library and Mopidy-M3U's stored
|
||||
playlists.
|
||||
|
||||
When running Mopidy as a regular user, this should usually be
|
||||
``$XDG_DATA_DIR/mopidy``, i.e. :file:`~/.local/share/mopidy`.
|
||||
|
||||
When running Mopidy as a system service, this should usually be
|
||||
:file:`/var/lib/mopidy`.
|
||||
|
||||
.. confval:: core/max_tracklist_length
|
||||
|
||||
Max length of the tracklist. Defaults to 10000.
|
||||
|
||||
The original MPD server only supports 10000 tracks in the tracklist. Some
|
||||
MPD clients will crash if this limit is exceeded.
|
||||
|
||||
|
||||
Audio configuration
|
||||
-------------------
|
||||
|
||||
|
||||
@ -113,12 +113,17 @@ from a regular Mopidy setup you'll want to know about.
|
||||
|
||||
sudo service mopidy status
|
||||
|
||||
- Mopidy installed from a Debian package can use both Mopidy extensions
|
||||
installed both from Debian packages and extensions installed with pip.
|
||||
- Mopidy installed from a Debian package can use Mopidy extensions installed
|
||||
both from Debian packages and with pip. This has always been the case.
|
||||
|
||||
The other way around does not work: Mopidy installed with pip can use
|
||||
extensions installed with pip, but not extensions installed from a Debian
|
||||
package. This is because the Debian packages install extensions into
|
||||
Mopidy installed with pip can use extensions installed with pip, but
|
||||
not extensions installed from a Debian package released before August 2015.
|
||||
This is because the Debian packages used to install extensions into
|
||||
:file:`/usr/share/mopidy` which is normally not on your ``PYTHONPATH``.
|
||||
Thus, your pip-installed Mopidy will not find the Debian package-installed
|
||||
Thus, your pip-installed Mopidy would not find the Debian package-installed
|
||||
extensions.
|
||||
|
||||
In August 2015, all Mopidy extension Debian packages was modified to install
|
||||
into :file:`/usr/lib/python2.7/dist-packages`, like any other Python Debian
|
||||
package. Thus, Mopidy installed with pip can now use extensions installed
|
||||
from Debian.
|
||||
|
||||
@ -35,6 +35,23 @@ To make a local library for your music available for Mopidy:
|
||||
#. Start Mopidy, find the music library in a client, and play some local music!
|
||||
|
||||
|
||||
Updating the local library
|
||||
==========================
|
||||
|
||||
When you've added or removed music in your collection and want to update
|
||||
Mopidy's index of your local library, you need to rescan::
|
||||
|
||||
mopidy local scan
|
||||
|
||||
Note that if you are using the default local library storage, ``json``, you
|
||||
need to restart Mopidy after the scan completes for the updated index to be
|
||||
used.
|
||||
|
||||
If you want index updates to come into effect immediately, you can try out
|
||||
`Mopidy-Local-SQLite <https://github.com/mopidy/mopidy-local-sqlite>`_, which
|
||||
will probably become the default backend in the near future.
|
||||
|
||||
|
||||
Pluggable library support
|
||||
=========================
|
||||
|
||||
@ -47,8 +64,8 @@ active at a time.
|
||||
To create a new library provider you must create class that implements the
|
||||
:class:`mopidy.local.Library` interface and install it in the extension
|
||||
registry under ``local:library``. Any data that the library needs to store on
|
||||
disc should be stored in :confval:`local/data_dir` using the library name as
|
||||
part of the filename or directory to avoid any conflicts.
|
||||
disc should be stored in the extension's data dir, as returned by
|
||||
:meth:`~mopidy.ext.Extension.get_data_dir`.
|
||||
|
||||
|
||||
Configuration
|
||||
|
||||
@ -52,4 +52,5 @@ See :ref:`config` for general help on configuring Mopidy.
|
||||
|
||||
.. confval:: m3u/playlists_dir
|
||||
|
||||
Path to directory with M3U files.
|
||||
Path to directory with M3U files. Unset by default, in which case the
|
||||
extension's data dir is used to store playlists.
|
||||
|
||||
@ -438,7 +438,7 @@ When writing an extension, you should only use APIs documented at
|
||||
at any time and are not something extensions should use.
|
||||
|
||||
Mopidy performs type checking to help catch extension bugs. This applies to
|
||||
both to frontend calls into core and return values from backends. Additionally
|
||||
both frontend calls into core and return values from backends. Additionally
|
||||
model fields always get validated to further guard against bad data.
|
||||
|
||||
Logging in extensions
|
||||
|
||||
@ -74,10 +74,8 @@ If you want to stay up to date on Mopidy developments, you can follow `@mopidy
|
||||
announcements related to Mopidy and Mopidy extensions.
|
||||
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
.. toctree::
|
||||
:caption: Usage
|
||||
:maxdepth: 2
|
||||
|
||||
installation/index
|
||||
@ -89,10 +87,8 @@ Usage
|
||||
|
||||
.. _ext:
|
||||
|
||||
Extensions
|
||||
==========
|
||||
|
||||
.. toctree::
|
||||
:caption: Extensions
|
||||
:maxdepth: 2
|
||||
|
||||
ext/local
|
||||
@ -108,10 +104,8 @@ Extensions
|
||||
ext/web
|
||||
|
||||
|
||||
Clients
|
||||
=======
|
||||
|
||||
.. toctree::
|
||||
:caption: Clients
|
||||
:maxdepth: 2
|
||||
|
||||
clients/http
|
||||
@ -120,10 +114,8 @@ Clients
|
||||
clients/upnp
|
||||
|
||||
|
||||
About
|
||||
=====
|
||||
|
||||
.. toctree::
|
||||
:caption: About
|
||||
:maxdepth: 1
|
||||
|
||||
authors
|
||||
@ -132,10 +124,8 @@ About
|
||||
versioning
|
||||
|
||||
|
||||
Development
|
||||
===========
|
||||
|
||||
.. toctree::
|
||||
:caption: Development
|
||||
:maxdepth: 2
|
||||
|
||||
contributing
|
||||
@ -145,10 +135,8 @@ Development
|
||||
extensiondev
|
||||
|
||||
|
||||
Reference
|
||||
=========
|
||||
|
||||
.. toctree::
|
||||
:caption: Reference
|
||||
:maxdepth: 2
|
||||
|
||||
glossary
|
||||
|
||||
@ -20,13 +20,21 @@ for free. We use their services for the following sites:
|
||||
|
||||
- Mailgun for sending emails from the Discourse forum.
|
||||
|
||||
- CDN hosting at http://dl.mopidy.com, which is used to distribute Pi Musicbox
|
||||
|
||||
Fastly
|
||||
======
|
||||
|
||||
`Fastly <https://www.fastly.com/>`_ lets Mopidy use their CDN for free. We
|
||||
accelerate requests to all Mopidy services, including:
|
||||
|
||||
- https://apt.mopidy.com/dists/, which is used to distribute Debian packages.
|
||||
|
||||
- https://dl.mopidy.com/pimusicbox/, which is used to distribute Pi Musicbox
|
||||
images.
|
||||
|
||||
|
||||
GlobalSign
|
||||
==========
|
||||
|
||||
`GlobalSign <https://www.globalsign.com/>`_ provides Mopidy with a free
|
||||
wildcard SSL certificate for mopidy.com, which we use to secure access to all
|
||||
our web sites.
|
||||
`GlobalSign <https://www.globalsign.com/>`_ provides Mopidy with a free SSL
|
||||
certificate for mopidy.com, which we use to secure access to all our web sites.
|
||||
|
||||
@ -14,4 +14,4 @@ if not (2, 7) <= sys.version_info < (3,):
|
||||
warnings.filterwarnings('ignore', 'could not open display')
|
||||
|
||||
|
||||
__version__ = '1.0.8'
|
||||
__version__ = '1.1.0'
|
||||
|
||||
@ -75,15 +75,15 @@ def main():
|
||||
|
||||
args = root_cmd.parse(mopidy_args)
|
||||
|
||||
create_file_structures_and_config(args, extensions_data)
|
||||
check_old_locations()
|
||||
|
||||
config, config_errors = config_lib.load(
|
||||
args.config_files,
|
||||
[d.config_schema for d in extensions_data],
|
||||
[d.config_defaults for d in extensions_data],
|
||||
args.config_overrides)
|
||||
|
||||
create_core_dirs(config)
|
||||
create_initial_config_file(args, extensions_data)
|
||||
|
||||
verbosity_level = args.base_verbosity_level
|
||||
if args.verbosity_level:
|
||||
verbosity_level += args.verbosity_level
|
||||
@ -166,17 +166,22 @@ def main():
|
||||
raise
|
||||
|
||||
|
||||
def create_file_structures_and_config(args, extensions):
|
||||
path.get_or_create_dir(b'$XDG_DATA_DIR/mopidy')
|
||||
path.get_or_create_dir(b'$XDG_CONFIG_DIR/mopidy')
|
||||
def create_core_dirs(config):
    """Ensure Mopidy's core base directories exist.

    Creates the cache, config, and data directories configured under the
    ``core`` config section, using :func:`path.get_or_create_dir`.
    """
    core_config = config['core']
    for dir_key in ('cache_dir', 'config_dir', 'data_dir'):
        path.get_or_create_dir(core_config[dir_key])
|
||||
|
||||
|
||||
def create_initial_config_file(args, extensions_data):
|
||||
"""Initialize whatever the last config file is with defaults"""
|
||||
|
||||
# Initialize whatever the last config file is with defaults
|
||||
config_file = args.config_files[-1]
|
||||
|
||||
if os.path.exists(path.expand_path(config_file)):
|
||||
return
|
||||
|
||||
try:
|
||||
default = config_lib.format_initial(extensions)
|
||||
default = config_lib.format_initial(extensions_data)
|
||||
path.get_or_create_file(config_file, mkdir=False, content=default)
|
||||
logger.info('Initialized %s with default config', config_file)
|
||||
except IOError as error:
|
||||
@ -185,22 +190,6 @@ def create_file_structures_and_config(args, extensions):
|
||||
config_file, encoding.locale_decode(error))
|
||||
|
||||
|
||||
def check_old_locations():
    """Warn about leftovers from pre-1.0 Mopidy configuration layouts.

    Emits a warning (but takes no other action) for each legacy location
    that still exists on disk.
    """
    # The ~/.mopidy dot dir predates the ini-file based config format.
    legacy_dot_dir = path.expand_path(b'~/.mopidy')
    if os.path.isdir(legacy_dot_dir):
        logger.warning(
            'Old Mopidy dot dir found at %s. Please migrate your config to '
            'the ini-file based config format. See release notes for further '
            'instructions.', legacy_dot_dir)

    # settings.py was replaced by the ini-file based config format.
    legacy_settings = path.expand_path(b'$XDG_CONFIG_DIR/mopidy/settings.py')
    if os.path.isfile(legacy_settings):
        logger.warning(
            'Old Mopidy settings file found at %s. Please migrate your '
            'config to the ini-file based config format. See release notes '
            'for further instructions.', legacy_settings)
|
||||
|
||||
|
||||
def log_extension_info(all_extensions, enabled_extensions):
|
||||
# TODO: distinguish disabled vs blocked by env?
|
||||
enabled_names = set(e.ext_name for e in enabled_extensions)
|
||||
|
||||
@ -13,7 +13,7 @@ import gst.pbutils # noqa
|
||||
import pykka
|
||||
|
||||
from mopidy import exceptions
|
||||
from mopidy.audio import playlists, utils
|
||||
from mopidy.audio import icy, utils
|
||||
from mopidy.audio.constants import PlaybackState
|
||||
from mopidy.audio.listener import AudioListener
|
||||
from mopidy.internal import deprecation, process
|
||||
@ -26,8 +26,7 @@ logger = logging.getLogger(__name__)
|
||||
# set_state on a pipeline.
|
||||
gst_logger = logging.getLogger('mopidy.audio.gst')
|
||||
|
||||
playlists.register_typefinders()
|
||||
playlists.register_elements()
|
||||
icy.register()
|
||||
|
||||
_GST_STATE_MAPPING = {
|
||||
gst.STATE_PLAYING: PlaybackState.PLAYING,
|
||||
|
||||
63
mopidy/audio/icy.py
Normal file
63
mopidy/audio/icy.py
Normal file
@ -0,0 +1,63 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import gobject
|
||||
|
||||
import pygst
|
||||
pygst.require('0.10')
|
||||
import gst # noqa
|
||||
|
||||
|
||||
class IcySrc(gst.Bin, gst.URIHandler):
    """GStreamer source bin adding ``icy://``/``icyx://`` URI support.

    Wraps the regular HTTP source element and rewrites icy/icyx URIs to
    http/https before handing them to it. Registered by :func:`register`
    only when the GStreamer installation lacks native icy:// handling.
    """

    # GStreamer element metadata: (long name, class, description, author).
    __gstdetails__ = ('IcySrc',
                      'Src',
                      'HTTP src wrapper for icy:// support.',
                      'Mopidy')

    # Always-present source pad accepting any caps; actual data comes from
    # the wrapped HTTP source's pad, ghosted below in __init__.
    srcpad_template = gst.PadTemplate(
        'src', gst.PAD_SRC, gst.PAD_ALWAYS,
        gst.caps_new_any())

    __gsttemplates__ = (srcpad_template,)

    def __init__(self):
        super(IcySrc, self).__init__()
        # Delegate actual streaming to whatever element handles http:// URIs.
        self._httpsrc = gst.element_make_from_uri(gst.URI_SRC, 'http://')
        try:
            # Ask the HTTP source for ICY (SHOUTcast) metadata, if supported.
            self._httpsrc.set_property('iradio-mode', True)
        except TypeError:
            # Property not available on this element; proceed without it.
            pass
        self.add(self._httpsrc)

        # Expose the inner element's src pad as this bin's own src pad.
        self._srcpad = gst.GhostPad('src', self._httpsrc.get_pad('src'))
        self.add_pad(self._srcpad)

    @classmethod
    def do_get_type_full(cls):
        # URIHandler override: this element acts as a URI source.
        return gst.URI_SRC

    @classmethod
    def do_get_protocols_full(cls):
        # URIHandler override: URI schemes this element claims.
        return [b'icy', b'icyx']

    def do_set_uri(self, uri):
        # Map icy:// -> http:// and icyx:// -> https:// for the inner source.
        # Returns the inner set_uri result, or False for unsupported schemes.
        if uri.startswith('icy://'):
            return self._httpsrc.set_uri(b'http://' + uri[len('icy://'):])
        elif uri.startswith('icyx://'):
            return self._httpsrc.set_uri(b'https://' + uri[len('icyx://'):])
        else:
            return False

    def do_get_uri(self):
        # Inverse of do_set_uri: report the URI back in icy/icyx form.
        uri = self._httpsrc.get_uri()
        if uri.startswith('http://'):
            return b'icy://' + uri[len('http://'):]
        else:
            return b'icyx://' + uri[len('https://'):]
|
||||
|
||||
|
||||
def register():
|
||||
# Only register icy if gst install can't handle it on it's own.
|
||||
if not gst.element_make_from_uri(gst.URI_SRC, 'icy://'):
|
||||
gobject.type_register(IcySrc)
|
||||
gst.element_register(
|
||||
IcySrc, IcySrc.__name__.lower(), gst.RANK_MARGINAL)
|
||||
@ -1,420 +0,0 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import io
|
||||
|
||||
import gobject
|
||||
|
||||
import pygst
|
||||
pygst.require('0.10')
|
||||
import gst # noqa
|
||||
|
||||
from mopidy.compat import configparser
|
||||
|
||||
try:
|
||||
import xml.etree.cElementTree as elementtree
|
||||
except ImportError:
|
||||
import xml.etree.ElementTree as elementtree
|
||||
|
||||
|
||||
# TODO: make detect_FOO_header reusable in general mopidy code.
|
||||
# i.e. give it just a "peek" like function.
|
||||
def detect_m3u_header(typefind):
|
||||
return typefind.peek(0, 7).upper() == b'#EXTM3U'
|
||||
|
||||
|
||||
def detect_pls_header(typefind):
|
||||
return typefind.peek(0, 10).lower() == b'[playlist]'
|
||||
|
||||
|
||||
def detect_xspf_header(typefind):
|
||||
data = typefind.peek(0, 150)
|
||||
if b'xspf' not in data.lower():
|
||||
return False
|
||||
|
||||
try:
|
||||
data = io.BytesIO(data)
|
||||
for event, element in elementtree.iterparse(data, events=(b'start',)):
|
||||
return element.tag.lower() == '{http://xspf.org/ns/0/}playlist'
|
||||
except elementtree.ParseError:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def detect_asx_header(typefind):
|
||||
data = typefind.peek(0, 50)
|
||||
if b'asx' not in data.lower():
|
||||
return False
|
||||
|
||||
try:
|
||||
data = io.BytesIO(data)
|
||||
for event, element in elementtree.iterparse(data, events=(b'start',)):
|
||||
return element.tag.lower() == 'asx'
|
||||
except elementtree.ParseError:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def parse_m3u(data):
|
||||
# TODO: convert non URIs to file URIs.
|
||||
found_header = False
|
||||
for line in data.readlines():
|
||||
if found_header or line.startswith(b'#EXTM3U'):
|
||||
found_header = True
|
||||
else:
|
||||
continue
|
||||
if not line.startswith(b'#') and line.strip():
|
||||
yield line.strip()
|
||||
|
||||
|
||||
def parse_pls(data):
|
||||
# TODO: convert non URIs to file URIs.
|
||||
try:
|
||||
cp = configparser.RawConfigParser()
|
||||
cp.readfp(data)
|
||||
except configparser.Error:
|
||||
return
|
||||
|
||||
for section in cp.sections():
|
||||
if section.lower() != 'playlist':
|
||||
continue
|
||||
for i in range(cp.getint(section, 'numberofentries')):
|
||||
yield cp.get(section, 'file%d' % (i + 1))
|
||||
|
||||
|
||||
def parse_xspf(data):
|
||||
try:
|
||||
# Last element will be root.
|
||||
for event, element in elementtree.iterparse(data):
|
||||
element.tag = element.tag.lower() # normalize
|
||||
except elementtree.ParseError:
|
||||
return
|
||||
|
||||
ns = 'http://xspf.org/ns/0/'
|
||||
for track in element.iterfind('{%s}tracklist/{%s}track' % (ns, ns)):
|
||||
yield track.findtext('{%s}location' % ns)
|
||||
|
||||
|
||||
def parse_asx(data):
|
||||
try:
|
||||
# Last element will be root.
|
||||
for event, element in elementtree.iterparse(data):
|
||||
element.tag = element.tag.lower() # normalize
|
||||
except elementtree.ParseError:
|
||||
return
|
||||
|
||||
for ref in element.findall('entry/ref[@href]'):
|
||||
yield ref.get('href', '').strip()
|
||||
|
||||
for entry in element.findall('entry[@href]'):
|
||||
yield entry.get('href', '').strip()
|
||||
|
||||
|
||||
def parse_urilist(data):
|
||||
for line in data.readlines():
|
||||
if not line.startswith('#') and gst.uri_is_valid(line.strip()):
|
||||
yield line
|
||||
|
||||
|
||||
def playlist_typefinder(typefind, func, caps):
|
||||
if func(typefind):
|
||||
typefind.suggest(gst.TYPE_FIND_MAXIMUM, caps)
|
||||
|
||||
|
||||
def register_typefind(mimetype, func, extensions):
|
||||
caps = gst.caps_from_string(mimetype)
|
||||
gst.type_find_register(mimetype, gst.RANK_PRIMARY, playlist_typefinder,
|
||||
extensions, caps, func, caps)
|
||||
|
||||
|
||||
def register_typefinders():
|
||||
register_typefind('audio/x-mpegurl', detect_m3u_header, [b'm3u', b'm3u8'])
|
||||
register_typefind('audio/x-scpls', detect_pls_header, [b'pls'])
|
||||
register_typefind('application/xspf+xml', detect_xspf_header, [b'xspf'])
|
||||
# NOTE: seems we can't use video/x-ms-asf which is the correct mime for asx
|
||||
# as it is shared with asf for streaming videos :/
|
||||
register_typefind('audio/x-ms-asx', detect_asx_header, [b'asx'])
|
||||
|
||||
|
||||
class BasePlaylistElement(gst.Bin):
|
||||
|
||||
"""Base class for creating GStreamer elements for playlist support.
|
||||
|
||||
This element performs the following steps:
|
||||
|
||||
1. Initializes src and sink pads for the element.
|
||||
2. Collects data from the sink until EOS is reached.
|
||||
3. Passes the collected data to :meth:`convert` to get a list of URIs.
|
||||
4. Passes the list of URIs to :meth:`handle`, default handling is to pass
|
||||
the URIs to the src element as a uri-list.
|
||||
5. If handle returned true, the EOS consumed and nothing more happens, if
|
||||
it is not consumed it flows on to the next element downstream, which is
|
||||
likely our uri-list consumer which needs the EOS to know we are done
|
||||
sending URIs.
|
||||
"""
|
||||
|
||||
sinkpad_template = None
|
||||
"""GStreamer pad template to use for sink, must be overriden."""
|
||||
|
||||
srcpad_template = None
|
||||
"""GStreamer pad template to use for src, must be overriden."""
|
||||
|
||||
ghost_srcpad = False
|
||||
"""Indicates if src pad should be ghosted or not."""
|
||||
|
||||
def __init__(self):
|
||||
"""Sets up src and sink pads plus behaviour."""
|
||||
super(BasePlaylistElement, self).__init__()
|
||||
self._data = io.BytesIO()
|
||||
self._done = False
|
||||
|
||||
self.sinkpad = gst.Pad(self.sinkpad_template)
|
||||
self.sinkpad.set_chain_function(self._chain)
|
||||
self.sinkpad.set_event_function(self._event)
|
||||
self.add_pad(self.sinkpad)
|
||||
|
||||
if self.ghost_srcpad:
|
||||
self.srcpad = gst.ghost_pad_new_notarget('src', gst.PAD_SRC)
|
||||
else:
|
||||
self.srcpad = gst.Pad(self.srcpad_template)
|
||||
self.add_pad(self.srcpad)
|
||||
|
||||
def convert(self, data):
|
||||
"""Convert the data we have colleted to URIs.
|
||||
|
||||
:param data: collected data buffer
|
||||
:type data: :class:`io.BytesIO`
|
||||
:returns: iterable or generator of URIs
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def handle(self, uris):
|
||||
"""Do something useful with the URIs.
|
||||
|
||||
:param uris: list of URIs
|
||||
:type uris: :type:`list`
|
||||
:returns: boolean indicating if EOS should be consumed
|
||||
"""
|
||||
# TODO: handle unicode uris which we can get out of elementtree
|
||||
self.srcpad.push(gst.Buffer('\n'.join(uris)))
|
||||
return False
|
||||
|
||||
def _chain(self, pad, buf):
|
||||
if not self._done:
|
||||
self._data.write(buf.data)
|
||||
return gst.FLOW_OK
|
||||
return gst.FLOW_EOS
|
||||
|
||||
def _event(self, pad, event):
|
||||
if event.type == gst.EVENT_NEWSEGMENT:
|
||||
return True
|
||||
|
||||
if event.type == gst.EVENT_EOS:
|
||||
self._done = True
|
||||
self._data.seek(0)
|
||||
if self.handle(list(self.convert(self._data))):
|
||||
return True
|
||||
|
||||
# Ensure we handle remaining events in a sane way.
|
||||
return pad.event_default(event)
|
||||
|
||||
|
||||
class M3uDecoder(BasePlaylistElement):
|
||||
__gstdetails__ = ('M3U Decoder',
|
||||
'Decoder',
|
||||
'Convert .m3u to text/uri-list',
|
||||
'Mopidy')
|
||||
|
||||
sinkpad_template = gst.PadTemplate(
|
||||
'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('audio/x-mpegurl'))
|
||||
|
||||
srcpad_template = gst.PadTemplate(
|
||||
'src', gst.PAD_SRC, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('text/uri-list'))
|
||||
|
||||
__gsttemplates__ = (sinkpad_template, srcpad_template)
|
||||
|
||||
def convert(self, data):
|
||||
return parse_m3u(data)
|
||||
|
||||
|
||||
class PlsDecoder(BasePlaylistElement):
|
||||
__gstdetails__ = ('PLS Decoder',
|
||||
'Decoder',
|
||||
'Convert .pls to text/uri-list',
|
||||
'Mopidy')
|
||||
|
||||
sinkpad_template = gst.PadTemplate(
|
||||
'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('audio/x-scpls'))
|
||||
|
||||
srcpad_template = gst.PadTemplate(
|
||||
'src', gst.PAD_SRC, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('text/uri-list'))
|
||||
|
||||
__gsttemplates__ = (sinkpad_template, srcpad_template)
|
||||
|
||||
def convert(self, data):
|
||||
return parse_pls(data)
|
||||
|
||||
|
||||
class XspfDecoder(BasePlaylistElement):
|
||||
__gstdetails__ = ('XSPF Decoder',
|
||||
'Decoder',
|
||||
'Convert .pls to text/uri-list',
|
||||
'Mopidy')
|
||||
|
||||
sinkpad_template = gst.PadTemplate(
|
||||
'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('application/xspf+xml'))
|
||||
|
||||
srcpad_template = gst.PadTemplate(
|
||||
'src', gst.PAD_SRC, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('text/uri-list'))
|
||||
|
||||
__gsttemplates__ = (sinkpad_template, srcpad_template)
|
||||
|
||||
def convert(self, data):
|
||||
return parse_xspf(data)
|
||||
|
||||
|
||||
class AsxDecoder(BasePlaylistElement):
|
||||
__gstdetails__ = ('ASX Decoder',
|
||||
'Decoder',
|
||||
'Convert .asx to text/uri-list',
|
||||
'Mopidy')
|
||||
|
||||
sinkpad_template = gst.PadTemplate(
|
||||
'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('audio/x-ms-asx'))
|
||||
|
||||
srcpad_template = gst.PadTemplate(
|
||||
'src', gst.PAD_SRC, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('text/uri-list'))
|
||||
|
||||
__gsttemplates__ = (sinkpad_template, srcpad_template)
|
||||
|
||||
def convert(self, data):
|
||||
return parse_asx(data)
|
||||
|
||||
|
||||
class UriListElement(BasePlaylistElement):
|
||||
__gstdetails__ = ('URIListDemuxer',
|
||||
'Demuxer',
|
||||
'Convert a text/uri-list to a stream',
|
||||
'Mopidy')
|
||||
|
||||
sinkpad_template = gst.PadTemplate(
|
||||
'sink', gst.PAD_SINK, gst.PAD_ALWAYS,
|
||||
gst.caps_from_string('text/uri-list'))
|
||||
|
||||
srcpad_template = gst.PadTemplate(
|
||||
'src', gst.PAD_SRC, gst.PAD_ALWAYS,
|
||||
gst.caps_new_any())
|
||||
|
||||
ghost_srcpad = True # We need to hook this up to our internal decodebin
|
||||
|
||||
__gsttemplates__ = (sinkpad_template, srcpad_template)
|
||||
|
||||
def __init__(self):
|
||||
super(UriListElement, self).__init__()
|
||||
self.uridecodebin = gst.element_factory_make('uridecodebin')
|
||||
self.uridecodebin.connect('pad-added', self.pad_added)
|
||||
# Limit to anycaps so we get a single stream out, letting other
|
||||
# elements downstream figure out actual muxing
|
||||
self.uridecodebin.set_property('caps', gst.caps_new_any())
|
||||
|
||||
def pad_added(self, src, pad):
|
||||
self.srcpad.set_target(pad)
|
||||
pad.add_event_probe(self.pad_event)
|
||||
|
||||
def pad_event(self, pad, event):
|
||||
if event.has_name('urilist-played'):
|
||||
error = gst.GError(gst.RESOURCE_ERROR, gst.RESOURCE_ERROR_FAILED,
|
||||
b'Nested playlists not supported.')
|
||||
message = b'Playlists pointing to other playlists is not supported'
|
||||
self.post_message(gst.message_new_error(self, error, message))
|
||||
return 1 # GST_PAD_PROBE_OK
|
||||
|
||||
def handle(self, uris):
|
||||
struct = gst.Structure('urilist-played')
|
||||
event = gst.event_new_custom(gst.EVENT_CUSTOM_UPSTREAM, struct)
|
||||
self.sinkpad.push_event(event)
|
||||
|
||||
# TODO: hookup about to finish and errors to rest of URIs so we
|
||||
# round robin, only giving up once all have been tried.
|
||||
# TODO: uris could be empty.
|
||||
self.add(self.uridecodebin)
|
||||
self.uridecodebin.set_state(gst.STATE_READY)
|
||||
self.uridecodebin.set_property('uri', uris[0])
|
||||
self.uridecodebin.sync_state_with_parent()
|
||||
return True # Make sure we consume the EOS that triggered us.
|
||||
|
||||
def convert(self, data):
|
||||
return parse_urilist(data)
|
||||
|
||||
|
||||
class IcySrc(gst.Bin, gst.URIHandler):
|
||||
__gstdetails__ = ('IcySrc',
|
||||
'Src',
|
||||
'HTTP src wrapper for icy:// support.',
|
||||
'Mopidy')
|
||||
|
||||
srcpad_template = gst.PadTemplate(
|
||||
'src', gst.PAD_SRC, gst.PAD_ALWAYS,
|
||||
gst.caps_new_any())
|
||||
|
||||
__gsttemplates__ = (srcpad_template,)
|
||||
|
||||
def __init__(self):
|
||||
super(IcySrc, self).__init__()
|
||||
self._httpsrc = gst.element_make_from_uri(gst.URI_SRC, 'http://')
|
||||
try:
|
||||
self._httpsrc.set_property('iradio-mode', True)
|
||||
except TypeError:
|
||||
pass
|
||||
self.add(self._httpsrc)
|
||||
|
||||
self._srcpad = gst.GhostPad('src', self._httpsrc.get_pad('src'))
|
||||
self.add_pad(self._srcpad)
|
||||
|
||||
@classmethod
|
||||
def do_get_type_full(cls):
|
||||
return gst.URI_SRC
|
||||
|
||||
@classmethod
|
||||
def do_get_protocols_full(cls):
|
||||
return [b'icy', b'icyx']
|
||||
|
||||
def do_set_uri(self, uri):
|
||||
if uri.startswith('icy://'):
|
||||
return self._httpsrc.set_uri(b'http://' + uri[len('icy://'):])
|
||||
elif uri.startswith('icyx://'):
|
||||
return self._httpsrc.set_uri(b'https://' + uri[len('icyx://'):])
|
||||
else:
|
||||
return False
|
||||
|
||||
def do_get_uri(self):
|
||||
uri = self._httpsrc.get_uri()
|
||||
if uri.startswith('http://'):
|
||||
return b'icy://' + uri[len('http://'):]
|
||||
else:
|
||||
return b'icyx://' + uri[len('https://'):]
|
||||
|
||||
|
||||
def register_element(element_class):
|
||||
gobject.type_register(element_class)
|
||||
gst.element_register(
|
||||
element_class, element_class.__name__.lower(), gst.RANK_MARGINAL)
|
||||
|
||||
|
||||
def register_elements():
|
||||
register_element(M3uDecoder)
|
||||
register_element(PlsDecoder)
|
||||
register_element(XspfDecoder)
|
||||
register_element(AsxDecoder)
|
||||
register_element(UriListElement)
|
||||
|
||||
# Only register icy if gst install can't handle it on it's own.
|
||||
if not gst.element_make_from_uri(gst.URI_SRC, 'icy://'):
|
||||
register_element(IcySrc)
|
||||
@ -12,8 +12,6 @@ from mopidy import exceptions
|
||||
from mopidy.audio import utils
|
||||
from mopidy.internal import encoding
|
||||
|
||||
_missing_plugin_desc = gst.pbutils.missing_plugin_message_get_description
|
||||
|
||||
_Result = collections.namedtuple(
|
||||
'Result', ('uri', 'tags', 'duration', 'seekable', 'mime', 'playable'))
|
||||
|
||||
@ -134,12 +132,12 @@ def _process(pipeline, timeout_ms):
|
||||
clock = pipeline.get_clock()
|
||||
bus = pipeline.get_bus()
|
||||
timeout = timeout_ms * gst.MSECOND
|
||||
tags, mime, have_audio, missing_description = {}, None, False, None
|
||||
tags, mime, have_audio, missing_message = {}, None, False, None
|
||||
|
||||
types = (gst.MESSAGE_ELEMENT | gst.MESSAGE_APPLICATION | gst.MESSAGE_ERROR
|
||||
| gst.MESSAGE_EOS | gst.MESSAGE_ASYNC_DONE | gst.MESSAGE_TAG)
|
||||
|
||||
start = clock.get_time()
|
||||
previous = clock.get_time()
|
||||
while timeout > 0:
|
||||
message = bus.timed_pop_filtered(timeout, types)
|
||||
|
||||
@ -147,8 +145,7 @@ def _process(pipeline, timeout_ms):
|
||||
break
|
||||
elif message.type == gst.MESSAGE_ELEMENT:
|
||||
if gst.pbutils.is_missing_plugin_message(message):
|
||||
missing_description = encoding.locale_decode(
|
||||
_missing_plugin_desc(message))
|
||||
missing_message = message
|
||||
elif message.type == gst.MESSAGE_APPLICATION:
|
||||
if message.structure.get_name() == 'have-type':
|
||||
mime = message.structure['caps'].get_name()
|
||||
@ -158,8 +155,10 @@ def _process(pipeline, timeout_ms):
|
||||
have_audio = True
|
||||
elif message.type == gst.MESSAGE_ERROR:
|
||||
error = encoding.locale_decode(message.parse_error()[0])
|
||||
if missing_description:
|
||||
error = '%s (%s)' % (missing_description, error)
|
||||
if missing_message and not mime:
|
||||
caps = missing_message.structure['detail']
|
||||
mime = caps.get_structure(0).get_name()
|
||||
return tags, mime, have_audio
|
||||
raise exceptions.ScannerError(error)
|
||||
elif message.type == gst.MESSAGE_EOS:
|
||||
return tags, mime, have_audio
|
||||
@ -171,7 +170,9 @@ def _process(pipeline, timeout_ms):
|
||||
# Note that this will only keep the last tag.
|
||||
tags.update(utils.convert_taglist(taglist))
|
||||
|
||||
timeout -= clock.get_time() - start
|
||||
now = clock.get_time()
|
||||
timeout -= now - previous
|
||||
previous = now
|
||||
|
||||
raise exceptions.ScannerError('Timeout after %dms' % timeout_ms)
|
||||
|
||||
|
||||
@ -65,15 +65,21 @@ def supported_uri_schemes(uri_schemes):
|
||||
return supported_schemes
|
||||
|
||||
|
||||
def _artists(tags, artist_name, artist_id=None):
|
||||
def _artists(tags, artist_name, artist_id=None, artist_sortname=None):
|
||||
# Name missing, don't set artist
|
||||
if not tags.get(artist_name):
|
||||
return None
|
||||
# One artist name and id, provide artist with id.
|
||||
if len(tags[artist_name]) == 1 and artist_id in tags:
|
||||
return [Artist(name=tags[artist_name][0],
|
||||
musicbrainz_id=tags[artist_id][0])]
|
||||
# Multiple artist, provide artists without id.
|
||||
# One artist name and either id or sortname, include all available fields
|
||||
if len(tags[artist_name]) == 1 and \
|
||||
(artist_id in tags or artist_sortname in tags):
|
||||
attrs = {'name': tags[artist_name][0]}
|
||||
if artist_id in tags:
|
||||
attrs['musicbrainz_id'] = tags[artist_id][0]
|
||||
if artist_sortname in tags:
|
||||
attrs['sortname'] = tags[artist_sortname][0]
|
||||
return [Artist(**attrs)]
|
||||
|
||||
# Multiple artist, provide artists with name only to avoid ambiguity.
|
||||
return [Artist(name=name) for name in tags[artist_name]]
|
||||
|
||||
|
||||
@ -91,8 +97,9 @@ def convert_tags_to_track(tags):
|
||||
|
||||
track_kwargs['composers'] = _artists(tags, gst.TAG_COMPOSER)
|
||||
track_kwargs['performers'] = _artists(tags, gst.TAG_PERFORMER)
|
||||
track_kwargs['artists'] = _artists(
|
||||
tags, gst.TAG_ARTIST, 'musicbrainz-artistid')
|
||||
track_kwargs['artists'] = _artists(tags, gst.TAG_ARTIST,
|
||||
'musicbrainz-artistid',
|
||||
'musicbrainz-sortname')
|
||||
album_kwargs['artists'] = _artists(
|
||||
tags, gst.TAG_ALBUM_ARTIST, 'musicbrainz-albumartistid')
|
||||
|
||||
|
||||
@ -1,8 +1,13 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
from mopidy import listener, models
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Backend(object):
|
||||
|
||||
"""Backend API
|
||||
@ -238,6 +243,9 @@ class PlaybackProvider(object):
|
||||
:rtype: :class:`True` if successful, else :class:`False`
|
||||
"""
|
||||
uri = self.translate_uri(track.uri)
|
||||
if uri != track.uri:
|
||||
logger.debug(
|
||||
'Backend translated URI from %s to %s', track.uri, uri)
|
||||
if not uri:
|
||||
return False
|
||||
self.audio.set_uri(uri).get()
|
||||
|
||||
@ -16,6 +16,9 @@ from mopidy.internal import path, versioning
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_core_schema = ConfigSchema('core')
|
||||
_core_schema['cache_dir'] = Path()
|
||||
_core_schema['config_dir'] = Path()
|
||||
_core_schema['data_dir'] = Path()
|
||||
# MPD supports at most 10k tracks, some clients segfault when this is exceeded.
|
||||
_core_schema['max_tracklist_length'] = Integer(minimum=1, maximum=10000)
|
||||
|
||||
@ -87,20 +90,23 @@ def format(config, ext_schemas, comments=None, display=True):
|
||||
return _format(config, comments or {}, schemas, display, False)
|
||||
|
||||
|
||||
def format_initial(extensions):
|
||||
def format_initial(extensions_data):
|
||||
config_dir = os.path.dirname(__file__)
|
||||
defaults = [read(os.path.join(config_dir, 'default.conf'))]
|
||||
defaults.extend(e.get_default_config() for e in extensions)
|
||||
defaults.extend(d.extension.get_default_config() for d in extensions_data)
|
||||
raw_config = _load([], defaults, [])
|
||||
|
||||
schemas = _schemas[:]
|
||||
schemas.extend(e.get_config_schema() for e in extensions)
|
||||
schemas.extend(d.extension.get_config_schema() for d in extensions_data)
|
||||
|
||||
config, errors = _validate(raw_config, schemas)
|
||||
|
||||
versions = ['Mopidy %s' % versioning.get_version()]
|
||||
for extension in sorted(extensions, key=lambda ext: ext.dist_name):
|
||||
versions.append('%s %s' % (extension.dist_name, extension.version))
|
||||
extensions_data = sorted(
|
||||
extensions_data, key=lambda d: d.extension.dist_name)
|
||||
for data in extensions_data:
|
||||
versions.append('%s %s' % (
|
||||
data.extension.dist_name, data.extension.version))
|
||||
|
||||
header = _INITIAL_HELP.strip() % {'versions': '\n# '.join(versions)}
|
||||
formatted_config = _format(
|
||||
|
||||
@ -1,4 +1,7 @@
|
||||
[core]
|
||||
cache_dir = $XDG_CACHE_DIR/mopidy
|
||||
config_dir = $XDG_CONFIG_DIR/mopidy
|
||||
data_dir = $XDG_DATA_DIR/mopidy
|
||||
max_tracklist_length = 10000
|
||||
|
||||
[logging]
|
||||
|
||||
@ -162,7 +162,7 @@ class Backends(list):
|
||||
|
||||
for scheme in b.uri_schemes.get():
|
||||
assert scheme not in backends_by_scheme, (
|
||||
'Cannot add URI scheme %s for %s, '
|
||||
'Cannot add URI scheme "%s" for %s, '
|
||||
'it is already handled by %s'
|
||||
) % (scheme, name(b), name(backends_by_scheme[scheme]))
|
||||
backends_by_scheme[scheme] = b
|
||||
|
||||
@ -255,7 +255,7 @@ class LibraryController(object):
|
||||
backends = {}
|
||||
uri_scheme = urlparse.urlparse(uri).scheme if uri else None
|
||||
|
||||
for backend_scheme, backend in self.backends.with_playlists.items():
|
||||
for backend_scheme, backend in self.backends.with_library.items():
|
||||
backends.setdefault(backend, set()).add(backend_scheme)
|
||||
|
||||
for backend, backend_schemes in backends.items():
|
||||
|
||||
@ -123,6 +123,17 @@ class CoreListener(listener.Listener):
|
||||
"""
|
||||
pass
|
||||
|
||||
def playlist_deleted(self, uri):
|
||||
"""
|
||||
Called whenever a playlist is deleted.
|
||||
|
||||
*MAY* be implemented by actor.
|
||||
|
||||
:param uri: the URI of the deleted playlist
|
||||
:type uri: string
|
||||
"""
|
||||
pass
|
||||
|
||||
def options_changed(self):
|
||||
"""
|
||||
Called whenever an option is changed.
|
||||
|
||||
@ -345,8 +345,11 @@ class PlaybackController(object):
|
||||
backend.playback.change_track(tl_track.track).get() and
|
||||
backend.playback.play().get())
|
||||
except TypeError:
|
||||
logger.error('%s needs to be updated to work with this '
|
||||
'version of Mopidy.', backend)
|
||||
logger.error(
|
||||
'%s needs to be updated to work with this '
|
||||
'version of Mopidy.',
|
||||
backend.actor_ref.actor_class.__name__)
|
||||
logger.debug('Backend exception', exc_info=True)
|
||||
|
||||
if success:
|
||||
self.core.tracklist._mark_playing(tl_track)
|
||||
|
||||
@ -178,11 +178,12 @@ class PlaylistsController(object):
|
||||
uri_scheme = urlparse.urlparse(uri).scheme
|
||||
backend = self.backends.with_playlists.get(uri_scheme, None)
|
||||
if not backend:
|
||||
return
|
||||
return None # TODO: error reporting to user
|
||||
|
||||
with _backend_error_handling(backend):
|
||||
backend.playlists.delete(uri).get()
|
||||
# TODO: emit playlist changed?
|
||||
# TODO: error detection and reporting to user
|
||||
listener.CoreListener.send('playlist_deleted', uri=uri)
|
||||
|
||||
# TODO: return value?
|
||||
|
||||
|
||||
@ -236,7 +236,7 @@ class TracklistController(object):
|
||||
|
||||
def get_eot_tlid(self):
|
||||
"""
|
||||
The TLID of the track that will be played after the given track.
|
||||
The TLID of the track that will be played after the current track.
|
||||
|
||||
Not necessarily the same TLID as returned by :meth:`get_next_tlid`.
|
||||
|
||||
@ -554,7 +554,7 @@ class TracklistController(object):
|
||||
:rtype: list of :class:`mopidy.models.TlTrack` that was removed
|
||||
|
||||
.. deprecated:: 1.1
|
||||
Providing the criteria via ``kwargs`` is no longer supported.
|
||||
Providing the criteria via ``kwargs``.
|
||||
"""
|
||||
if kwargs:
|
||||
deprecation.warn('core.tracklist.remove:kwargs_criteria')
|
||||
|
||||
@ -2,10 +2,12 @@ from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import collections
|
||||
import logging
|
||||
import os
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from mopidy import config as config_lib, exceptions
|
||||
from mopidy.internal import path
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -58,6 +60,46 @@ class Extension(object):
|
||||
schema['enabled'] = config_lib.Boolean()
|
||||
return schema
|
||||
|
||||
def get_cache_dir(self, config):
|
||||
"""Get or create cache directory for the extension.
|
||||
|
||||
Use this directory to cache data that can safely be thrown away.
|
||||
|
||||
:param config: the Mopidy config object
|
||||
:return: string
|
||||
"""
|
||||
assert self.ext_name is not None
|
||||
cache_dir_path = bytes(os.path.join(config['core']['cache_dir'],
|
||||
self.ext_name))
|
||||
path.get_or_create_dir(cache_dir_path)
|
||||
return cache_dir_path
|
||||
|
||||
def get_config_dir(self, config):
|
||||
"""Get or create configuration directory for the extension.
|
||||
|
||||
:param config: the Mopidy config object
|
||||
:return: string
|
||||
"""
|
||||
assert self.ext_name is not None
|
||||
config_dir_path = bytes(os.path.join(config['core']['config_dir'],
|
||||
self.ext_name))
|
||||
path.get_or_create_dir(config_dir_path)
|
||||
return config_dir_path
|
||||
|
||||
def get_data_dir(self, config):
|
||||
"""Get or create data directory for the extension.
|
||||
|
||||
Use this directory to store data that should be persistent.
|
||||
|
||||
:param config: the Mopidy config object
|
||||
:returns: string
|
||||
"""
|
||||
assert self.ext_name is not None
|
||||
data_dir_path = bytes(os.path.join(config['core']['data_dir'],
|
||||
self.ext_name))
|
||||
path.get_or_create_dir(data_dir_path)
|
||||
return data_dir_path
|
||||
|
||||
def get_command(self):
|
||||
"""Command to expose to command line users running ``mopidy``.
|
||||
|
||||
|
||||
@ -71,7 +71,7 @@ class FileLibraryProvider(backend.LibraryProvider):
|
||||
name = dir_entry.decode(FS_ENCODING, 'replace')
|
||||
if os.path.isdir(child_path):
|
||||
result.append(models.Ref.directory(name=name, uri=uri))
|
||||
elif os.path.isfile(child_path) and self._is_audio_file(uri):
|
||||
elif os.path.isfile(child_path):
|
||||
result.append(models.Ref.track(name=name, uri=uri))
|
||||
|
||||
result.sort(key=operator.attrgetter('name'))
|
||||
@ -81,10 +81,6 @@ class FileLibraryProvider(backend.LibraryProvider):
|
||||
logger.debug('Looking up file URI: %s', uri)
|
||||
local_path = path.uri_to_path(uri)
|
||||
|
||||
if not self._is_in_basedir(local_path):
|
||||
logger.warning('Ignoring URI outside base dir: %s', local_path)
|
||||
return []
|
||||
|
||||
try:
|
||||
result = self._scanner.scan(uri)
|
||||
track = utils.convert_tags_to_track(result.tags).copy(
|
||||
@ -108,12 +104,15 @@ class FileLibraryProvider(backend.LibraryProvider):
|
||||
media_dir_split[0].encode(FS_ENCODING))
|
||||
|
||||
if not local_path:
|
||||
logger.warning('Failed expanding path (%s) from'
|
||||
'file/media_dirs config value.',
|
||||
logger.debug(
|
||||
'Failed expanding path (%s) from file/media_dirs config '
|
||||
'value.',
|
||||
media_dir_split[0])
|
||||
continue
|
||||
elif not os.path.isdir(local_path):
|
||||
logger.warning('%s is not a directory', local_path)
|
||||
logger.warning(
|
||||
'%s is not a directory. Please create the directory or '
|
||||
'update the file/media_dirs config value.', local_path)
|
||||
continue
|
||||
|
||||
media_dir['path'] = local_path
|
||||
@ -131,18 +130,6 @@ class FileLibraryProvider(backend.LibraryProvider):
|
||||
name=media_dir['name'],
|
||||
uri=path.path_to_uri(media_dir['path']))
|
||||
|
||||
def _is_audio_file(self, uri):
|
||||
try:
|
||||
result = self._scanner.scan(uri)
|
||||
if result.playable:
|
||||
logger.debug('Playable file: %s', result.uri)
|
||||
else:
|
||||
logger.debug('Unplayable file: %s (not audio)', result.uri)
|
||||
return result.playable
|
||||
except exceptions.ScannerError as e:
|
||||
logger.debug('Unplayable file: %s (%s)', uri, e)
|
||||
return False
|
||||
|
||||
def _is_in_basedir(self, local_path):
|
||||
return any(
|
||||
path.is_path_inside_base_dir(local_path, media_dir['path'])
|
||||
|
||||
16
mopidy/internal/http.py
Normal file
16
mopidy/internal/http.py
Normal file
@ -0,0 +1,16 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import requests
|
||||
|
||||
from mopidy import httpclient
|
||||
|
||||
|
||||
def get_requests_session(proxy_config, user_agent):
|
||||
proxy = httpclient.format_proxy(proxy_config)
|
||||
full_user_agent = httpclient.format_user_agent(user_agent)
|
||||
|
||||
session = requests.Session()
|
||||
session.proxies.update({'http': proxy, 'https': proxy})
|
||||
session.headers.update({'user-agent': full_user_agent})
|
||||
|
||||
return session
|
||||
@ -192,7 +192,8 @@ def _find(root, thread_count=10, relative=False, follow=False):
|
||||
|
||||
def find_mtimes(root, follow=False):
|
||||
results, errors = _find(root, relative=False, follow=follow)
|
||||
mtimes = dict((f, int(st.st_mtime * 1000)) for f, st in results.items())
|
||||
# return the mtimes as integer milliseconds
|
||||
mtimes = {f: int(st.st_mtime * 1000) for f, st in results.items()}
|
||||
return mtimes, errors
|
||||
|
||||
|
||||
|
||||
132
mopidy/internal/playlists.py
Normal file
132
mopidy/internal/playlists.py
Normal file
@ -0,0 +1,132 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import io
|
||||
|
||||
import pygst
|
||||
pygst.require('0.10')
|
||||
import gst # noqa
|
||||
|
||||
from mopidy.compat import configparser
|
||||
from mopidy.internal import validation
|
||||
|
||||
try:
|
||||
import xml.etree.cElementTree as elementtree
|
||||
except ImportError:
|
||||
import xml.etree.ElementTree as elementtree
|
||||
|
||||
|
||||
def parse(data):
|
||||
handlers = {
|
||||
detect_extm3u_header: parse_extm3u,
|
||||
detect_pls_header: parse_pls,
|
||||
detect_asx_header: parse_asx,
|
||||
detect_xspf_header: parse_xspf,
|
||||
}
|
||||
for detector, parser in handlers.items():
|
||||
if detector(data):
|
||||
return list(parser(data))
|
||||
return parse_urilist(data) # Fallback
|
||||
|
||||
|
||||
def detect_extm3u_header(data):
|
||||
return data[0:7].upper() == b'#EXTM3U'
|
||||
|
||||
|
||||
def detect_pls_header(data):
|
||||
return data[0:10].lower() == b'[playlist]'
|
||||
|
||||
|
||||
def detect_xspf_header(data):
|
||||
data = data[0:150]
|
||||
if b'xspf' not in data.lower():
|
||||
return False
|
||||
|
||||
try:
|
||||
data = io.BytesIO(data)
|
||||
for event, element in elementtree.iterparse(data, events=(b'start',)):
|
||||
return element.tag.lower() == '{http://xspf.org/ns/0/}playlist'
|
||||
except elementtree.ParseError:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def detect_asx_header(data):
|
||||
data = data[0:50]
|
||||
if b'asx' not in data.lower():
|
||||
return False
|
||||
|
||||
try:
|
||||
data = io.BytesIO(data)
|
||||
for event, element in elementtree.iterparse(data, events=(b'start',)):
|
||||
return element.tag.lower() == 'asx'
|
||||
except elementtree.ParseError:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def parse_extm3u(data):
|
||||
# TODO: convert non URIs to file URIs.
|
||||
found_header = False
|
||||
for line in data.splitlines():
|
||||
if found_header or line.startswith(b'#EXTM3U'):
|
||||
found_header = True
|
||||
else:
|
||||
continue
|
||||
if not line.startswith(b'#') and line.strip():
|
||||
yield line.strip()
|
||||
|
||||
|
||||
def parse_pls(data):
|
||||
# TODO: convert non URIs to file URIs.
|
||||
try:
|
||||
cp = configparser.RawConfigParser()
|
||||
cp.readfp(io.BytesIO(data))
|
||||
except configparser.Error:
|
||||
return
|
||||
|
||||
for section in cp.sections():
|
||||
if section.lower() != 'playlist':
|
||||
continue
|
||||
for i in range(cp.getint(section, 'numberofentries')):
|
||||
yield cp.get(section, 'file%d' % (i + 1))
|
||||
|
||||
|
||||
def parse_xspf(data):
    """Yield the track locations of an XSPF playlist.

    Yields nothing if the data cannot be parsed as XML.
    """
    ns = 'http://xspf.org/ns/0/'
    root = None
    try:
        # The last element produced by iterparse is the document root.
        for _, elem in elementtree.iterparse(io.BytesIO(data)):
            elem.tag = elem.tag.lower()  # normalize
            root = elem
    except elementtree.ParseError:
        return
    if root is None:
        return

    for track in root.iterfind('{%s}tracklist/{%s}track' % (ns, ns)):
        yield track.findtext('{%s}location' % ns)
|
||||
|
||||
|
||||
def parse_asx(data):
    """Yield the stream URIs of an ASX playlist.

    Handles both the nested ``<entry><ref href="..."/></entry>`` form and
    the simple ``<entry href="..."/>`` form. Yields nothing if the data
    cannot be parsed as XML.
    """
    root = None
    try:
        # Lower-case every tag while parsing so the element searches below
        # are case insensitive; the last element seen is the document root.
        for _, elem in elementtree.iterparse(io.BytesIO(data)):
            elem.tag = elem.tag.lower()
            root = elem
    except elementtree.ParseError:
        return
    if root is None:
        return

    for node in root.findall('entry/ref[@href]'):
        yield node.get('href', '').strip()

    for node in root.findall('entry[@href]'):
        yield node.get('href', '').strip()
|
||||
|
||||
|
||||
def parse_urilist(data):
    """Parse a plain ``text/uri-list`` document.

    Returns the non-blank, non-comment lines as a list. A single line that
    fails URI validation invalidates the whole document and an empty list
    is returned instead.
    """
    uris = []
    for line in data.splitlines():
        if line.startswith(b'#') or not line.strip():
            continue
        try:
            validation.check_uri(line)
        except ValueError:
            # One bad line means this was not a uri-list after all.
            return []
        uris.append(line)
    return uris
|
||||
@ -23,7 +23,7 @@ class Extension(ext.Extension):
|
||||
schema = super(Extension, self).get_config_schema()
|
||||
schema['library'] = config.String()
|
||||
schema['media_dir'] = config.Path()
|
||||
schema['data_dir'] = config.Path()
|
||||
schema['data_dir'] = config.Deprecated()
|
||||
schema['playlists_dir'] = config.Deprecated()
|
||||
schema['tag_cache_file'] = config.Deprecated()
|
||||
schema['scan_timeout'] = config.Integer(
|
||||
|
||||
@ -2,9 +2,8 @@
|
||||
enabled = true
|
||||
library = json
|
||||
media_dir = $XDG_MUSIC_DIR
|
||||
data_dir = $XDG_DATA_DIR/mopidy/local
|
||||
scan_timeout = 1000
|
||||
scan_flush_threshold = 1000
|
||||
scan_flush_threshold = 100
|
||||
scan_follow_symlinks = false
|
||||
excluded_file_extensions =
|
||||
.directory
|
||||
|
||||
@ -12,7 +12,7 @@ import tempfile
|
||||
import mopidy
|
||||
from mopidy import compat, local, models
|
||||
from mopidy.internal import encoding, timer
|
||||
from mopidy.local import search, storage, translator
|
||||
from mopidy.local import Extension, search, storage, translator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -116,7 +116,7 @@ class JsonLibrary(local.Library):
|
||||
self._browse_cache = None
|
||||
self._media_dir = config['local']['media_dir']
|
||||
self._json_file = os.path.join(
|
||||
config['local']['data_dir'], b'library.json.gz')
|
||||
Extension().get_data_dir(config), b'library.json.gz')
|
||||
|
||||
storage.check_dirs_and_files(config)
|
||||
|
||||
|
||||
@ -3,8 +3,6 @@ from __future__ import absolute_import, unicode_literals
|
||||
import logging
|
||||
import os
|
||||
|
||||
from mopidy.internal import encoding, path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -13,10 +11,3 @@ def check_dirs_and_files(config):
|
||||
logger.warning(
|
||||
'Local media dir %s does not exist.' %
|
||||
config['local']['media_dir'])
|
||||
|
||||
try:
|
||||
path.get_or_create_dir(config['local']['data_dir'])
|
||||
except EnvironmentError as error:
|
||||
logger.warning(
|
||||
'Could not create local data dir: %s',
|
||||
encoding.locale_decode(error))
|
||||
|
||||
@ -21,7 +21,7 @@ class Extension(ext.Extension):
|
||||
|
||||
def get_config_schema(self):
|
||||
schema = super(Extension, self).get_config_schema()
|
||||
schema['playlists_dir'] = config.Path()
|
||||
schema['playlists_dir'] = config.Path(optional=True)
|
||||
return schema
|
||||
|
||||
def setup(self, registry):
|
||||
|
||||
@ -4,7 +4,7 @@ import logging
|
||||
|
||||
import pykka
|
||||
|
||||
from mopidy import backend
|
||||
from mopidy import backend, m3u
|
||||
from mopidy.internal import encoding, path
|
||||
from mopidy.m3u.library import M3ULibraryProvider
|
||||
from mopidy.m3u.playlists import M3UPlaylistsProvider
|
||||
@ -21,12 +21,16 @@ class M3UBackend(pykka.ThreadingActor, backend.Backend):
|
||||
|
||||
self._config = config
|
||||
|
||||
if config['m3u']['playlists_dir'] is not None:
|
||||
self._playlists_dir = config['m3u']['playlists_dir']
|
||||
try:
|
||||
path.get_or_create_dir(config['m3u']['playlists_dir'])
|
||||
path.get_or_create_dir(self._playlists_dir)
|
||||
except EnvironmentError as error:
|
||||
logger.warning(
|
||||
'Could not create M3U playlists dir: %s',
|
||||
encoding.locale_decode(error))
|
||||
else:
|
||||
self._playlists_dir = m3u.Extension().get_data_dir(config)
|
||||
|
||||
self.playlists = M3UPlaylistsProvider(backend=self)
|
||||
self.library = M3ULibraryProvider(backend=self)
|
||||
|
||||
@ -1,3 +1,3 @@
|
||||
[m3u]
|
||||
enabled = true
|
||||
playlists_dir = $XDG_DATA_DIR/mopidy/m3u
|
||||
playlists_dir =
|
||||
|
||||
@ -23,7 +23,7 @@ class M3UPlaylistsProvider(backend.PlaylistsProvider):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(M3UPlaylistsProvider, self).__init__(*args, **kwargs)
|
||||
|
||||
self._playlists_dir = self.backend._config['m3u']['playlists_dir']
|
||||
self._playlists_dir = self.backend._playlists_dir
|
||||
self._playlists = {}
|
||||
self.refresh()
|
||||
|
||||
@ -54,6 +54,7 @@ class M3UPlaylistsProvider(backend.PlaylistsProvider):
|
||||
logger.warning(
|
||||
'Trying to delete missing playlist file %s', path)
|
||||
del self._playlists[uri]
|
||||
logger.info('Deleted playlist %s', uri)
|
||||
else:
|
||||
logger.warning('Trying to delete unknown playlist %s', uri)
|
||||
|
||||
@ -64,7 +65,7 @@ class M3UPlaylistsProvider(backend.PlaylistsProvider):
|
||||
playlists = {}
|
||||
|
||||
encoding = sys.getfilesystemencoding()
|
||||
for path in glob.glob(os.path.join(self._playlists_dir, b'*.m3u')):
|
||||
for path in glob.glob(os.path.join(self._playlists_dir, b'*.m3u*')):
|
||||
relpath = os.path.basename(path)
|
||||
uri = translator.path_to_playlist_uri(relpath)
|
||||
name = os.path.splitext(relpath)[0].decode(encoding, 'replace')
|
||||
|
||||
@ -73,11 +73,14 @@ def parse_m3u(file_path, media_dir=None):
|
||||
- Lines starting with # are ignored, except for extended M3U directives.
|
||||
- Track.name and Track.length are set from extended M3U directives.
|
||||
- m3u files are latin-1.
|
||||
- m3u8 files are utf-8
|
||||
"""
|
||||
# TODO: uris as bytes
|
||||
file_encoding = 'utf-8' if file_path.endswith(b'.m3u8') else 'latin1'
|
||||
|
||||
tracks = []
|
||||
try:
|
||||
with open(file_path) as m3u:
|
||||
with codecs.open(file_path, 'rb', file_encoding, 'replace') as m3u:
|
||||
contents = m3u.readlines()
|
||||
except IOError as error:
|
||||
logger.warning('Couldn\'t open m3u: %s', encoding.locale_decode(error))
|
||||
@ -86,12 +89,13 @@ def parse_m3u(file_path, media_dir=None):
|
||||
if not contents:
|
||||
return tracks
|
||||
|
||||
extended = contents[0].decode('latin1').startswith('#EXTM3U')
|
||||
# Strip newlines left by codecs
|
||||
contents = [line.strip() for line in contents]
|
||||
|
||||
extended = contents[0].startswith('#EXTM3U')
|
||||
|
||||
track = Track()
|
||||
for line in contents:
|
||||
line = line.strip().decode('latin1')
|
||||
|
||||
if line.startswith('#'):
|
||||
if extended and line.startswith('#EXTINF'):
|
||||
track = m3u_extinf_to_track(line)
|
||||
|
||||
@ -107,6 +107,8 @@ class Artist(ValidatedImmutableObject):
|
||||
:type uri: string
|
||||
:param name: artist name
|
||||
:type name: string
|
||||
:param sortname: artist name for sorting
|
||||
:type sortname: string
|
||||
:param musicbrainz_id: MusicBrainz ID
|
||||
:type musicbrainz_id: string
|
||||
"""
|
||||
@ -117,6 +119,9 @@ class Artist(ValidatedImmutableObject):
|
||||
#: The artist name. Read-only.
|
||||
name = fields.String()
|
||||
|
||||
#: Artist name for better sorting, e.g. with articles stripped
|
||||
sortname = fields.String()
|
||||
|
||||
#: The MusicBrainz ID of the artist. Read-only.
|
||||
musicbrainz_id = fields.Identifier()
|
||||
|
||||
|
||||
@ -271,9 +271,9 @@ class MpdContext(object):
|
||||
|
||||
If ``lookup`` is true and the ``path`` is to a track, the returned
|
||||
``data`` is a future which will contain the results from looking up
|
||||
the URI with :meth:`mopidy.core.LibraryController.lookup` If ``lookup``
|
||||
is false and the ``path`` is to a track, the returned ``data`` will be
|
||||
a :class:`mopidy.models.Ref` for the track.
|
||||
the URI with :meth:`mopidy.core.LibraryController.lookup`. If
|
||||
``lookup`` is false and the ``path`` is to a track, the returned
|
||||
``data`` will be a :class:`mopidy.models.Ref` for the track.
|
||||
|
||||
For all entries that are not tracks, the returned ``data`` will be
|
||||
:class:`None`.
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from mopidy.mpd import exceptions, protocol
|
||||
from mopidy.mpd.protocol import tagtype_list
|
||||
|
||||
|
||||
@protocol.commands.add('config', list_command=False)
|
||||
@ -93,7 +94,9 @@ def tagtypes(context):
|
||||
|
||||
Shows a list of available song metadata.
|
||||
"""
|
||||
pass # TODO
|
||||
return [
|
||||
('tagtype', tagtype) for tagtype in tagtype_list.TAGTYPE_LIST
|
||||
]
|
||||
|
||||
|
||||
@protocol.commands.add('urlhandlers')
|
||||
|
||||
@ -75,29 +75,29 @@ def listplaylists(context):
|
||||
- ncmpcpp 0.5.10 segfaults if we return 'playlist: ' on a line, so we must
|
||||
ignore playlists without names, which isn't very useful anyway.
|
||||
"""
|
||||
last_modified = _get_last_modified()
|
||||
result = []
|
||||
for playlist in context.core.playlists.get_playlists().get():
|
||||
if not playlist.name:
|
||||
for playlist_ref in context.core.playlists.as_list().get():
|
||||
if not playlist_ref.name:
|
||||
continue
|
||||
name = context.lookup_playlist_name_from_uri(playlist.uri)
|
||||
name = context.lookup_playlist_name_from_uri(playlist_ref.uri)
|
||||
result.append(('playlist', name))
|
||||
result.append(('Last-Modified', _get_last_modified(playlist)))
|
||||
result.append(('Last-Modified', last_modified))
|
||||
return result
|
||||
|
||||
|
||||
# TODO: move to translators?
|
||||
def _get_last_modified(playlist):
|
||||
def _get_last_modified(last_modified=None):
|
||||
"""Formats last modified timestamp of a playlist for MPD.
|
||||
|
||||
Time in UTC with second precision, formatted in the ISO 8601 format, with
|
||||
the "Z" time zone marker for UTC. For example, "1970-01-01T00:00:00Z".
|
||||
"""
|
||||
if playlist.last_modified is None:
|
||||
if last_modified is None:
|
||||
# If unknown, assume the playlist is modified
|
||||
dt = datetime.datetime.utcnow()
|
||||
else:
|
||||
dt = datetime.datetime.utcfromtimestamp(
|
||||
playlist.last_modified / 1000.0)
|
||||
dt = datetime.datetime.utcfromtimestamp(last_modified / 1000.0)
|
||||
dt = dt.replace(microsecond=0)
|
||||
return '%sZ' % dt.isoformat()
|
||||
|
||||
|
||||
24
mopidy/mpd/protocol/tagtype_list.py
Normal file
24
mopidy/mpd/protocol/tagtype_list.py
Normal file
@ -0,0 +1,24 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
|
||||
TAGTYPE_LIST = [
|
||||
'Artist',
|
||||
'ArtistSort',
|
||||
'Album',
|
||||
'AlbumArtist',
|
||||
'AlbumArtistSort',
|
||||
'Title',
|
||||
'Track',
|
||||
'Name',
|
||||
'Genre',
|
||||
'Date',
|
||||
'Composer',
|
||||
'Performer',
|
||||
'Disc',
|
||||
'MUSICBRAINZ_ARTISTID',
|
||||
'MUSICBRAINZ_ALBUMID',
|
||||
'MUSICBRAINZ_ALBUMARTISTID',
|
||||
'MUSICBRAINZ_TRACKID',
|
||||
'X-AlbumUri',
|
||||
'X-AlbumImage',
|
||||
]
|
||||
@ -4,6 +4,7 @@ import datetime
|
||||
import re
|
||||
|
||||
from mopidy.models import TlTrack
|
||||
from mopidy.mpd.protocol import tagtype_list
|
||||
|
||||
# TODO: special handling of local:// uri scheme
|
||||
normalize_path_re = re.compile(r'[^/]+')
|
||||
@ -35,8 +36,6 @@ def track_to_mpd_format(track, position=None, stream_title=None):
|
||||
|
||||
result = [
|
||||
('file', track.uri or ''),
|
||||
# TODO: only show length if not none, see:
|
||||
# https://github.com/mopidy/mopidy/issues/923#issuecomment-79584110
|
||||
('Time', track.length and (track.length // 1000) or 0),
|
||||
('Artist', concat_multi_values(track.artists, 'name')),
|
||||
('Album', track.album and track.album.name or ''),
|
||||
@ -97,9 +96,32 @@ def track_to_mpd_format(track, position=None, stream_title=None):
|
||||
|
||||
if track.musicbrainz_id is not None:
|
||||
result.append(('MUSICBRAINZ_TRACKID', track.musicbrainz_id))
|
||||
|
||||
if track.album and track.album.uri:
|
||||
result.append(('X-AlbumUri', track.album.uri))
|
||||
if track.album and track.album.images:
|
||||
images = ';'.join(i for i in track.album.images if i is not '')
|
||||
result.append(('X-AlbumImage', images))
|
||||
|
||||
result = [element for element in result if _has_value(*element)]
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _has_value(tagtype, value):
|
||||
"""
|
||||
Determine whether to add the tagtype to the output or not.
|
||||
|
||||
:param tagtype: the MPD tagtype
|
||||
:type tagtype: string
|
||||
:param value: the tag value
|
||||
:rtype: bool
|
||||
"""
|
||||
if tagtype in tagtype_list.TAGTYPE_LIST:
|
||||
return bool(value)
|
||||
return True
|
||||
|
||||
|
||||
def concat_multi_values(models, attribute):
|
||||
"""
|
||||
Format Mopidy model values for output to MPD client.
|
||||
|
||||
@ -3,12 +3,16 @@ from __future__ import absolute_import, unicode_literals
|
||||
import fnmatch
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
import urlparse
|
||||
|
||||
import pykka
|
||||
|
||||
from mopidy import audio as audio_lib, backend, exceptions
|
||||
import requests
|
||||
|
||||
from mopidy import audio as audio_lib, backend, exceptions, stream
|
||||
from mopidy.audio import scan, utils
|
||||
from mopidy.internal import http, playlists
|
||||
from mopidy.models import Track
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -19,22 +23,32 @@ class StreamBackend(pykka.ThreadingActor, backend.Backend):
|
||||
def __init__(self, config, audio):
|
||||
super(StreamBackend, self).__init__()
|
||||
|
||||
self._scanner = scan.Scanner(
|
||||
timeout=config['stream']['timeout'],
|
||||
proxy_config=config['proxy'])
|
||||
|
||||
self.library = StreamLibraryProvider(
|
||||
backend=self, timeout=config['stream']['timeout'],
|
||||
blacklist=config['stream']['metadata_blacklist'],
|
||||
proxy=config['proxy'])
|
||||
self.playback = backend.PlaybackProvider(audio=audio, backend=self)
|
||||
backend=self, blacklist=config['stream']['metadata_blacklist'])
|
||||
self.playback = StreamPlaybackProvider(
|
||||
audio=audio, backend=self, config=config)
|
||||
self.playlists = None
|
||||
|
||||
self.uri_schemes = audio_lib.supported_uri_schemes(
|
||||
config['stream']['protocols'])
|
||||
|
||||
if 'file' in self.uri_schemes and config['file']['enabled']:
|
||||
logger.warning(
|
||||
'The stream/protocols config value includes the "file" '
|
||||
'protocol. "file" playback is now handled by Mopidy-File. '
|
||||
'Please remove it from the stream/protocols config.')
|
||||
self.uri_schemes -= {'file'}
|
||||
|
||||
|
||||
class StreamLibraryProvider(backend.LibraryProvider):
|
||||
|
||||
def __init__(self, backend, timeout, blacklist, proxy):
|
||||
def __init__(self, backend, blacklist):
|
||||
super(StreamLibraryProvider, self).__init__(backend)
|
||||
self._scanner = scan.Scanner(timeout=timeout, proxy_config=proxy)
|
||||
self._scanner = backend._scanner
|
||||
self._blacklist_re = re.compile(
|
||||
r'^(%s)$' % '|'.join(fnmatch.translate(u) for u in blacklist))
|
||||
|
||||
@ -55,3 +69,67 @@ class StreamLibraryProvider(backend.LibraryProvider):
|
||||
track = Track(uri=uri)
|
||||
|
||||
return [track]
|
||||
|
||||
|
||||
class StreamPlaybackProvider(backend.PlaybackProvider):
|
||||
|
||||
def __init__(self, audio, backend, config):
|
||||
super(StreamPlaybackProvider, self).__init__(audio, backend)
|
||||
self._config = config
|
||||
self._scanner = backend._scanner
|
||||
|
||||
def translate_uri(self, uri):
|
||||
try:
|
||||
scan_result = self._scanner.scan(uri)
|
||||
except exceptions.ScannerError as e:
|
||||
logger.warning(
|
||||
'Problem scanning URI %s: %s', uri, e)
|
||||
return None
|
||||
|
||||
if not (scan_result.mime.startswith('text/') or
|
||||
scan_result.mime.startswith('application/')):
|
||||
return uri
|
||||
|
||||
content = self._download(uri)
|
||||
if content is None:
|
||||
return None
|
||||
|
||||
tracks = list(playlists.parse(content))
|
||||
if tracks:
|
||||
# TODO Test streams and return first that seems to be playable
|
||||
return tracks[0]
|
||||
|
||||
def _download(self, uri):
|
||||
timeout = self._config['stream']['timeout'] / 1000.0
|
||||
|
||||
session = http.get_requests_session(
|
||||
proxy_config=self._config['proxy'],
|
||||
user_agent='%s/%s' % (
|
||||
stream.Extension.dist_name, stream.Extension.version))
|
||||
|
||||
try:
|
||||
response = session.get(
|
||||
uri, stream=True, timeout=timeout)
|
||||
except requests.exceptions.Timeout:
|
||||
logger.warning(
|
||||
'Download of stream playlist (%s) failed due to connection '
|
||||
'timeout after %.3fs', uri, timeout)
|
||||
return None
|
||||
|
||||
deadline = time.time() + timeout
|
||||
content = []
|
||||
for chunk in response.iter_content(4096):
|
||||
content.append(chunk)
|
||||
if time.time() > deadline:
|
||||
logger.warning(
|
||||
'Download of stream playlist (%s) failed due to download '
|
||||
'taking more than %.3fs', uri, timeout)
|
||||
return None
|
||||
|
||||
if not response.ok:
|
||||
logger.warning(
|
||||
'Problem downloading stream playlist %s: %s',
|
||||
uri, response.reason)
|
||||
return None
|
||||
|
||||
return b''.join(content)
|
||||
|
||||
3
setup.py
3
setup.py
@ -24,8 +24,9 @@ setup(
|
||||
zip_safe=False,
|
||||
include_package_data=True,
|
||||
install_requires=[
|
||||
'setuptools',
|
||||
'Pykka >= 1.1',
|
||||
'requests',
|
||||
'setuptools',
|
||||
'tornado >= 2.3',
|
||||
],
|
||||
extras_require={'http': []},
|
||||
|
||||
@ -32,6 +32,6 @@ class IsA(object):
|
||||
return str(self.klass)
|
||||
|
||||
|
||||
any_int = IsA(int)
|
||||
any_int = IsA((int, long))
|
||||
any_str = IsA(str)
|
||||
any_unicode = IsA(compat.text_type)
|
||||
|
||||
@ -40,7 +40,10 @@ class ScannerTest(unittest.TestCase):
|
||||
self.assertEqual(self.result[name].tags[key], value)
|
||||
|
||||
def check_if_missing_plugin(self):
|
||||
if any(['missing a plug-in' in str(e) for e in self.errors.values()]):
|
||||
for path, result in self.result.items():
|
||||
if not path.endswith('.mp3'):
|
||||
continue
|
||||
if not result.playable and result.mime == 'audio/mpeg':
|
||||
raise unittest.SkipTest('Missing MP3 support?')
|
||||
|
||||
def test_tags_is_set(self):
|
||||
@ -109,6 +112,17 @@ class ScannerTest(unittest.TestCase):
|
||||
wav = path_to_data_dir('scanner/empty.wav')
|
||||
self.assertEqual(self.result[wav].duration, 0)
|
||||
|
||||
def test_uri_list(self):
|
||||
path = path_to_data_dir('scanner/playlist.m3u')
|
||||
self.scan([path])
|
||||
self.assertEqual(self.result[path].mime, 'text/uri-list')
|
||||
|
||||
def test_text_plain(self):
|
||||
# GStreamer decode bin hardcodes bad handling of text plain :/
|
||||
path = path_to_data_dir('scanner/plain.txt')
|
||||
self.scan([path])
|
||||
self.assertIn(path, self.errors)
|
||||
|
||||
@unittest.SkipTest
|
||||
def test_song_without_time_is_handeled(self):
|
||||
pass
|
||||
|
||||
@ -31,11 +31,13 @@ class TagsToTrackTest(unittest.TestCase):
|
||||
'musicbrainz-trackid': ['trackid'],
|
||||
'musicbrainz-albumid': ['albumid'],
|
||||
'musicbrainz-artistid': ['artistid'],
|
||||
'musicbrainz-sortname': ['sortname'],
|
||||
'musicbrainz-albumartistid': ['albumartistid'],
|
||||
'bitrate': [1000],
|
||||
}
|
||||
|
||||
artist = Artist(name='artist', musicbrainz_id='artistid')
|
||||
artist = Artist(name='artist', musicbrainz_id='artistid',
|
||||
sortname='sortname')
|
||||
composer = Artist(name='composer')
|
||||
performer = Artist(name='performer')
|
||||
albumartist = Artist(name='albumartist',
|
||||
@ -245,3 +247,15 @@ class TagsToTrackTest(unittest.TestCase):
|
||||
del self.tags['comment']
|
||||
self.tags['copyright'] = ['copyright1', 'copyright2']
|
||||
self.check(self.track.replace(comment='copyright1; copyright2'))
|
||||
|
||||
def test_sortname(self):
|
||||
self.tags['musicbrainz-sortname'] = ['another_sortname']
|
||||
artist = Artist(name='artist', sortname='another_sortname',
|
||||
musicbrainz_id='artistid')
|
||||
self.check(self.track.replace(artists=[artist]))
|
||||
|
||||
def test_missing_sortname(self):
|
||||
del self.tags['musicbrainz-sortname']
|
||||
artist = Artist(name='artist', sortname=None,
|
||||
musicbrainz_id='artistid')
|
||||
self.check(self.track.replace(artists=[artist]))
|
||||
|
||||
@ -6,7 +6,7 @@ import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from mopidy import config
|
||||
from mopidy import config, ext
|
||||
|
||||
from tests import path_to_data_dir
|
||||
|
||||
@ -292,3 +292,23 @@ class PostProcessorTest(unittest.TestCase):
|
||||
def test_conversion(self):
|
||||
result = config._postprocess(PROCESSED_CONFIG)
|
||||
self.assertEqual(result, INPUT_CONFIG)
|
||||
|
||||
|
||||
def test_format_initial():
|
||||
extension = ext.Extension()
|
||||
extension.ext_name = 'foo'
|
||||
extension.get_default_config = lambda: None
|
||||
extensions_data = [
|
||||
ext.ExtensionData(
|
||||
extension=extension,
|
||||
entry_point=None,
|
||||
config_schema=None,
|
||||
config_defaults=None,
|
||||
command=None,
|
||||
),
|
||||
]
|
||||
|
||||
result = config.format_initial(extensions_data)
|
||||
|
||||
assert '# For further information' in result
|
||||
assert '[foo]\n' in result
|
||||
|
||||
26
tests/config/test_defaults.py
Normal file
26
tests/config/test_defaults.py
Normal file
@ -0,0 +1,26 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from mopidy import config
|
||||
|
||||
|
||||
def test_core_schema_has_cache_dir():
|
||||
assert 'cache_dir' in config._core_schema
|
||||
assert isinstance(config._core_schema['cache_dir'], config.Path)
|
||||
|
||||
|
||||
def test_core_schema_has_config_dir():
|
||||
assert 'config_dir' in config._core_schema
|
||||
assert isinstance(config._core_schema['config_dir'], config.Path)
|
||||
|
||||
|
||||
def test_core_schema_has_data_dir():
|
||||
assert 'data_dir' in config._core_schema
|
||||
assert isinstance(config._core_schema['data_dir'], config.Path)
|
||||
|
||||
|
||||
def test_core_schema_has_max_tracklist_length():
|
||||
assert 'max_tracklist_length' in config._core_schema
|
||||
max_tracklist_length_schema = config._core_schema['max_tracklist_length']
|
||||
assert isinstance(max_tracklist_length_schema, config.Integer)
|
||||
assert max_tracklist_length_schema._minimum == 1
|
||||
assert max_tracklist_length_schema._maximum == 10000
|
||||
@ -37,7 +37,8 @@ class CoreActorTest(unittest.TestCase):
|
||||
|
||||
self.assertRaisesRegexp(
|
||||
AssertionError,
|
||||
'Cannot add URI scheme dummy1 for B2, it is already handled by B1',
|
||||
'Cannot add URI scheme "dummy1" for B2, '
|
||||
'it is already handled by B1',
|
||||
Core, mixer=None, backends=[self.backend1, self.backend2])
|
||||
|
||||
def test_version(self):
|
||||
|
||||
@ -99,10 +99,11 @@ class BackendEventsTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(send.call_args[0][0], 'playlist_changed')
|
||||
|
||||
@unittest.SkipTest
|
||||
def test_playlists_delete_sends_playlist_deleted_event(self, send):
|
||||
# TODO We should probably add a playlist_deleted event
|
||||
pass
|
||||
playlist = self.core.playlists.create('foo').get()
|
||||
self.core.playlists.delete(playlist.uri).get()
|
||||
|
||||
self.assertEqual(send.call_args[0][0], 'playlist_deleted')
|
||||
|
||||
def test_playlists_save_sends_playlist_changed_event(self, send):
|
||||
playlist = self.core.playlists.create('foo').get()
|
||||
|
||||
@ -40,7 +40,7 @@ class PlaybackHistoryTest(unittest.TestCase):
|
||||
result = self.history.get_history()
|
||||
(timestamp, ref) = result[0]
|
||||
|
||||
self.assertIsInstance(timestamp, int)
|
||||
self.assertIsInstance(timestamp, (int, long))
|
||||
self.assertEqual(track.uri, ref.uri)
|
||||
self.assertIn(track.name, ref.name)
|
||||
for artist in track.artists:
|
||||
|
||||
@ -20,6 +20,7 @@ class BaseCoreLibraryTest(unittest.TestCase):
|
||||
self.library1.get_images.return_value.get.return_value = {}
|
||||
self.library1.root_directory.get.return_value = dummy1_root
|
||||
self.backend1.library = self.library1
|
||||
self.backend1.has_playlists.return_value.get.return_value = False
|
||||
|
||||
dummy2_root = Ref.directory(uri='dummy2:directory', name='dummy2')
|
||||
self.backend2 = mock.Mock()
|
||||
@ -29,13 +30,14 @@ class BaseCoreLibraryTest(unittest.TestCase):
|
||||
self.library2.get_images.return_value.get.return_value = {}
|
||||
self.library2.root_directory.get.return_value = dummy2_root
|
||||
self.backend2.library = self.library2
|
||||
self.backend2.has_playlists.return_value.get.return_value = False
|
||||
|
||||
# A backend without the optional library provider
|
||||
self.backend3 = mock.Mock()
|
||||
self.backend3.uri_schemes.get.return_value = ['dummy3']
|
||||
self.backend3.actor_ref.actor_class.__name__ = 'DummyBackend3'
|
||||
self.backend3.has_library().get.return_value = False
|
||||
self.backend3.has_library_browse().get.return_value = False
|
||||
self.backend3.has_library.return_value.get.return_value = False
|
||||
self.backend3.has_library_browse.return_value.get.return_value = False
|
||||
|
||||
self.core = core.Core(mixer=None, backends=[
|
||||
self.backend1, self.backend2, self.backend3])
|
||||
|
||||
@ -47,6 +47,9 @@ class CoreListenerTest(unittest.TestCase):
|
||||
def test_listener_has_default_impl_for_playlist_changed(self):
|
||||
self.listener.playlist_changed(Playlist())
|
||||
|
||||
def test_listener_has_default_impl_for_playlist_deleted(self):
|
||||
self.listener.playlist_deleted(Playlist())
|
||||
|
||||
def test_listener_has_default_impl_for_options_changed(self):
|
||||
self.listener.options_changed()
|
||||
|
||||
|
||||
@ -794,6 +794,7 @@ class CorePlaybackWithOldBackendTest(unittest.TestCase):
|
||||
}
|
||||
|
||||
b = mock.Mock()
|
||||
b.actor_ref.actor_class.__name__ = 'DummyBackend'
|
||||
b.uri_schemes.get.return_value = ['dummy1']
|
||||
b.playback = mock.Mock(spec=backend.PlaybackProvider)
|
||||
b.playback.play.side_effect = TypeError
|
||||
|
||||
1
tests/data/scanner/plain.txt
Normal file
1
tests/data/scanner/plain.txt
Normal file
@ -0,0 +1 @@
|
||||
Some plain text file with nothing special in it.
|
||||
1
tests/data/scanner/playlist.m3u
Normal file
1
tests/data/scanner/playlist.m3u
Normal file
@ -0,0 +1 @@
|
||||
http://example.com/
|
||||
0
tests/file/__init__.py
Normal file
0
tests/file/__init__.py
Normal file
20
tests/file/conftest.py
Normal file
20
tests/file/conftest.py
Normal file
@ -0,0 +1,20 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def file_config():
|
||||
return {
|
||||
'file': {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def file_library(file_config):
|
||||
# Import library, thus scanner, thus gobject as late as possible to avoid
|
||||
# hard to track import errors during conftest setup.
|
||||
from mopidy.file import library
|
||||
|
||||
return library.FileLibraryProvider(backend=None, config=file_config)
|
||||
3
tests/file/test_browse.py
Normal file
3
tests/file/test_browse.py
Normal file
@ -0,0 +1,3 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# TODO Test browse()
|
||||
3
tests/file/test_lookup.py
Normal file
3
tests/file/test_lookup.py
Normal file
@ -0,0 +1,3 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# TODO Test lookup()
|
||||
@ -380,6 +380,18 @@ class FindMTimesTest(unittest.TestCase):
|
||||
self.assertEqual(expected, result)
|
||||
self.assertEqual({}, errors)
|
||||
|
||||
def test_gives_mtime_in_milliseconds(self):
|
||||
fname = self.touch('foobar')
|
||||
|
||||
os.utime(fname, (1, 3.14159265))
|
||||
|
||||
result, errors = path.find_mtimes(fname)
|
||||
|
||||
self.assertEqual(len(result), 1)
|
||||
mtime, = result.values()
|
||||
self.assertEqual(mtime, 3141)
|
||||
self.assertEqual(errors, {})
|
||||
|
||||
|
||||
# TODO: kill this in favour of just os.path.getmtime + mocks
|
||||
class MtimeTest(unittest.TestCase):
|
||||
|
||||
@ -2,28 +2,39 @@
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import io
|
||||
import unittest
|
||||
|
||||
from mopidy.audio import playlists
|
||||
import pytest
|
||||
|
||||
from mopidy.internal import playlists
|
||||
|
||||
|
||||
BAD = b'foobarbaz'
|
||||
|
||||
M3U = b"""#EXTM3U
|
||||
EXTM3U = b"""#EXTM3U
|
||||
#EXTINF:123, Sample artist - Sample title
|
||||
file:///tmp/foo
|
||||
#EXTINF:321,Example Artist - Example \xc5\xa7\xc5\x95
|
||||
file:///tmp/bar
|
||||
|
||||
#EXTINF:213,Some Artist - Other title
|
||||
file:///tmp/baz
|
||||
"""
|
||||
|
||||
URILIST = b"""
|
||||
file:///tmp/foo
|
||||
# a comment \xc5\xa7\xc5\x95
|
||||
file:///tmp/bar
|
||||
|
||||
file:///tmp/baz
|
||||
"""
|
||||
|
||||
PLS = b"""[Playlist]
|
||||
NumberOfEntries=3
|
||||
File1=file:///tmp/foo
|
||||
Title1=Sample Title
|
||||
Length1=123
|
||||
|
||||
File2=file:///tmp/bar
|
||||
Title2=Example \xc5\xa7\xc5\x95
|
||||
Length2=321
|
||||
@ -76,14 +87,20 @@ XSPF = b"""<?xml version="1.0" encoding="UTF-8"?>
|
||||
</playlist>
|
||||
"""
|
||||
|
||||
EXPECTED = [b'file:///tmp/foo', b'file:///tmp/bar', b'file:///tmp/baz']
|
||||
|
||||
class TypeFind(object):
|
||||
|
||||
def __init__(self, data):
|
||||
self.data = data
|
||||
|
||||
def peek(self, start, end):
|
||||
return self.data[start:end]
|
||||
@pytest.mark.parametrize('data,result', [
|
||||
(BAD, []),
|
||||
(URILIST, EXPECTED),
|
||||
(EXTM3U, EXPECTED),
|
||||
(PLS, EXPECTED),
|
||||
(ASX, EXPECTED),
|
||||
(SIMPLE_ASX, EXPECTED),
|
||||
(XSPF, EXPECTED),
|
||||
])
|
||||
def test_parse(data, result):
|
||||
assert playlists.parse(data) == result
|
||||
|
||||
|
||||
class BasePlaylistTest(object):
|
||||
@ -93,26 +110,25 @@ class BasePlaylistTest(object):
|
||||
parse = None
|
||||
|
||||
def test_detect_valid_header(self):
|
||||
self.assertTrue(self.detect(TypeFind(self.valid)))
|
||||
self.assertTrue(self.detect(self.valid))
|
||||
|
||||
def test_detect_invalid_header(self):
|
||||
self.assertFalse(self.detect(TypeFind(self.invalid)))
|
||||
self.assertFalse(self.detect(self.invalid))
|
||||
|
||||
def test_parse_valid_playlist(self):
|
||||
uris = list(self.parse(io.BytesIO(self.valid)))
|
||||
expected = [b'file:///tmp/foo', b'file:///tmp/bar', b'file:///tmp/baz']
|
||||
self.assertEqual(uris, expected)
|
||||
uris = list(self.parse(self.valid))
|
||||
self.assertEqual(uris, EXPECTED)
|
||||
|
||||
def test_parse_invalid_playlist(self):
|
||||
uris = list(self.parse(io.BytesIO(self.invalid)))
|
||||
uris = list(self.parse(self.invalid))
|
||||
self.assertEqual(uris, [])
|
||||
|
||||
|
||||
class M3uPlaylistTest(BasePlaylistTest, unittest.TestCase):
|
||||
valid = M3U
|
||||
class ExtM3uPlaylistTest(BasePlaylistTest, unittest.TestCase):
|
||||
valid = EXTM3U
|
||||
invalid = BAD
|
||||
detect = staticmethod(playlists.detect_m3u_header)
|
||||
parse = staticmethod(playlists.parse_m3u)
|
||||
detect = staticmethod(playlists.detect_extm3u_header)
|
||||
parse = staticmethod(playlists.parse_extm3u)
|
||||
|
||||
|
||||
class PlsPlaylistTest(BasePlaylistTest, unittest.TestCase):
|
||||
@ -141,3 +157,17 @@ class XspfPlaylistTest(BasePlaylistTest, unittest.TestCase):
|
||||
invalid = BAD
|
||||
detect = staticmethod(playlists.detect_xspf_header)
|
||||
parse = staticmethod(playlists.parse_xspf)
|
||||
|
||||
|
||||
class UriListPlaylistTest(unittest.TestCase):
|
||||
valid = URILIST
|
||||
invalid = BAD
|
||||
parse = staticmethod(playlists.parse_urilist)
|
||||
|
||||
def test_parse_valid_playlist(self):
|
||||
uris = list(self.parse(self.valid))
|
||||
self.assertEqual(uris, EXPECTED)
|
||||
|
||||
def test_parse_invalid_playlist(self):
|
||||
uris = list(self.parse(self.invalid))
|
||||
self.assertEqual(uris, [])
|
||||
@ -45,10 +45,11 @@ class BrowseCacheTest(unittest.TestCase):
|
||||
class JsonLibraryTest(unittest.TestCase):
|
||||
|
||||
config = {
|
||||
'core': {
|
||||
'data_dir': path_to_data_dir(''),
|
||||
},
|
||||
'local': {
|
||||
'media_dir': path_to_data_dir(''),
|
||||
'data_dir': path_to_data_dir(''),
|
||||
'playlists_dir': b'',
|
||||
'library': 'json',
|
||||
},
|
||||
}
|
||||
|
||||
@ -65,10 +65,11 @@ class LocalLibraryProviderTest(unittest.TestCase):
|
||||
]
|
||||
|
||||
config = {
|
||||
'core': {
|
||||
'data_dir': path_to_data_dir(''),
|
||||
},
|
||||
'local': {
|
||||
'media_dir': path_to_data_dir(''),
|
||||
'data_dir': path_to_data_dir(''),
|
||||
'playlists_dir': b'',
|
||||
'library': 'json',
|
||||
},
|
||||
}
|
||||
@ -105,11 +106,15 @@ class LocalLibraryProviderTest(unittest.TestCase):
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
try:
|
||||
tmplib = os.path.join(tmpdir, 'library.json.gz')
|
||||
shutil.copy(path_to_data_dir('library.json.gz'), tmplib)
|
||||
tmpdir_local = os.path.join(tmpdir, 'local')
|
||||
shutil.copytree(path_to_data_dir('local'), tmpdir_local)
|
||||
|
||||
config = {'local': self.config['local'].copy()}
|
||||
config['local']['data_dir'] = tmpdir
|
||||
config = {
|
||||
'core': {
|
||||
'data_dir': tmpdir,
|
||||
},
|
||||
'local': self.config['local'],
|
||||
}
|
||||
backend = actor.LocalBackend(config=config, audio=None)
|
||||
|
||||
# Sanity check that value is in the library
|
||||
@ -117,6 +122,7 @@ class LocalLibraryProviderTest(unittest.TestCase):
|
||||
self.assertEqual(result, self.tracks[0:1])
|
||||
|
||||
# Clear and refresh.
|
||||
tmplib = os.path.join(tmpdir_local, 'library.json.gz')
|
||||
open(tmplib, 'w').close()
|
||||
backend.library.refresh()
|
||||
|
||||
|
||||
@ -25,12 +25,11 @@ logger = logging.getLogger(__name__)
|
||||
class LocalPlaybackProviderTest(unittest.TestCase):
|
||||
config = {
|
||||
'core': {
|
||||
'data_dir': path_to_data_dir(''),
|
||||
'max_tracklist_length': 10000,
|
||||
},
|
||||
'local': {
|
||||
'media_dir': path_to_data_dir(''),
|
||||
'data_dir': path_to_data_dir(''),
|
||||
'playlists_dir': b'',
|
||||
'library': 'json',
|
||||
}
|
||||
}
|
||||
|
||||
@ -18,11 +18,11 @@ from tests.local import generate_song, populate_tracklist
|
||||
class LocalTracklistProviderTest(unittest.TestCase):
|
||||
config = {
|
||||
'core': {
|
||||
'data_dir': path_to_data_dir(''),
|
||||
'max_tracklist_length': 10000
|
||||
},
|
||||
'local': {
|
||||
'media_dir': path_to_data_dir(''),
|
||||
'data_dir': path_to_data_dir(''),
|
||||
'playlists_dir': b'',
|
||||
'library': 'json',
|
||||
}
|
||||
|
||||
@ -3,6 +3,7 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
@ -156,8 +157,11 @@ class M3UPlaylistsProviderTest(unittest.TestCase):
|
||||
self.core.playlists.refresh()
|
||||
|
||||
self.assertEqual(len(self.core.playlists.as_list()), 1)
|
||||
result = self.core.playlists.lookup(uri)
|
||||
self.assertEqual('\ufffd\ufffd\ufffd', result.name)
|
||||
result = self.core.playlists.as_list()
|
||||
if platform.system() == 'Darwin':
|
||||
self.assertEqual('%F8%E6%E5', result[0].name)
|
||||
else:
|
||||
self.assertEqual('\ufffd\ufffd\ufffd', result[0].name)
|
||||
|
||||
@unittest.SkipTest
|
||||
def test_playlists_dir_is_created(self):
|
||||
|
||||
@ -15,12 +15,15 @@ from tests import path_to_data_dir
|
||||
data_dir = path_to_data_dir('')
|
||||
song1_path = path_to_data_dir('song1.mp3')
|
||||
song2_path = path_to_data_dir('song2.mp3')
|
||||
song3_path = path_to_data_dir('φοο.mp3')
|
||||
encoded_path = path_to_data_dir('æøå.mp3')
|
||||
song1_uri = path.path_to_uri(song1_path)
|
||||
song2_uri = path.path_to_uri(song2_path)
|
||||
song3_uri = path.path_to_uri(song3_path)
|
||||
encoded_uri = path.path_to_uri(encoded_path)
|
||||
song1_track = Track(uri=song1_uri)
|
||||
song2_track = Track(uri=song2_uri)
|
||||
song3_track = Track(uri=song3_uri)
|
||||
encoded_track = Track(uri=encoded_uri)
|
||||
song1_ext_track = song1_track.replace(name='song1')
|
||||
song2_ext_track = song2_track.replace(name='song2', length=60000)
|
||||
@ -116,6 +119,16 @@ class M3UToUriTest(unittest.TestCase):
|
||||
tracks = self.parse(path_to_data_dir('encoding-ext.m3u'))
|
||||
self.assertEqual([encoded_ext_track], tracks)
|
||||
|
||||
def test_m3u8_file(self):
|
||||
with tempfile.NamedTemporaryFile(suffix='.m3u8', delete=False) as tmp:
|
||||
tmp.write(song3_path)
|
||||
try:
|
||||
tracks = self.parse(tmp.name)
|
||||
self.assertEqual([song3_track], tracks)
|
||||
finally:
|
||||
if os.path.exists(tmp.name):
|
||||
os.remove(tmp.name)
|
||||
|
||||
|
||||
class URItoM3UTest(unittest.TestCase):
|
||||
pass
|
||||
|
||||
@ -2,8 +2,10 @@ from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import unittest
|
||||
|
||||
import mock
|
||||
|
||||
from mopidy.models import Album, Artist, Playlist, Ref, SearchResult, Track
|
||||
from mopidy.mpd.protocol import music_db
|
||||
from mopidy.mpd.protocol import music_db, stored_playlists
|
||||
|
||||
from tests.mpd import protocol
|
||||
|
||||
@ -299,33 +301,37 @@ class MusicDatabaseHandlerTest(protocol.BaseTestCase):
|
||||
self.send_request('listfiles')
|
||||
self.assertEqualResponse('ACK [0@0] {listfiles} Not implemented')
|
||||
|
||||
def test_lsinfo_without_path_returns_same_as_for_root(self):
|
||||
last_modified = 1390942873222
|
||||
@mock.patch.object(stored_playlists, '_get_last_modified')
|
||||
def test_lsinfo_without_path_returns_same_as_for_root(
|
||||
self, last_modified_mock):
|
||||
last_modified_mock.return_value = '2015-08-05T22:51:06Z'
|
||||
self.backend.playlists.set_dummy_playlists([
|
||||
Playlist(name='a', uri='dummy:/a', last_modified=last_modified)])
|
||||
Playlist(name='a', uri='dummy:/a')])
|
||||
|
||||
response1 = self.send_request('lsinfo')
|
||||
response2 = self.send_request('lsinfo "/"')
|
||||
self.assertEqual(response1, response2)
|
||||
|
||||
def test_lsinfo_with_empty_path_returns_same_as_for_root(self):
|
||||
last_modified = 1390942873222
|
||||
@mock.patch.object(stored_playlists, '_get_last_modified')
|
||||
def test_lsinfo_with_empty_path_returns_same_as_for_root(
|
||||
self, last_modified_mock):
|
||||
last_modified_mock.return_value = '2015-08-05T22:51:06Z'
|
||||
self.backend.playlists.set_dummy_playlists([
|
||||
Playlist(name='a', uri='dummy:/a', last_modified=last_modified)])
|
||||
Playlist(name='a', uri='dummy:/a')])
|
||||
|
||||
response1 = self.send_request('lsinfo ""')
|
||||
response2 = self.send_request('lsinfo "/"')
|
||||
self.assertEqual(response1, response2)
|
||||
|
||||
def test_lsinfo_for_root_includes_playlists(self):
|
||||
last_modified = 1390942873222
|
||||
@mock.patch.object(stored_playlists, '_get_last_modified')
|
||||
def test_lsinfo_for_root_includes_playlists(self, last_modified_mock):
|
||||
last_modified_mock.return_value = '2015-08-05T22:51:06Z'
|
||||
self.backend.playlists.set_dummy_playlists([
|
||||
Playlist(name='a', uri='dummy:/a', last_modified=last_modified)])
|
||||
Playlist(name='a', uri='dummy:/a')])
|
||||
|
||||
self.send_request('lsinfo "/"')
|
||||
self.assertInResponse('playlist: a')
|
||||
# Date without milliseconds and with time zone information
|
||||
self.assertInResponse('Last-Modified: 2014-01-28T21:01:13Z')
|
||||
self.assertInResponse('Last-Modified: 2015-08-05T22:51:06Z')
|
||||
self.assertInResponse('OK')
|
||||
|
||||
def test_lsinfo_for_root_includes_dirs_for_each_lib_with_content(self):
|
||||
@ -337,7 +343,10 @@ class MusicDatabaseHandlerTest(protocol.BaseTestCase):
|
||||
self.assertInResponse('directory: dummy')
|
||||
self.assertInResponse('OK')
|
||||
|
||||
def test_lsinfo_for_dir_with_and_without_leading_slash_is_the_same(self):
|
||||
@mock.patch.object(stored_playlists, '_get_last_modified')
|
||||
def test_lsinfo_for_dir_with_and_without_leading_slash_is_the_same(
|
||||
self, last_modified_mock):
|
||||
last_modified_mock.return_value = '2015-08-05T22:51:06Z'
|
||||
self.backend.library.dummy_browse_result = {
|
||||
'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
|
||||
Ref.directory(uri='dummy:/foo', name='foo')]}
|
||||
@ -346,7 +355,10 @@ class MusicDatabaseHandlerTest(protocol.BaseTestCase):
|
||||
response2 = self.send_request('lsinfo "/dummy"')
|
||||
self.assertEqual(response1, response2)
|
||||
|
||||
def test_lsinfo_for_dir_with_and_without_trailing_slash_is_the_same(self):
|
||||
@mock.patch.object(stored_playlists, '_get_last_modified')
|
||||
def test_lsinfo_for_dir_with_and_without_trailing_slash_is_the_same(
|
||||
self, last_modified_mock):
|
||||
last_modified_mock.return_value = '2015-08-05T22:51:06Z'
|
||||
self.backend.library.dummy_browse_result = {
|
||||
'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
|
||||
Ref.directory(uri='dummy:/foo', name='foo')]}
|
||||
@ -404,12 +416,11 @@ class MusicDatabaseHandlerTest(protocol.BaseTestCase):
|
||||
self.assertInResponse('OK')
|
||||
|
||||
def test_lsinfo_for_root_returns_browse_result_before_playlists(self):
|
||||
last_modified = 1390942873222
|
||||
self.backend.library.dummy_browse_result = {
|
||||
'dummy:/': [Ref.track(uri='dummy:/a', name='a'),
|
||||
Ref.directory(uri='dummy:/foo', name='foo')]}
|
||||
self.backend.playlists.set_dummy_playlists([
|
||||
Playlist(name='a', uri='dummy:/a', last_modified=last_modified)])
|
||||
Playlist(name='a', uri='dummy:/a')])
|
||||
|
||||
response = self.send_request('lsinfo "/"')
|
||||
self.assertLess(response.index('directory: dummy'),
|
||||
|
||||
@ -41,6 +41,23 @@ class ReflectionHandlerTest(protocol.BaseTestCase):
|
||||
|
||||
def test_tagtypes(self):
|
||||
self.send_request('tagtypes')
|
||||
self.assertInResponse('tagtype: Artist')
|
||||
self.assertInResponse('tagtype: ArtistSort')
|
||||
self.assertInResponse('tagtype: Album')
|
||||
self.assertInResponse('tagtype: AlbumArtist')
|
||||
self.assertInResponse('tagtype: AlbumArtistSort')
|
||||
self.assertInResponse('tagtype: Title')
|
||||
self.assertInResponse('tagtype: Track')
|
||||
self.assertInResponse('tagtype: Name')
|
||||
self.assertInResponse('tagtype: Genre')
|
||||
self.assertInResponse('tagtype: Date')
|
||||
self.assertInResponse('tagtype: Composer')
|
||||
self.assertInResponse('tagtype: Performer')
|
||||
self.assertInResponse('tagtype: Disc')
|
||||
self.assertInResponse('tagtype: MUSICBRAINZ_ARTISTID')
|
||||
self.assertInResponse('tagtype: MUSICBRAINZ_ALBUMID')
|
||||
self.assertInResponse('tagtype: MUSICBRAINZ_ALBUMARTISTID')
|
||||
self.assertInResponse('tagtype: MUSICBRAINZ_TRACKID')
|
||||
self.assertInResponse('OK')
|
||||
|
||||
def test_urlhandlers(self):
|
||||
|
||||
@ -2,7 +2,10 @@ from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import random
|
||||
|
||||
import mock
|
||||
|
||||
from mopidy.models import Playlist, Ref, Track
|
||||
from mopidy.mpd.protocol import stored_playlists
|
||||
|
||||
from tests.mpd import protocol
|
||||
|
||||
@ -214,12 +217,14 @@ class IssueGH1120RegressionTest(protocol.BaseTestCase):
|
||||
|
||||
"""
|
||||
|
||||
def test(self):
|
||||
@mock.patch.object(stored_playlists, '_get_last_modified')
|
||||
def test(self, last_modified_mock):
|
||||
last_modified_mock.return_value = '2015-08-05T22:51:06Z'
|
||||
self.backend.library.dummy_browse_result = {
|
||||
'dummy:/': [Ref.playlist(name='Top 100 tracks', uri='dummy:/1')],
|
||||
}
|
||||
self.backend.playlists.set_dummy_playlists([
|
||||
Playlist(name='Top 100 tracks', uri='dummy:/1', last_modified=123),
|
||||
Playlist(name='Top 100 tracks', uri='dummy:/1'),
|
||||
])
|
||||
|
||||
response1 = self.send_request('lsinfo "/"')
|
||||
|
||||
@ -20,10 +20,10 @@ class StatusHandlerTest(protocol.BaseTestCase):
|
||||
self.send_request('currentsong')
|
||||
self.assertInResponse('file: dummy:/a')
|
||||
self.assertInResponse('Time: 0')
|
||||
self.assertInResponse('Artist: ')
|
||||
self.assertInResponse('Title: ')
|
||||
self.assertInResponse('Album: ')
|
||||
self.assertInResponse('Track: 0')
|
||||
self.assertNotInResponse('Artist: ')
|
||||
self.assertNotInResponse('Title: ')
|
||||
self.assertNotInResponse('Album: ')
|
||||
self.assertNotInResponse('Track: 0')
|
||||
self.assertNotInResponse('Date: ')
|
||||
self.assertInResponse('Pos: 0')
|
||||
self.assertInResponse('Id: 0')
|
||||
|
||||
@ -1,6 +1,9 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import mock
|
||||
|
||||
from mopidy.models import Playlist, Track
|
||||
from mopidy.mpd.protocol import stored_playlists
|
||||
|
||||
from tests.mpd import protocol
|
||||
|
||||
@ -45,7 +48,7 @@ class PlaylistsHandlerTest(protocol.BaseTestCase):
|
||||
|
||||
self.send_request('listplaylistinfo "name"')
|
||||
self.assertInResponse('file: dummy:a')
|
||||
self.assertInResponse('Track: 0')
|
||||
self.assertNotInResponse('Track: 0')
|
||||
self.assertNotInResponse('Pos: 0')
|
||||
self.assertInResponse('OK')
|
||||
|
||||
@ -56,7 +59,7 @@ class PlaylistsHandlerTest(protocol.BaseTestCase):
|
||||
|
||||
self.send_request('listplaylistinfo name')
|
||||
self.assertInResponse('file: dummy:a')
|
||||
self.assertInResponse('Track: 0')
|
||||
self.assertNotInResponse('Track: 0')
|
||||
self.assertNotInResponse('Pos: 0')
|
||||
self.assertInResponse('OK')
|
||||
|
||||
@ -72,19 +75,20 @@ class PlaylistsHandlerTest(protocol.BaseTestCase):
|
||||
|
||||
self.send_request('listplaylistinfo "a [2]"')
|
||||
self.assertInResponse('file: c')
|
||||
self.assertInResponse('Track: 0')
|
||||
self.assertNotInResponse('Track: 0')
|
||||
self.assertNotInResponse('Pos: 0')
|
||||
self.assertInResponse('OK')
|
||||
|
||||
def test_listplaylists(self):
|
||||
last_modified = 1390942873222
|
||||
@mock.patch.object(stored_playlists, '_get_last_modified')
|
||||
def test_listplaylists(self, last_modified_mock):
|
||||
last_modified_mock.return_value = '2015-08-05T22:51:06Z'
|
||||
self.backend.playlists.set_dummy_playlists([
|
||||
Playlist(name='a', uri='dummy:a', last_modified=last_modified)])
|
||||
Playlist(name='a', uri='dummy:a')])
|
||||
|
||||
self.send_request('listplaylists')
|
||||
self.assertInResponse('playlist: a')
|
||||
# Date without milliseconds and with time zone information
|
||||
self.assertInResponse('Last-Modified: 2014-01-28T21:01:13Z')
|
||||
self.assertInResponse('Last-Modified: 2015-08-05T22:51:06Z')
|
||||
self.assertInResponse('OK')
|
||||
|
||||
def test_listplaylists_duplicate(self):
|
||||
|
||||
@ -14,7 +14,8 @@ class TrackMpdFormatTest(unittest.TestCase):
|
||||
name='a name',
|
||||
album=Album(
|
||||
name='an album', num_tracks=13,
|
||||
artists=[Artist(name='an other artist')]),
|
||||
artists=[Artist(name='an other artist')],
|
||||
uri='urischeme:album:12345', images=['image1']),
|
||||
track_no=7,
|
||||
composers=[Artist(name='a composer')],
|
||||
performers=[Artist(name='a performer')],
|
||||
@ -33,17 +34,17 @@ class TrackMpdFormatTest(unittest.TestCase):
|
||||
path.mtime.undo_fake()
|
||||
|
||||
def test_track_to_mpd_format_for_empty_track(self):
|
||||
# TODO: this is likely wrong, see:
|
||||
# https://github.com/mopidy/mopidy/issues/923#issuecomment-79584110
|
||||
result = translator.track_to_mpd_format(Track())
|
||||
self.assertIn(('file', ''), result)
|
||||
self.assertIn(('Time', 0), result)
|
||||
self.assertIn(('Artist', ''), result)
|
||||
self.assertIn(('Title', ''), result)
|
||||
self.assertIn(('Album', ''), result)
|
||||
self.assertIn(('Track', 0), result)
|
||||
result = translator.track_to_mpd_format(
|
||||
Track(uri='a uri', length=137000)
|
||||
)
|
||||
self.assertIn(('file', 'a uri'), result)
|
||||
self.assertIn(('Time', 137), result)
|
||||
self.assertNotIn(('Artist', ''), result)
|
||||
self.assertNotIn(('Title', ''), result)
|
||||
self.assertNotIn(('Album', ''), result)
|
||||
self.assertNotIn(('Track', 0), result)
|
||||
self.assertNotIn(('Date', ''), result)
|
||||
self.assertEqual(len(result), 6)
|
||||
self.assertEqual(len(result), 2)
|
||||
|
||||
def test_track_to_mpd_format_with_position(self):
|
||||
result = translator.track_to_mpd_format(Track(), position=1)
|
||||
@ -76,8 +77,10 @@ class TrackMpdFormatTest(unittest.TestCase):
|
||||
self.assertIn(('Disc', 1), result)
|
||||
self.assertIn(('Pos', 9), result)
|
||||
self.assertIn(('Id', 122), result)
|
||||
self.assertIn(('X-AlbumUri', 'urischeme:album:12345'), result)
|
||||
self.assertIn(('X-AlbumImage', 'image1'), result)
|
||||
self.assertNotIn(('Comment', 'a comment'), result)
|
||||
self.assertEqual(len(result), 14)
|
||||
self.assertEqual(len(result), 16)
|
||||
|
||||
def test_track_to_mpd_format_with_last_modified(self):
|
||||
track = self.track.replace(last_modified=995303899000)
|
||||
@ -137,7 +140,7 @@ class TrackMpdFormatTest(unittest.TestCase):
|
||||
def test_track_to_mpd_format_with_empty_stream_title(self):
|
||||
result = translator.track_to_mpd_format(self.track, stream_title='')
|
||||
self.assertIn(('Name', 'a name'), result)
|
||||
self.assertIn(('Title', ''), result)
|
||||
self.assertNotIn(('Title', ''), result)
|
||||
|
||||
def test_track_to_mpd_format_with_stream_and_no_track_name(self):
|
||||
track = self.track.replace(name=None)
|
||||
|
||||
@ -1,16 +1,10 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import unittest
|
||||
|
||||
import gobject
|
||||
gobject.threads_init()
|
||||
|
||||
import mock
|
||||
|
||||
import pygst
|
||||
pygst.require('0.10')
|
||||
import gst # noqa: pygst magic is needed to import correct gst
|
||||
import pytest
|
||||
|
||||
from mopidy.audio import scan
|
||||
from mopidy.internal import path
|
||||
from mopidy.models import Track
|
||||
from mopidy.stream import actor
|
||||
@ -18,27 +12,44 @@ from mopidy.stream import actor
|
||||
from tests import path_to_data_dir
|
||||
|
||||
|
||||
class LibraryProviderTest(unittest.TestCase):
|
||||
@pytest.fixture
|
||||
def scanner():
|
||||
return scan.Scanner(timeout=100, proxy_config={})
|
||||
|
||||
def setUp(self): # noqa: N802
|
||||
self.backend = mock.Mock()
|
||||
self.backend.uri_schemes = ['file']
|
||||
self.uri = path.path_to_uri(path_to_data_dir('song1.wav'))
|
||||
|
||||
def test_lookup_ignores_unknown_scheme(self):
|
||||
library = actor.StreamLibraryProvider(self.backend, 1000, [], {})
|
||||
self.assertFalse(library.lookup('http://example.com'))
|
||||
@pytest.fixture
|
||||
def backend(scanner):
|
||||
backend = mock.Mock()
|
||||
backend.uri_schemes = ['file']
|
||||
backend._scanner = scanner
|
||||
return backend
|
||||
|
||||
def test_lookup_respects_blacklist(self):
|
||||
library = actor.StreamLibraryProvider(self.backend, 10, [self.uri], {})
|
||||
self.assertEqual([Track(uri=self.uri)], library.lookup(self.uri))
|
||||
|
||||
def test_lookup_respects_blacklist_globbing(self):
|
||||
@pytest.fixture
|
||||
def track_uri():
|
||||
return path.path_to_uri(path_to_data_dir('song1.wav'))
|
||||
|
||||
|
||||
def test_lookup_ignores_unknown_scheme(backend):
|
||||
library = actor.StreamLibraryProvider(backend, [])
|
||||
|
||||
assert library.lookup('http://example.com') == []
|
||||
|
||||
|
||||
def test_lookup_respects_blacklist(backend, track_uri):
|
||||
library = actor.StreamLibraryProvider(backend, [track_uri])
|
||||
|
||||
assert library.lookup(track_uri) == [Track(uri=track_uri)]
|
||||
|
||||
|
||||
def test_lookup_respects_blacklist_globbing(backend, track_uri):
|
||||
blacklist = [path.path_to_uri(path_to_data_dir('')) + '*']
|
||||
library = actor.StreamLibraryProvider(self.backend, 100, blacklist, {})
|
||||
self.assertEqual([Track(uri=self.uri)], library.lookup(self.uri))
|
||||
library = actor.StreamLibraryProvider(backend, blacklist)
|
||||
|
||||
def test_lookup_converts_uri_metadata_to_track(self):
|
||||
library = actor.StreamLibraryProvider(self.backend, 100, [], {})
|
||||
self.assertEqual([Track(length=4406, uri=self.uri)],
|
||||
library.lookup(self.uri))
|
||||
assert library.lookup(track_uri) == [Track(uri=track_uri)]
|
||||
|
||||
|
||||
def test_lookup_converts_uri_metadata_to_track(backend, track_uri):
|
||||
library = actor.StreamLibraryProvider(backend, [])
|
||||
|
||||
assert library.lookup(track_uri) == [Track(length=4406, uri=track_uri)]
|
||||
|
||||
145
tests/stream/test_playback.py
Normal file
145
tests/stream/test_playback.py
Normal file
@ -0,0 +1,145 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import mock
|
||||
|
||||
import pytest
|
||||
|
||||
import requests
|
||||
|
||||
import responses
|
||||
|
||||
from mopidy import exceptions
|
||||
from mopidy.audio import scan
|
||||
from mopidy.stream import actor
|
||||
|
||||
|
||||
TIMEOUT = 1000
|
||||
URI = 'http://example.com/listen.m3u'
|
||||
BODY = """
|
||||
#EXTM3U
|
||||
http://example.com/stream.mp3
|
||||
http://foo.bar/baz
|
||||
""".strip()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def config():
|
||||
return {
|
||||
'proxy': {},
|
||||
'stream': {
|
||||
'timeout': TIMEOUT,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def audio():
|
||||
return mock.Mock()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def scanner():
|
||||
scanner = mock.Mock(spec=scan.Scanner)
|
||||
scanner.scan.return_value.mime = 'text/foo'
|
||||
return scanner
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def backend(scanner):
|
||||
backend = mock.Mock()
|
||||
backend.uri_schemes = ['file']
|
||||
backend._scanner = scanner
|
||||
return backend
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def provider(audio, backend, config):
|
||||
return actor.StreamPlaybackProvider(audio, backend, config)
|
||||
|
||||
|
||||
@responses.activate
|
||||
def test_translate_uri_of_audio_stream_returns_same_uri(
|
||||
scanner, provider):
|
||||
|
||||
scanner.scan.return_value.mime = 'audio/ogg'
|
||||
|
||||
result = provider.translate_uri(URI)
|
||||
|
||||
scanner.scan.assert_called_once_with(URI)
|
||||
assert result == URI
|
||||
|
||||
|
||||
@responses.activate
|
||||
def test_translate_uri_of_playlist_returns_first_uri_in_list(
|
||||
scanner, provider):
|
||||
|
||||
responses.add(
|
||||
responses.GET, URI, body=BODY, content_type='audio/x-mpegurl')
|
||||
|
||||
result = provider.translate_uri(URI)
|
||||
|
||||
scanner.scan.assert_called_once_with(URI)
|
||||
assert result == 'http://example.com/stream.mp3'
|
||||
assert responses.calls[0].request.headers['User-Agent'].startswith(
|
||||
'Mopidy-Stream/')
|
||||
|
||||
|
||||
@responses.activate
|
||||
def test_translate_uri_of_playlist_with_xml_mimetype(scanner, provider):
|
||||
scanner.scan.return_value.mime = 'application/xspf+xml'
|
||||
responses.add(
|
||||
responses.GET, URI, body=BODY, content_type='application/xspf+xml')
|
||||
|
||||
result = provider.translate_uri(URI)
|
||||
|
||||
scanner.scan.assert_called_once_with(URI)
|
||||
assert result == 'http://example.com/stream.mp3'
|
||||
|
||||
|
||||
def test_translate_uri_when_scanner_fails(scanner, provider, caplog):
|
||||
scanner.scan.side_effect = exceptions.ScannerError('foo failed')
|
||||
|
||||
result = provider.translate_uri('bar')
|
||||
|
||||
assert result is None
|
||||
assert 'Problem scanning URI bar: foo failed' in caplog.text()
|
||||
|
||||
|
||||
@responses.activate
|
||||
def test_translate_uri_when_playlist_download_fails(provider, caplog):
|
||||
responses.add(responses.GET, URI, body=BODY, status=500)
|
||||
|
||||
result = provider.translate_uri(URI)
|
||||
|
||||
assert result is None
|
||||
assert 'Problem downloading stream playlist' in caplog.text()
|
||||
|
||||
|
||||
def test_translate_uri_times_out_if_connection_times_out(provider, caplog):
|
||||
with mock.patch.object(actor.requests, 'Session') as session_mock:
|
||||
get_mock = session_mock.return_value.get
|
||||
get_mock.side_effect = requests.exceptions.Timeout
|
||||
|
||||
result = provider.translate_uri(URI)
|
||||
|
||||
get_mock.assert_called_once_with(URI, timeout=1.0, stream=True)
|
||||
assert result is None
|
||||
assert (
|
||||
'Download of stream playlist (%s) failed due to connection '
|
||||
'timeout after 1.000s' % URI in caplog.text())
|
||||
|
||||
|
||||
@responses.activate
|
||||
def test_translate_uri_times_out_if_download_is_slow(provider, caplog):
|
||||
responses.add(
|
||||
responses.GET, URI, body=BODY, content_type='audio/x-mpegurl')
|
||||
|
||||
with mock.patch.object(actor, 'time') as time_mock:
|
||||
time_mock.time.side_effect = [0, TIMEOUT + 1]
|
||||
|
||||
result = provider.translate_uri(URI)
|
||||
|
||||
assert result is None
|
||||
assert (
|
||||
'Download of stream playlist (%s) failed due to download taking '
|
||||
'more than 1.000s' % URI in caplog.text())
|
||||
@ -1,5 +1,7 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
import mock
|
||||
|
||||
import pkg_resources
|
||||
@ -11,7 +13,7 @@ from mopidy import config, exceptions, ext
|
||||
from tests import IsA, any_unicode
|
||||
|
||||
|
||||
class TestExtension(ext.Extension):
|
||||
class DummyExtension(ext.Extension):
|
||||
dist_name = 'Mopidy-Foobar'
|
||||
ext_name = 'foobar'
|
||||
version = '1.2.3'
|
||||
@ -20,10 +22,10 @@ class TestExtension(ext.Extension):
|
||||
return '[foobar]\nenabled = true'
|
||||
|
||||
|
||||
any_testextension = IsA(TestExtension)
|
||||
any_testextension = IsA(DummyExtension)
|
||||
|
||||
|
||||
class ExtensionTest(object):
|
||||
class TestExtension(object):
|
||||
|
||||
@pytest.fixture
|
||||
def extension(self):
|
||||
@ -53,8 +55,23 @@ class ExtensionTest(object):
|
||||
with pytest.raises(NotImplementedError):
|
||||
extension.setup(None)
|
||||
|
||||
def test_get_cache_dir_raises_assertion_error(self, extension):
|
||||
config = {'core': {'cache_dir': '/tmp'}}
|
||||
with pytest.raises(AssertionError): # ext_name not set
|
||||
extension.get_cache_dir(config)
|
||||
|
||||
class LoadExtensionsTest(object):
|
||||
def test_get_config_dir_raises_assertion_error(self, extension):
|
||||
config = {'core': {'config_dir': '/tmp'}}
|
||||
with pytest.raises(AssertionError): # ext_name not set
|
||||
extension.get_config_dir(config)
|
||||
|
||||
def test_get_data_dir_raises_assertion_error(self, extension):
|
||||
config = {'core': {'data_dir': '/tmp'}}
|
||||
with pytest.raises(AssertionError): # ext_name not set
|
||||
extension.get_data_dir(config)
|
||||
|
||||
|
||||
class TestLoadExtensions(object):
|
||||
|
||||
@pytest.yield_fixture
|
||||
def iter_entry_points_mock(self, request):
|
||||
@ -70,7 +87,7 @@ class LoadExtensionsTest(object):
|
||||
|
||||
def test_load_extensions(self, iter_entry_points_mock):
|
||||
mock_entry_point = mock.Mock()
|
||||
mock_entry_point.load.return_value = TestExtension
|
||||
mock_entry_point.load.return_value = DummyExtension
|
||||
|
||||
iter_entry_points_mock.return_value = [mock_entry_point]
|
||||
|
||||
@ -94,7 +111,7 @@ class LoadExtensionsTest(object):
|
||||
|
||||
def test_gets_instance(self, iter_entry_points_mock):
|
||||
mock_entry_point = mock.Mock()
|
||||
mock_entry_point.load.return_value = TestExtension()
|
||||
mock_entry_point.load.return_value = DummyExtension()
|
||||
|
||||
iter_entry_points_mock.return_value = [mock_entry_point]
|
||||
|
||||
@ -113,11 +130,11 @@ class LoadExtensionsTest(object):
|
||||
|
||||
def test_get_config_schema_fails(self, iter_entry_points_mock):
|
||||
mock_entry_point = mock.Mock()
|
||||
mock_entry_point.load.return_value = TestExtension
|
||||
mock_entry_point.load.return_value = DummyExtension
|
||||
|
||||
iter_entry_points_mock.return_value = [mock_entry_point]
|
||||
|
||||
with mock.patch.object(TestExtension, 'get_config_schema') as get:
|
||||
with mock.patch.object(DummyExtension, 'get_config_schema') as get:
|
||||
get.side_effect = Exception
|
||||
|
||||
assert ext.load_extensions() == []
|
||||
@ -125,11 +142,11 @@ class LoadExtensionsTest(object):
|
||||
|
||||
def test_get_default_config_fails(self, iter_entry_points_mock):
|
||||
mock_entry_point = mock.Mock()
|
||||
mock_entry_point.load.return_value = TestExtension
|
||||
mock_entry_point.load.return_value = DummyExtension
|
||||
|
||||
iter_entry_points_mock.return_value = [mock_entry_point]
|
||||
|
||||
with mock.patch.object(TestExtension, 'get_default_config') as get:
|
||||
with mock.patch.object(DummyExtension, 'get_default_config') as get:
|
||||
get.side_effect = Exception
|
||||
|
||||
assert ext.load_extensions() == []
|
||||
@ -137,22 +154,22 @@ class LoadExtensionsTest(object):
|
||||
|
||||
def test_get_command_fails(self, iter_entry_points_mock):
|
||||
mock_entry_point = mock.Mock()
|
||||
mock_entry_point.load.return_value = TestExtension
|
||||
mock_entry_point.load.return_value = DummyExtension
|
||||
|
||||
iter_entry_points_mock.return_value = [mock_entry_point]
|
||||
|
||||
with mock.patch.object(TestExtension, 'get_command') as get:
|
||||
with mock.patch.object(DummyExtension, 'get_command') as get:
|
||||
get.side_effect = Exception
|
||||
|
||||
assert ext.load_extensions() == []
|
||||
get.assert_called_once_with()
|
||||
|
||||
|
||||
class ValidateExtensionDataTest(object):
|
||||
class TestValidateExtensionData(object):
|
||||
|
||||
@pytest.fixture
|
||||
def ext_data(self):
|
||||
extension = TestExtension()
|
||||
extension = DummyExtension()
|
||||
|
||||
entry_point = mock.Mock()
|
||||
entry_point.name = extension.ext_name
|
||||
@ -221,3 +238,36 @@ class ValidateExtensionDataTest(object):
|
||||
def test_no_default_config(self, ext_data):
|
||||
ext_data = ext_data._replace(config_defaults=None)
|
||||
assert not ext.validate_extension_data(ext_data)
|
||||
|
||||
def test_get_cache_dir(self, ext_data):
|
||||
core_cache_dir = '/tmp'
|
||||
config = {'core': {'cache_dir': core_cache_dir}}
|
||||
extension = ext_data.extension
|
||||
|
||||
with mock.patch.object(ext.path, 'get_or_create_dir'):
|
||||
cache_dir = extension.get_cache_dir(config)
|
||||
|
||||
expected = os.path.join(core_cache_dir, extension.ext_name)
|
||||
assert cache_dir == expected
|
||||
|
||||
def test_get_config_dir(self, ext_data):
|
||||
core_config_dir = '/tmp'
|
||||
config = {'core': {'config_dir': core_config_dir}}
|
||||
extension = ext_data.extension
|
||||
|
||||
with mock.patch.object(ext.path, 'get_or_create_dir'):
|
||||
config_dir = extension.get_config_dir(config)
|
||||
|
||||
expected = os.path.join(core_config_dir, extension.ext_name)
|
||||
assert config_dir == expected
|
||||
|
||||
def test_get_data_dir(self, ext_data):
|
||||
core_data_dir = '/tmp'
|
||||
config = {'core': {'data_dir': core_data_dir}}
|
||||
extension = ext_data.extension
|
||||
|
||||
with mock.patch.object(ext.path, 'get_or_create_dir'):
|
||||
data_dir = extension.get_data_dir(config)
|
||||
|
||||
expected = os.path.join(core_data_dir, extension.ext_name)
|
||||
assert data_dir == expected
|
||||
|
||||
@ -64,5 +64,6 @@ class VersionTest(unittest.TestCase):
|
||||
self.assertVersionLess('1.0.4', '1.0.5')
|
||||
self.assertVersionLess('1.0.5', '1.0.6')
|
||||
self.assertVersionLess('1.0.6', '1.0.7')
|
||||
self.assertVersionLess('1.0.7', __version__)
|
||||
self.assertVersionLess(__version__, '1.0.9')
|
||||
self.assertVersionLess('1.0.7', '1.0.8')
|
||||
self.assertVersionLess('1.0.8', __version__)
|
||||
self.assertVersionLess(__version__, '1.1.1')
|
||||
|
||||
Loading…
Reference in New Issue
Block a user