local: Add new json based library
- Sets local-tagcache as disabled.
- Implements a new library that uses a gzip-compressed JSON file as storage.
- Thanks to reuse of existing serialization code this is a fairly small change.
parent ca358e05db
commit 118095e522
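For context, the library is stored as a single gzip-compressed JSON document with a `version` field and a `tracks` list (see `commit()` in library.py below). A minimal sketch of inspecting such a file outside Mopidy, assuming that layout; the path is hypothetical:

    import gzip
    import json

    # Hypothetical path; the shipped default is $XDG_DATA_DIR/mopidy/local/library.json.gz
    JSON_FILE = '/tmp/library.json.gz'

    with gzip.open(JSON_FILE, 'rb') as fp:
        data = json.load(fp)  # plain dict, without Mopidy's model decoding

    print(data.get('version'))          # Mopidy version that wrote the file
    print(len(data.get('tracks', [])))  # number of serialized track models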
mopidy/backends/local/json/__init__.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from __future__ import unicode_literals

import os

import mopidy
from mopidy import config, ext


class Extension(ext.Extension):

    dist_name = 'Mopidy-Local-JSON'
    ext_name = 'local-json'
    version = mopidy.__version__

    def get_default_config(self):
        conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
        return config.read(conf_file)

    def get_config_schema(self):
        schema = super(Extension, self).get_config_schema()
        schema['json_file'] = config.Path()
        return schema

    def validate_environment(self):
        pass

    def get_backend_classes(self):
        from .actor import LocalJsonBackend
        return [LocalJsonBackend]

    def get_library_updaters(self):
        from .library import LocalJsonLibraryUpdateProvider
        return [LocalJsonLibraryUpdateProvider]
mopidy/backends/local/json/actor.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from __future__ import unicode_literals

import logging

import pykka

from mopidy.backends import base
from mopidy.utils import encoding, path

from .library import LocalJsonLibraryProvider

logger = logging.getLogger('mopidy.backends.local.json')


class LocalJsonBackend(pykka.ThreadingActor, base.Backend):
    def __init__(self, config, audio):
        super(LocalJsonBackend, self).__init__()

        self.config = config
        self.check_dirs_and_files()
        self.library = LocalJsonLibraryProvider(backend=self)
        self.uri_schemes = ['local']

    def check_dirs_and_files(self):
        try:
            path.get_or_create_file(self.config['local-json']['json_file'])
        except EnvironmentError as error:
            logger.warning(
                'Could not create empty json file: %s',
                encoding.locale_decode(error))
mopidy/backends/local/json/ext.conf (new file, 3 lines)
@@ -0,0 +1,3 @@
[local-json]
enabled = true
json_file = $XDG_DATA_DIR/mopidy/local/library.json.gz
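These defaults can be overridden in the user's mopidy.conf. A plausible example in the same format, matching the commit's intent of disabling local-tagcache in favour of the JSON library; the override path is illustrative only:

    [local-tagcache]
    enabled = false

    [local-json]
    enabled = true
    json_file = /home/alice/.local/share/mopidy/local/library.json.gz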
mopidy/backends/local/json/library.py (new file, 106 lines)
@@ -0,0 +1,106 @@
from __future__ import absolute_import
from __future__ import unicode_literals

import gzip
import json
import logging
import os
import tempfile

import mopidy
from mopidy import models
from mopidy.backends import base
from mopidy.backends.local import search

logger = logging.getLogger('mopidy.backends.local.json')


def _load_tracks(json_file):
    try:
        with gzip.open(json_file, 'rb') as fp:
            result = json.load(fp, object_hook=models.model_json_decoder)
    except IOError:
        return []
    return result.get('tracks', [])


class LocalJsonLibraryProvider(base.BaseLibraryProvider):
    def __init__(self, *args, **kwargs):
        super(LocalJsonLibraryProvider, self).__init__(*args, **kwargs)
        self._uri_mapping = {}
        self._media_dir = self.backend.config['local']['media_dir']
        self._json_file = self.backend.config['local-json']['json_file']
        self.refresh()

    def refresh(self, uri=None):
        logger.debug(
            'Loading local tracks from %s using %s',
            self._media_dir, self._json_file)

        tracks = _load_tracks(self._json_file)
        uris_to_remove = set(self._uri_mapping)

        for track in tracks:
            self._uri_mapping[track.uri] = track
            uris_to_remove.discard(track.uri)

        for uri in uris_to_remove:
            del self._uri_mapping[uri]

        logger.info(
            'Loaded %d local tracks from %s using %s',
            len(tracks), self._media_dir, self._json_file)

    def lookup(self, uri):
        try:
            return [self._uri_mapping[uri]]
        except KeyError:
            logger.debug('Failed to lookup %r', uri)
            return []

    def find_exact(self, query=None, uris=None):
        tracks = self._uri_mapping.values()
        return search.find_exact(tracks, query=query, uris=uris)

    def search(self, query=None, uris=None):
        tracks = self._uri_mapping.values()
        return search.search(tracks, query=query, uris=uris)


class LocalJsonLibraryUpdateProvider(base.BaseLibraryProvider):
    uri_schemes = ['local']

    def __init__(self, config):
        self._tracks = {}
        self._media_dir = config['local']['media_dir']
        self._json_file = config['local-json']['json_file']

    def load(self):
        for track in _load_tracks(self._json_file):
            self._tracks[track.uri] = track
        return self._tracks.values()

    def add(self, track):
        self._tracks[track.uri] = track

    def remove(self, uri):
        if uri in self._tracks:
            del self._tracks[uri]

    def commit(self):
        directory, basename = os.path.split(self._json_file)

        # TODO: cleanup directory/basename.* files.
        tmp = tempfile.NamedTemporaryFile(
            prefix=basename + '.', dir=directory, delete=False)

        try:
            with gzip.GzipFile(fileobj=tmp, mode='wb') as fp:
                data = {'version': mopidy.__version__,
                        'tracks': self._tracks.values()}
                json.dump(data, fp, cls=models.ModelJSONEncoder,
                          indent=2, separators=(',', ': '))
            os.rename(tmp.name, self._json_file)
        finally:
            if os.path.exists(tmp.name):
                os.remove(tmp.name)
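A short usage sketch of the update provider's write path: tracks are collected in memory, and commit() writes the whole library in one go via a temporary file followed by an atomic rename. The config values and the track below are stand-ins for illustration, not part of the commit:

    from mopidy.models import Track
    from mopidy.backends.local.json.library import LocalJsonLibraryUpdateProvider

    # Hypothetical config values for illustration only.
    config = {
        'local': {'media_dir': '/tmp/media'},
        'local-json': {'json_file': '/tmp/library.json.gz'},
    }

    updater = LocalJsonLibraryUpdateProvider(config)
    updater.load()                                   # read any existing library
    updater.add(Track(uri='local:track:song.mp3', name='Song'))
    updater.commit()                                 # gzip + JSON dump, then atomic rename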
@@ -1,2 +1,2 @@
 [local-tagcache]
-enabled = true
+enabled = false
setup.py (1 line added)
@@ -44,6 +44,7 @@ setup(
             'http = mopidy.frontends.http:Extension [http]',
             'local = mopidy.backends.local:Extension',
             'local-tagcache = mopidy.backends.local.tagcache:Extension',
+            'local-json = mopidy.backends.local.json:Extension',
             'mpd = mopidy.frontends.mpd:Extension',
             'stream = mopidy.backends.stream:Extension',
         ],