Merge pull request #653 from adamcik/feature/local-browse-speedup

Feature/local browse speedup
Stein Magnus Jodal 2014-01-18 13:25:30 -08:00
commit 4026e16996


@@ -8,6 +8,7 @@ import os
 import re
 import sys
 import tempfile
+import time
 import mopidy
 from mopidy import local, models
@@ -49,8 +50,8 @@ class _BrowseCache(object):
     splitpath_re = re.compile(r'([^/]+)')
     def __init__(self, uris):
         # {parent_uri: {uri: ref}}
-        self._cache = {}
+        # TODO: local.ROOT_DIRECTORY_URI
+        self._cache = {'local:directory': collections.OrderedDict()}
         for track_uri in uris:
             path = translator.local_track_uri_to_path(track_uri, b'/')
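The {parent_uri: {uri: ref}} layout seeded above with the 'local:directory' root entry is what lets a browse request later be answered with a single dictionary lookup. As a rough illustration only (not part of the commit: a namedtuple stands in for mopidy.models.Ref, and the URIs produced by translator.path_to_local_directory_uri are simplified), a small library with two tracks in one folder ends up cached roughly like this:

    # Illustrative sketch, not committed code.
    import collections

    Ref = collections.namedtuple('Ref', ['type', 'uri', 'name'])  # stand-in for models.Ref

    browse_cache = {
        'local:directory': collections.OrderedDict([
            ('local:directory:Albums',
             Ref('directory', 'local:directory:Albums', 'Albums')),
        ]),
        'local:directory:Albums': collections.OrderedDict([
            ('local:directory:Albums/Foo',
             Ref('directory', 'local:directory:Albums/Foo', 'Foo')),
        ]),
        'local:directory:Albums/Foo': collections.OrderedDict([
            ('local:track:Albums/Foo/01.mp3',
             Ref('track', 'local:track:Albums/Foo/01.mp3', '01.mp3')),
            ('local:track:Albums/Foo/02.mp3',
             Ref('track', 'local:track:Albums/Foo/02.mp3', '02.mp3')),
        ]),
    }

    # lookup(uri) then reduces to a single dict access:
    print([ref.name for ref in browse_cache['local:directory:Albums/Foo'].values()])
    # -> ['01.mp3', '02.mp3']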
@@ -58,26 +59,57 @@ class _BrowseCache(object):
                 path.decode(self.encoding, 'replace'))
             track_ref = models.Ref.track(uri=track_uri, name=parts.pop())
-            parent = 'local:directory'
-            for i in range(len(parts)):
-                self._cache.setdefault(parent, collections.OrderedDict())
+            # Look for our parents backwards as this is faster than having to
+            # do a complete search for each add.
+            parent_uri = None
+            child = None
+            for i in reversed(range(len(parts))):
                 directory = '/'.join(parts[:i+1])
-                dir_uri = translator.path_to_local_directory_uri(directory)
-                dir_ref = models.Ref.directory(uri=dir_uri, name=parts[i])
-                self._cache[parent][dir_uri] = dir_ref
+                uri = translator.path_to_local_directory_uri(directory)
+                # First dir we process is our parent
+                if not parent_uri:
+                    parent_uri = uri
-                parent = dir_uri
+                # We found ourselves and we exist, done.
+                if uri in self._cache:
+                    break
-            self._cache.setdefault(parent, collections.OrderedDict())
-            self._cache[parent][track_uri] = track_ref
+                # Initialize ourselves, store child if present, and add
+                # ourselves as child for next loop.
+                self._cache[uri] = collections.OrderedDict()
+                if child:
+                    self._cache[uri][child.uri] = child
+                child = models.Ref.directory(uri=uri, name=parts[i])
+            else:
+                # Loop completed, so final child needs to be added to root.
+                if child:
+                    self._cache['local:directory'][child.uri] = child
+            # If no parent was set we belong in the root.
+            if not parent_uri:
+                parent_uri = 'local:directory'
+            self._cache[parent_uri][track_uri] = track_ref
     def lookup(self, uri):
         return self._cache.get(uri, {}).values()
+# TODO: make this available to other code?
+class DebugTimer(object):
+    def __init__(self, msg):
+        self.msg = msg
+        self.start = None
+    def __enter__(self):
+        self.start = time.time()
+    def __exit__(self, exc_type, exc_value, traceback):
+        duration = (time.time() - self.start) * 1000
+        logger.debug('%s: %dms', self.msg, duration)
 class JsonLibrary(local.Library):
-    name = b'json'
+    name = 'json'
     def __init__(self, config):
         self._tracks = {}
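The for/else construct in the new __init__ above is the subtle part: the else body only runs when the loop finishes without break, i.e. when no ancestor directory was already in the cache and the top-level directory still has to be attached under the 'local:directory' root, while hitting an already-cached ancestor breaks out early, which is where the speedup comes from with sorted track URIs. A minimal standalone demonstration of that control flow (illustration only, the function and names are hypothetical):

    def needs_root_attach(cached, ancestor_uris):
        # Walk ancestors from deepest to shallowest, mirroring
        # reversed(range(len(parts))) in the code above.
        for uri in ancestor_uris:
            if uri in cached:
                break        # an already-cached ancestor was found, stop early
        else:
            return True      # no break: the walk reached the top, attach to the root
        return False

    chain = ['local:directory:Albums/Foo', 'local:directory:Albums']
    print(needs_root_attach(set(), chain))                        # -> True
    print(needs_root_attach({'local:directory:Albums'}, chain))   # -> False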
@@ -86,16 +118,18 @@ class JsonLibrary(local.Library):
         self._json_file = os.path.join(
             config['local']['data_dir'], b'library.json.gz')
-    def browse(self, path):
+    def browse(self, uri):
         if not self._browse_cache:
             return []
-        return self._browse_cache.lookup(path)
+        return self._browse_cache.lookup(uri)
     def load(self):
-        logger.debug('Loading json library from %s', self._json_file)
-        library = load_library(self._json_file)
-        self._tracks = dict((t.uri, t) for t in library.get('tracks', []))
-        self._browse_cache = _BrowseCache(sorted(self._tracks))
+        logger.debug('Loading library: %s', self._json_file)
+        with DebugTimer('Loading tracks'):
+            library = load_library(self._json_file)
+            self._tracks = dict((t.uri, t) for t in library.get('tracks', []))
+        with DebugTimer('Building browse cache'):
+            self._browse_cache = _BrowseCache(sorted(self._tracks.keys()))
         return len(self._tracks)
     def lookup(self, uri):
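With the cache built once at load time, browse() no longer scans the track list per request; it is a dictionary lookup keyed by the directory URI, and load() logs how long each phase took via DebugTimer. A rough usage sketch (illustration only; the data_dir path is hypothetical, and the config dict shape follows the config['local']['data_dir'] access above):

    config = {'local': {'data_dir': b'/var/lib/mopidy/local'}}  # hypothetical location

    library = JsonLibrary(config)
    library.load()    # logs 'Loading tracks: ...ms' and 'Building browse cache: ...ms' at DEBUG

    for ref in library.browse('local:directory'):   # top-level directories
        print(ref.type, ref.uri, ref.name)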