def runbottle(host='0.0.0.0', port=9278, pthstr='', pathprefix=''):
    """Set up one bottle route per translated path prefix and start the HTTP server."""
    global datadir
    uplog("runbottle: version %s host %s port %d pthstr %s pathprefix %s" %
          (bottle.__version__, host, port, pthstr, pathprefix))
    datadir = os.path.join(os.path.dirname(__file__), 'bottle')
    bottle.TEMPLATE_PATH = (os.path.join(datadir, 'views'), )

    # All the file urls must be like /some/prefix/path where /some/prefix
    # must be in the path translation map (by default an identity
    # translation of all topdirs entries). A bottle handler cannot easily
    # retrieve the route it was called from, so we create one callable per
    # prefix: the route comes from the translation input and the callable
    # serves from the translated root.
    for ptt in pthstr.split(','):
        elems = ptt.split(':')
        route = elems[0]
        if route[-1] != '/':
            route += '/'
        route += '<filepath:path>'
        uplog("runbottle: adding route for: %s" % route)
        # The streamer is built with the translated root path
        handler = Streamer(elems[1])
        bottle.route(route, 'GET', handler)

    bottle.run(server='waitress', host=host, port=port)
def favourite_artists(self):
    """Return the artists followed by the logged-in user, [] if not logged in."""
    if not self.api:
        uplog("Not logged in")
        return []
    data = self.api.current_user_followed_artists()
    #self.dmpdata('favourite_artists', data)
    items = data['artists']['items']
    return [_parse_artist(entry) for entry in items]
def __call__(self, filepath):
    """Serve a file under self.root, or an image embedded in an audio file.

    The 'embed' query parameter selects the embedded-image code path.
    """
    if 'embed' in bottle.request.query:
        # Embedded image urls have had a .jpg or .png appended. Remove it
        # to restore the track path name.
        filepath = filepath[:filepath.rfind('.')]
        apath = os.path.join(self.root, filepath)
        ctype, size, f = embedded_open(apath)
        st = os.stat(apath)
        lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(st.st_mtime))
        bottle.response.set_header("Last-Modified", lm)
        bottle.response.set_header("Content-type", ctype)
        bottle.response.set_header("Content-Length", size)
        return f

    fullpath = os.path.join(self.root, filepath)
    if not os.path.exists(fullpath):
        uplog("uprcl: no such file: %s" % fullpath)
        return bottle.HTTPResponse(status=404)
    uplog("Streaming: %s " % fullpath)
    # Let mutagen pick the mime type when it recognizes the file
    mutf = mutagen.File(fullpath)
    if mutf:
        return bottle.static_file(filepath, root=self.root, mimetype=mutf.mime[0])
    return bottle.static_file(filepath, root=self.root)
def favourite_tracks(self):
    """Return the user's saved tracks (first 50), [] if not logged in."""
    if not self.api:
        uplog("Not logged in")
        return []
    data = self.api.current_user_saved_tracks(limit=50, offset=0)
    # Debug dump disabled, consistently with the sibling favourite_* methods
    #self.dmpdata('favourite_tracks', data)
    return [_parse_track(item['track']) for item in data['items']]
def recent_tracks(self):
    """Return the user's recently played tracks, [] if not logged in."""
    if not self.api:
        uplog("Not logged in")
        return []
    data = self.api.current_user_recently_played()
    #self.dmpdata('user_recently_played', data)
    items = data['items']
    return [_parse_track(entry['track']) for entry in items]
def get_artist_info(self, artist_id, doRelated=False):
    """Fetch artist details: albums and top tracks, or related artists.

    With doRelated set, only related artists are requested; otherwise
    albums and top tracks are. Returns a dict with 'albums', 'toptracks'
    and 'related' lists (all possibly empty).
    """
    ret = {"albums": [], "toptracks": [], "related": []}
    # Happens,some library tracks have no artistId entry
    if artist_id is None or artist_id == 'None':
        uplog("get_artist_albums: artist_id is None")
        return ret
    uplog("get_artist_albums: artist_id %s" % artist_id)
    maxrel = 20 if doRelated else 0
    maxtop = 0 if doRelated else 10
    data = self.api.get_artist_info(artist_id, include_albums=not doRelated,
                                    max_top_tracks=maxtop,
                                    max_rel_artist=maxrel)
    #self.dmpdata("artist_info", data)
    if 'albums' in data:
        ret["albums"] = sortmodellist(
            [_parse_album(alb) for alb in data['albums']])
    if 'topTracks' in data:
        ret["toptracks"] = sortmodellist(
            [_parse_track(trk) for trk in data['topTracks']])
    if 'related_artists' in data:
        ret["related"] = sortmodellist(
            [_parse_artist(art) for art in data['related_artists']])
    return ret
def _direntriesforalbums(self, pid, where, path=''):
    """Build upnp directory entries for the albums matched by 'where'.

    When 'path' is set, restrict to albums with a folder under it, mapping
    to merged album ids (albalb) through a subquery. Otherwise only list
    top-level album records (albtdisc IS NULL).
    """
    uplog("_direntriesforalbums. where: %s" % where)
    cur = self._conn.cursor()
    args = (path + '%', ) if path else ()
    if path:
        if not where:
            where = '''WHERE albfolder LIKE ?'''
        else:
            where += ''' AND albfolder LIKE ?'''
        substmt = '''SELECT DISTINCT albalb FROM ALBUMS %s''' % where
        where = '''WHERE album_id IN (%s)''' % substmt
    elif not where:
        where = '''WHERE albtdisc IS NULL'''
    else:
        where += ''' AND albtdisc IS NULL'''
    stmt = '''SELECT album_id, albtitle, albarturi, albdate, artist.value FROM albums LEFT JOIN artist ON artist.artist_id = albums.artist_id %s ORDER BY albtitle''' % where
    uplog('direntriesforalbums: %s' % stmt)
    cur.execute(stmt, args)
    entries = []
    for rec in cur:
        objid = pid + '$' + str(rec[0])
        entries.append(
            rcldirentry(objid, pid, rec[1], arturi=rec[2], date=rec[3],
                        artist=rec[4],
                        upnpclass='object.container.album.musicAlbum'))
    return entries
def _fetchalldocs(self, confdir):
    """Load every document from the recoll index into self._rcldocs.

    Minim tag aliases (if configured) are applied to each doc on the way.
    Fetching stops at self._maxrclcnt docs when that limit is positive.
    """
    start = timer()
    rcldb = recoll.connect(confdir=confdir)
    rclq = rcldb.query()
    rclq.execute("mime:*", stemming=0)
    #rclq.execute('album:a* OR album:b* OR album:c*', stemming=0)
    uplog("Estimated alldocs query results: %d" % (rclq.rowcount))

    tagaliases = None
    if uprclinit.g_minimconfig:
        tagaliases = uprclinit.g_minimconfig.gettagaliases()

    self._rcldocs = []
    for doc in rclq:
        if tagaliases:
            for orig, target, rep in tagaliases:
                val = doc.get(orig)
                #uplog("Rep %s doc[%s]=[%s] doc[%s]=[%s]"%
                #    (rep, orig, val, target, doc.get(target)))
                # Copy the alias value over, unless the target is already
                # set and replacement was not requested
                if val and (rep or not doc.get(target)):
                    setattr(doc, target, val)
        self._rcldocs.append(doc)
        if self._maxrclcnt > 0 and len(self._rcldocs) >= self._maxrclcnt:
            break
        time.sleep(0)
    end = timer()
    uplog("Retrieved %d docs in %.2f Seconds" % (len(self._rcldocs), end - start))
def _makeSearchExp(out, v, field, oper, neg):
    """Append to 'out' the recoll query fragment for one (field, oper, value) term."""
    uplog("_makeSearchExp: v <%s> field <%s> oper <%s> neg <%s>" %
          (v, field, oper, neg))
    if oper == 'I':
        return
    swords, phrases = _separatePhrasesAndWords(v)
    if neg:
        out.append(" -")
    # Special-case 'title' because we want to also match directory names
    # ((title:keyword) OR (filename:keyword AND mime:inode/directory))
    fields = (field, 'filename') if field == 'title' else (field,)
    if len(fields) > 1:
        out.append(" (")
    for i, fld in enumerate(fields):
        out.append(" (")
        _searchClauses(out, fld, oper, swords, phrases)
        # We'd like to do the following to avoid matching reg file names but
        # recoll takes all mime: clause as global filters, so can't work
        # if i == 1: out.append(" AND mime:inode/directory")
        out.append(")")
        if len(fields) == 2 and i == 0:
            out.append(" OR ")
    if len(fields) > 1:
        out.append(") ")
def find_device_id(self, data):
    """Return the id of the first ANDROID device entry, without its 0x prefix.

    Returns None when no ANDROID entry is present in 'data'.
    """
    for entry in data:
        if "type" in entry and entry["type"] == u"ANDROID":
            devid = entry["id"][2:]  # Get rid of 0x
            uplog("Using deviceid %s" % devid)
            return devid
    return None
def _subtreealbums(self, selwhere, values):
    """Return the merged album ids for all tracks matching the selection."""
    stmt = 'SELECT DISTINCT album_id FROM tracks ' + selwhere
    uplog('subtreealbums: executing %s' % stmt)
    cur = self._conn.cursor()
    cur.execute(stmt, values)
    # Map the raw (per-disc) album ids to merged album ids
    albids = self._rawalbids2albids([row[0] for row in cur])
    uplog('subtreealbums: returning %s' % albids)
    return albids
def browseFolder(self, pid, flag, pthremain, folder):
    """Browse the tags tree under a folder: root entries or deeper levels."""
    uplog("Tags:browseFolder: pid %s pth %s fld %s" %
          (pid, pthremain, folder))
    comps = pthremain.split('$')
    # 1st elt in list is empty because pthremain begins with $. so
    # len(comps)==2 is the root of tags from this folder
    if len(comps) == 2:
        return self.rootentries(pid + '$', folder)
    return self._dobrowse(pid, flag, comps[2:], folder)
def new_releases(self):
    """Return newly released albums, [] if not logged in or on a parse error."""
    if not self.api:
        uplog("Not logged in")
        return []
    data = self.api.new_releases()
    #self.dmpdata('new_releases', data)
    try:
        return [_parse_album(alb) for alb in data['albums']['items']]
    except Exception:
        # Unexpected response shape: log and return nothing rather than
        # crash. Narrowed from a bare except which would also swallow
        # KeyboardInterrupt/SystemExit.
        uplog("new_releases: _parse_albums failed")
    return []
def favourite_albums(self):
    """Return the user's saved albums, [] if not logged in or on a parse error."""
    if not self.api:
        uplog("Not logged in")
        return []
    data = self.api.current_user_saved_albums()
    #self.dmpdata('favourite_albums', data)
    try:
        return [_parse_album(item['album']) for item in data['items']]
    except Exception:
        # Narrowed from a bare except which would also swallow
        # KeyboardInterrupt/SystemExit.
        uplog("favourite_albums: _parse_albums failed")
    return []
def _albcntforfolder(self, path):
    """Return, as a string, the album count under 'path' (all albums if empty)."""
    if path:
        stmt = '''SELECT COUNT(DISTINCT albalb) FROM ALBUMS WHERE albfolder LIKE ?'''
        args = (path + "%", )
    else:
        # Only count rows with albtdisc unset
        stmt = "SELECT COUNT(*) FROM albums WHERE albtdisc is NULL"
        args = ()
    uplog("_albcntforfolder: stmt %s args %s" % (stmt, args))
    cur = self._conn.cursor()
    cur.execute(stmt, args)
    return str(cur.fetchone()[0])
def my_playlists(self):
    """Return the user's playlists, [] if not logged in or on a parse error."""
    if not self.api:
        uplog("Not logged in")
        return []
    data = self.api.current_user_playlists()
    #self.dmpdata('user_playlists', data)
    try:
        return [_parse_playlist(item) for item in data['items']]
    except Exception:
        # Narrowed from a bare except which would also swallow
        # KeyboardInterrupt/SystemExit.
        uplog("my_playlists: _parse_playlist failed")
    return []
def browse(self, pid, flag):
    """Browse the playlists tree.

    At the root (idx 0), list one container entry per playlist document.
    Otherwise expand one playlist by reading its m3u file: http urls get a
    synthetic doc, local paths are looked up in the folders tree.
    Returns a sorted list of upnp entries.
    """
    idx = self._objidtoidx(pid)
    folders = uprclinit.g_trees['folders']
    rcldocs = folders.rcldocs()
    entries = []
    if idx == 0:
        # Browsing root: one playlistContainer entry per playlist doc
        for i in range(len(self.utidx))[1:]:
            doc = rcldocs[self.utidx[i]]
            id = self._idprefix + '$p' + str(i)
            title = doc.title if doc.title else doc.filename
            e = rcldirentry(id, pid, title,
                            upnpclass='object.container.playlistContainer')
            if e:
                entries.append(e)
    else:
        # Expanding one playlist: parse its m3u file
        pldoc = rcldocs[self.utidx[idx]]
        plpath = uprclutils.docpath(pldoc)
        #uplog("playlists: plpath %s" % plpath)
        try:
            m3u = uprclutils.M3u(plpath)
        except Exception as ex:
            uplog("M3u open failed: %s %s" % (plpath, ex))
            return entries
        cnt = 1
        for url in m3u:
            doc = recoll.Doc()
            if m3u.urlRE.match(url):
                # Actual URL (usually http). Create bogus doc
                doc.setbinurl(bytearray(url))
                elt = os.path.split(url)[1]
                doc.title = elt.decode('utf-8', errors='ignore')
                doc.mtype = "audio/mpeg"
            else:
                # Local path: find the real document in the folders tree
                doc.setbinurl(bytearray(b'file://' + url))
                fathidx, docidx = folders._stat(doc)
                if docidx < 0:
                    uplog("playlists: can't stat %s" % doc.getbinurl())
                    continue
                doc = rcldocs[docidx]
            id = pid + '$e' + str(len(entries))
            e = rcldoctoentry(id, pid, self._httphp, self._pprefix, doc)
            if e:
                entries.append(e)
    return sorted(entries, key=cmpentries)
def _cmpentries_func(e1, e2): #uplog("cmpentries");_logentry("e1", e1);_logentry("e2", e2) tp1 = e1['tp'] tp2 = e2['tp'] isct1 = tp1 == 'ct' isct2 = tp2 == 'ct' # Containers come before items, and are sorted in alphabetic order ret = -2 if isct1 and not isct2: ret = -1 elif not isct1 and isct2: ret = 1 elif isct1 and isct2: tt1 = e1['tt'] tt2 = e2['tt'] if tt1.lower() < tt2.lower(): ret = -1 elif tt1.lower() > tt2.lower(): ret = 1 else: ret = 0 if ret != -2: #uplog("cmpentries tp1 %s tp2 %s, returning %d"%(tp1,tp2,ret)) return ret # Tracks. Sort by album then directory then track number k = 'upnp:album' a1 = e1[k] if k in e1 else "" a2 = e2[k] if k in e2 else "" if a1 < a2: return -1 elif a1 > a2: return 1 d1 = os.path.dirname(e1['uri']) d2 = os.path.dirname(e2['uri']) if d1 < d2: return -1 elif d1 > d2: return 1 k = 'upnp:originalTrackNumber' a1 = e1[k] if k in e1 else "0" a2 = e2[k] if k in e2 else "0" try: return int(a1) - int(a2) except: uplog("upnp:originalTrackNumber %s %s" % (a1, a2)) return 0
def featured_playlists(self):
    """Return the service's featured playlists, [] if not logged in or on error."""
    if not self.api:
        uplog("Not logged in")
        return []
    data = self.api.featured_playlists()
    #self.dmpdata('featured_playlists', data)
    try:
        return [_parse_playlist(item) for item in data['playlists']['items']]
    except Exception:
        # Narrowed from a bare except which would also swallow
        # KeyboardInterrupt/SystemExit.
        uplog("featured_playlists: _parse_playlist failed")
    return []
def _dobrowse(self, pid, flag, qpath, folder=''):
    """Dispatch a tags-tree browse on the first path element.

    'items' lists all tracks (optionally under folder), 'albums' goes to
    the albums browser, '=tag' to the tag browser. Anything else raises.
    """
    uplog("Tags:browsFolder: qpath %s" % qpath)
    top = qpath[0]
    if top == 'items':
        args = (folder + '%', ) if folder else ()
        folderwhere = ' WHERE tracks.path LIKE ? ' if folder else ' '
        return self._trackentriesforstmt(
            'SELECT docidx FROM tracks' + folderwhere, args, pid)
    if top == 'albums':
        return self._albumsbrowse(pid, qpath, flag, folder)
    if top.startswith('='):
        return self._tagsbrowse(pid, qpath, flag, folder)
    raise Exception('Bad path in tags tree (start): <%s>' % qpath)
def _subtreetags(self, where, values):
    """Return the index tags with more than one distinct value in the selection."""
    cur = self._conn.cursor()
    tags = []
    for tt in g_indextags:
        tb = g_tagtotable[tt]
        stmt = '''SELECT COUNT(DISTINCT %s) FROM tracks %s''' % \
            (_clid(tb), where)
        #uplog("subtreetags: stmt: [%s]" % stmt)
        cur.execute(stmt, values)
        cnt = cur.fetchone()[0]
        # Truncate long statements in the log
        logstmt = stmt if len(stmt) <= 80 else stmt[:80] + "..."
        uplog("subtreetags: %d values for %s (%s,%s)" % (cnt, tb, logstmt, values))
        if cnt > 1:
            tags.append(tt)
    return tags
def fetchalldocs(confdir, maxcnt=1000):
    """Retrieve up to maxcnt documents from the recoll index.

    confdir: recoll configuration directory.
    maxcnt: maximum number of documents to fetch; a value <= 0 means no
      limit. Defaults to 1000, the previously hard-coded value, so
      existing callers are unaffected.
    Returns the list of recoll docs.
    """
    allthedocs = []
    rcldb = recoll.connect(confdir=confdir)
    rclq = rcldb.query()
    rclq.execute("mime:*", stemming=0)
    uplog("Estimated alldocs query results: %d" % (rclq.rowcount))
    totcnt = 0
    while True:
        docs = rclq.fetchmany()
        for doc in docs:
            allthedocs.append(doc)
            totcnt += 1
        # Stop when the limit is reached or on a short (final) batch
        if (maxcnt > 0 and totcnt >= maxcnt) or \
               len(docs) != rclq.arraysize:
            break
    uplog("Retrieved %d docs" % (totcnt, ))
    return allthedocs
def _maybeinitconfdir(confdir, topdirs):
    """Create and populate the recoll configuration directory.

    confdir: recoll configuration directory; created if missing.
    topdirs: value for the recoll 'topdirs' variable (the indexed area).
    Raises Exception if confdir exists but is not a directory.
    recoll.conf is (re)written on every call, with the user fragment from
    'uprclconfrecolluser' (or confdir/recoll.conf.user) appended.
    """
    if not os.path.isdir(confdir):
        if os.path.exists(confdir):
            raise Exception("Exists and not directory: %s" % confdir)
        os.mkdir(confdir)
    # Copy our packaged recoll auxiliary configuration files
    datadir = os.path.dirname(__file__)
    uplog("datadir: %s" % datadir)
    for fn in ("fields", "mimemap", "mimeconf"):
        dst = os.path.join(confdir, fn)
        src = os.path.join(datadir, "rclconfig-" + fn)
        shutil.copyfile(src, dst)
    exclpats = uprclinit.g_minimconfig.getexcludepatterns()
    # Optional user-provided recoll configuration fragment
    userconfig = uprclinit.g_upconfig.get("uprclconfrecolluser")
    if not userconfig:
        userconfig = os.path.join(confdir, "recoll.conf.user")
    if os.path.exists(userconfig):
        userconfdata = open(userconfig, "rb").read()
    else:
        userconfdata = b''
    # Write the main recoll.conf as bytes, encoding topdirs with the
    # current locale
    path = os.path.join(confdir, "recoll.conf")
    f = open(path, "wb")
    f.write(b"topdirs = %s\n" % topdirs.encode(locale.getpreferredencoding()))
    f.write(b"idxabsmlen = 0\n")
    f.write(b"loglevel = 2\n")
    #f.write(b"idxlogfilename = /tmp/loguprcl.txt\n")
    #f.write(b"pylogfilename = /tmp/logpyuprcl.txt\n")
    f.write(b"noaspell = 1\n")
    f.write(b"nomd5types = rclaudio rclimg\n")
    f.write(b"testmodifusemtime = 1\n")
    f.write(b"idxmetastoredlen = 20000\n")
    f.write(b"audiotagfixerscript = %b\n" %
            os.path.join(datadir, "minimtagfixer.py").encode('utf-8'))
    if exclpats:
        f.write(b"skippedNames+ = " + exclpats.encode("utf-8") + b"\n")
    else:
        f.write(b"skippedNames+ = \n")
    if userconfdata:
        f.write(userconfdata)
    f.close()
def _tagsbrowsealbums(self, pid, qpath, i, selwhere, values):
    """Handle the 'albums' levels inside a tag-filtered browse.

    i is the index of the 'albums' element inside qpath: at the last
    position we list albums, one level deeper a (possibly partial) album
    track list, and one more the 'complete album' expansion.
    """
    uplog("_tagsbrowsealbums: pid %s qpath %s i %s selwhere %s values %s" %
          (pid, qpath, i, selwhere, values))
    c = self._conn.cursor()
    entries = []
    if i == len(qpath) - 1:
        # List of albums to which belong any track from selection
        albidsl = self._subtreealbums(selwhere, values)
        albids = ','.join([str(a) for a in albidsl])
        where = ' WHERE album_id in (' + albids + ') '
        entries = self._direntriesforalbums(pid, where)
    elif i == len(qpath) - 2:
        # Album track list. Maybe a merged album->multiple phys albids
        albid = int(qpath[-1])
        rawalbids = self._albids2rawalbids((albid, ))
        uplog("_tagsbrowsealbums: albid %s rawalbids %s" % (albid, rawalbids))
        # Total track count for the album(s)
        stmt = '''SELECT COUNT(docidx) FROM tracks WHERE album_id IN (%s)''' % ','.join('?' * len(rawalbids))
        c.execute(stmt, rawalbids)
        r = c.fetchone()
        ntracks = int(r[0])
        # Tracks from the album which are also in the current selection
        docidsl = self._docidsforsel(selwhere, values)
        stmt = '''SELECT docidx FROM tracks WHERE album_id IN (%s) AND docidx IN (%s)''' % \
            (','.join('?'*len(rawalbids)), ','.join('?'*len(docidsl)))
        entries = self._trackentriesforstmt(stmt, rawalbids + docidsl, pid)
        # If the selection does not cover the whole album, prepend a
        # 'Complete Album' pseudo-entry
        if ntracks != len(entries):
            id = pid + '$' + 'showca'
            entries = [rcldirentry(id, pid, '>> Complete Album')] + entries
    elif i == len(qpath) - 3:
        # 'Complete album' entry
        # Note that minim has an additional level here, probably to
        # present groups or multiple groups ? The trackids ids are
        # like:
        # 0$=Composer$17738$albums$2$showca.0$hcalbum$*i13458
        # I don't know what the .0 is for.
        # The 'hcalbum' level usually has 2 entries '>> Hide Content'
        # and the album title. TBD
        albid = int(qpath[-2])
        entries = self._trackentriesforalbum(albid, pid)
    return entries
def dirpath(self, objid):
    """Return the path inside [folders] for a container object id.

    Walks up the _dirvec parent ('..') links, collecting directory names.
    Returns "" when called on the top dir (e.g. from search, above
    [folders]); "/" for the [folders] root, or when the walk fails
    (father not found, or an apparent loop).
    """
    try:
        diridx, pthremain = self._objidtodiridx(objid)
    except Exception:
        # We may get called from search, on the top dir (above
        # [folders]). Return empty in this case. Narrowed from a bare
        # except.
        return ""
    if diridx == 0:
        return "/"
    lpath = []
    while True:
        fathidx = self._dirvec[diridx][".."][0]
        # Find our own name inside the father directory
        found = False
        for nm, ids in self._dirvec[fathidx].items():
            if ids[0] == diridx:
                lpath.append(nm)
                found = True
                break
        if not found:
            uplog("uprclfolders: pwd failed for %s (father not found), "
                  "returning /" % objid)
            return "/"
        if len(lpath) > 200:
            # Sanity limit against a loop in the tree
            uplog("uprclfolders: pwd failed for %s (looping), "
                  "returning /" % objid)
            return "/"
        diridx = fathidx
        if diridx == 0:
            break
    if not lpath:
        return "/"
    path = ""
    for elt in reversed(lpath):
        path += elt + "/"
    return path
def rootentries(self, pid, path=''):
    """Build the root of the tags tree for a folder: albums, items, tag subtrees."""
    uplog("rootentries: pid %s path %s" % (pid, path))
    entries = []
    nalbs = self._albcntforfolder(path)
    entries.append(rcldirentry(pid + 'albums', pid, nalbs + ' albums'))
    if path:
        where = ' WHERE tracks.path LIKE ? '
        args = (path + '%', )
    else:
        where = ' '
        args = ()
    cur = self._conn.cursor()
    cur.execute("SELECT COUNT(*) from tracks" + where, args)
    nitems = str(cur.fetchone()[0])
    entries.append(rcldirentry(pid + 'items', pid, nitems + ' items'))
    # One entry per tag which actually has several values in this subtree
    for tt in self._subtreetags(where, args):
        entries.append(
            rcldirentry(pid + '=' + tt, pid, g_tagdisplaytag[tt]))
    return entries
def _trackentriesforalbum(self, albid, pid):
    """Return the track entries for a (possibly merged multi-disc) album.

    Track numbers are rewritten to be strictly increasing across the
    concatenated discs.
    """
    rawids = self._albid2rawalbidssorted(albid)
    uplog("_trackentriesforalbid: %d -> %s" % (albid, rawids))
    # I don't see a way to use a select..in statement and get the
    # order right
    tracks = []
    stmt = '''SELECT docidx FROM tracks WHERE album_id = ? ORDER BY trackno'''
    for rawid in rawids:
        tracks += self._trackentriesforstmt(stmt, (rawid, ), pid)
    prevno = None
    for track in tracks:
        tn = 1
        if 'upnp:originalTrackNumber' in track:
            tn = int(track['upnp:originalTrackNumber'])
        # Bump the number when it does not increase over the previous one
        if prevno and tn <= prevno:
            tn = prevno + 1
        prevno = tn
        track['upnp:originalTrackNumber'] = str(tn)
    return tracks
def _initplaylists(self):
    """Fill the _dirvec entries for playlist directories from their m3u files.

    http urls get a synthetic doc appended to _rcldocs; local paths are
    looked up in the tree with _stat. self._playlists is deleted once done.
    """
    for diridx in self._playlists:
        pldocidx = self._dirvec[diridx]["."][1]
        pldoc = self._rcldocs[pldocidx]
        arturi = uprclutils.docarturi(pldoc, self._httphp, self._pprefix)
        if arturi:
            pldoc.albumarturi = arturi
        plpath = uprclutils.docpath(pldoc)
        try:
            m3u = uprclutils.M3u(plpath)
        except Exception as ex:
            uplog("M3u open failed: %s %s" % (plpath, ex))
            continue
        for url in m3u:
            if m3u.urlRE.match(url):
                # Actual URL (usually http). Create bogus doc
                doc = recoll.Doc()
                doc.setbinurl(bytearray(url))
                elt = os.path.split(url)[1]
                doc.title = elt.decode('utf-8', errors='ignore')
                doc.mtype = "audio/mpeg"
                self._rcldocs.append(doc)
                docidx = len(self._rcldocs) - 1
                self._dirvec[diridx][elt] = (-1, docidx)
            else:
                # Local path: look the document up in the tree
                doc = recoll.Doc()
                doc.setbinurl(bytearray(b'file://' + url))
                fathidx, docidx = self._stat(doc)
                if docidx >= 0 and docidx < len(self._rcldocs):
                    elt = os.path.split(url)[1]
                    self._dirvec[diridx][elt] = (-1, docidx)
                else:
                    #uplog("No track found: playlist %s entry %s" %
                    #    (plpath,url))
                    pass
    del self._playlists
def _search1(self, query, tp):
    """Run one typed search ('artist'/'album'/'playlist'/'track') against the api.

    Pages through results up to a per-type maximum, then returns a
    SearchResult with the corresponding field filled (None for an
    unknown tp).
    """
    uplog("_search1: query [%s] tp [%s]" % (query, tp))
    # maxres is the max count we return, batch the unit query size
    if tp == 'artist':
        maxres, batch = 20, 20
    elif tp in ('album', 'playlist'):
        maxres, batch = 50, 50
    else:
        maxres, batch = 150, 50
    results = []
    offset = 0
    while offset < maxres:
        uplog("_search1: call api.search, offset %d" % offset)
        data = self.api.search(query, type=tp, offset=offset, limit=batch)
        ncnt = 0
        ndata = []
        try:
            if tp == 'artist':
                items = data['artists']['items']
                ncnt = len(items)
                ndata = [_parse_artist(it) for it in items]
            elif tp == 'album':
                items = data['albums']['items']
                ncnt = len(items)
                # Only keep albums flagged as available
                ndata = [_parse_album(it) for it in items]
                ndata = [alb for alb in ndata if alb.available]
            elif tp == 'playlist':
                #uplog("PLAYLISTS: %s" % json.dumps(data, indent=4))
                items = data['playlists']['items']
                ncnt = len(items)
                ndata = [_parse_playlist(it) for it in items]
            elif tp == 'track':
                items = data['tracks']['items']
                ncnt = len(items)
                ndata = [_parse_track(it) for it in items]
        except Exception as err:
            uplog("_search1: exception while parsing result: %s" % err)
            break
        results.extend(ndata)
        #uplog("Got %d more (batch %d)" % (ncnt, batch))
        # A short batch means we reached the end of the result set
        if ncnt < batch:
            break
        offset += batch
    if tp == 'artist':
        return SearchResult(artists=results)
    if tp == 'album':
        return SearchResult(albums=results)
    if tp == 'playlist':
        return SearchResult(playlists=results)
    if tp == 'track':
        return SearchResult(tracks=results)
def search(foldersobj, rclconfdir, objid, upnps, idprefix, httphp, pathprefix):
    """Translate an upnp search to recoll, run it, and return sorted entries.

    The search is restricted with a dir: clause to the folder the objid
    designates, when applicable. Returns [] on query error or no results.
    """
    rcls = _upnpsearchtorecoll(upnps)
    filterdir = foldersobj.dirpath(objid)
    if filterdir and filterdir != "/":
        rcls += " dir:\"" + filterdir + "\""
    uplog("Search: recoll search: <%s>" % rcls)

    rcldb = recoll.connect(confdir=rclconfdir)
    try:
        rclq = rcldb.query()
        rclq.execute(rcls)
    except Exception as e:
        uplog("Search: recoll query raised: %s" % e)
        return []
    uplog("Estimated query results: %d" % (rclq.rowcount))
    if rclq.rowcount == 0:
        return []

    entries = []
    maxcnt = 0  # 0 means no cap on the result count
    while True:
        docs = rclq.fetchmany()
        for doc in docs:
            arturi = uprclutils.docarturi(doc, httphp, pathprefix)
            if arturi:
                # The uri is quoted, so it's ascii and we can just store
                # it as a doc attribute
                doc.albumarturi = arturi
            entid = foldersobj.objidfordoc(doc)
            entry = uprclutils.rcldoctoentry(entid, objid, httphp, pathprefix, doc)
            if entry:
                entries.append(entry)
        # Stop on a short (final) batch, or if a cap were set and reached
        if (maxcnt > 0 and len(entries) >= maxcnt) or \
               len(docs) != rclq.arraysize:
            break
    uplog("Search retrieved %d docs" % (len(entries),))
    entries.sort(key=uprclutils.cmpentries)
    return entries