Example 1
 def __connect(self):
     try:
         return self.__threadlocal.conn
     except AttributeError:
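         # first call from this thread: open a new connection and cache it in thread-local storage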
         self.__threadlocal.conn = sqlite3.connect(self.__name, factory=self.Connection)
         log.d('thread %s connected to database %r', threading.current_thread(), self.__name)
         return self.__threadlocal.conn
Example 2
 def normalize_basedir(self):
     basedir = cherry.config["media.basedir"]
     basedir = os.path.normcase(basedir)
     if len(basedir) > 1:
         basedir = basedir.rstrip(os.path.sep)
     cherry.config = cherry.config.replace({"media.basedir": basedir})
     log.d("media base directory: %r" % basedir)
Example 3
    def search(self, term):
        reload(cherry.tweak)
        tweaks = cherry.tweak.CherryModelTweaks
        user = cherrypy.session.get('username', None)
        if user:
            log.d(_("%(user)s searched for '%(term)s'"), {
                'user': user,
                'term': term
            })
        max_search_results = cherry.config['search.maxresults']
        results = self.cache.searchfor(term, maxresults=max_search_results)
        with Performance(_('sorting DB results using ResultOrder')) as perf:
            debug = tweaks.result_order_debug
            order_function = resultorder.ResultOrder(term, debug=debug)
            results = sorted(results, key=order_function, reverse=True)
            results = results[:min(len(results), max_search_results)]
            if debug:
                n = tweaks.result_order_debug_files
                for sortedResults in results[:n]:
                    perf.log(sortedResults.debugOutputSort)
                for sortedResults in results:
                    sortedResults.debugOutputSort = None  # free ram

        with Performance(_('checking and classifying results:')):
            results = list(filter(CherryModel.isValidMediaEntry, results))
        if cherry.config['media.show_subfolder_count']:
            for result in results:
                result.count_subfolders_and_files()
        return results
Example 4
 def normalize_basedir(self):
     basedir = cherry.config['media.basedir']
     basedir = os.path.normcase(basedir)
     if len(basedir) > 1:
         basedir = basedir.rstrip(os.path.sep)
     cherry.config = cherry.config.replace({'media.basedir': basedir})
     log.d(_('media base directory: %r') % basedir)
Example 5
    def update_db_recursive(self, fullpath, skipfirst=False):
        '''recursively update the media database for a path in basedir'''

        from collections import namedtuple
        Item = namedtuple('Item', 'infs indb parent progress')
        def factory(fs, db, parent):
            fileobj = fs if fs is not None else db
            name = (fileobj.relpath or fileobj.fullpath) if fileobj else '<path not found in filesystem or database>'
            if parent is None:
                progress = ProgressTree(name=name)
                maxlen = lambda s: util.trim_to_maxlen(50, s)
                progress.reporter = ProgressReporter(lvl=1, namefmt=maxlen)
            else:
                progress = parent.progress.spawnchild(name)
            return Item(fs, db, parent, progress)

        log.d(_('recursive update for %s'), fullpath)
        generator = self.enumerate_fs_with_db(fullpath, itemfactory=factory)
        skipfirst and generator.send(None)
        adds_without_commit = 0
        add = 0
        deld = 0
        try:
            with self.conn:
                for item in generator:
                    infs, indb, progress = (item.infs, item.indb, item.progress)
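                    # progress-name prefixes: [+] added, [-] removed, [=] unchanged, [±] type changed (file/dir), [?] found in neither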
                    if infs and indb:
                        if infs.isdir != indb.isdir:
                            progress.name = '[±] ' + progress.name
                            deld += self.remove_recursive(indb, progress)
                            self.register_file_with_db(infs)
                            adds_without_commit = 1
                        else:
                            infs.uid = indb.uid
                            progress.name = '[=] ' + progress.name
                    elif indb:
                        progress.name = '[-] ' + progress.name
                        deld += self.remove_recursive(indb, progress)
                        adds_without_commit = 0
                        continue    # progress ticked by remove; don't tick again
                    elif infs:
                        self.register_file_with_db(item.infs)
                        adds_without_commit += 1
                        progress.name = '[+] ' + progress.name
                    else:
                        progress.name = '[?] ' + progress.name
                    if adds_without_commit == AUTOSAVEINTERVAL:
                        self.conn.commit()
                        add += adds_without_commit
                        adds_without_commit = 0
                    progress.tick()
        except Exception as exc:
            log.e(_("error while updating media: %s %s"), exc.__class__.__name__, exc)
            log.e(_("rollback to previous commit."))
            traceback.print_exc()
            raise exc
        finally:
            add += adds_without_commit
            log.i(_('items added %d, removed %d'), add, deld)
            self.load_db_to_memory()
Example 6
 def normalize_basedir(self):
     basedir = cherry.config['media.basedir']
     basedir = os.path.normcase(basedir)
     if len(basedir) > 1:
         basedir = basedir.rstrip(os.path.sep)
     cherry.config = cherry.config.replace({'media.basedir': basedir})
     log.d(_('media base directory: %r') % basedir)
Example 7
    def fetchFileIds(self, terms, maxFileIdsPerTerm, mode):
        """returns list of ids each packed in a tuple containing the id"""

        assert '' not in terms, _("terms must not contain ''")
        resultlist = []

        for term in terms:
            tprefix, tlast = term[:-1], term[-1]
            query = '''SELECT search.frowid FROM dictionary JOIN search ON search.drowid = dictionary.rowid WHERE '''
            if sys.maxunicode <= ord(tlast):
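                # the last character cannot be incremented past the Unicode range, so fall back to a LIKE prefix match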
                where = ''' dictionary.word LIKE ? '''
                params = (term + '%',)
            else:
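                # prefix match as a range scan: term <= word < term with its last character incremented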
                where = ''' (dictionary.word >= ? AND dictionary.word < ?) '''
                params = (term, tprefix + chr(1 + ord(tlast)))
            order = ' ORDER BY dictionary.occurrences ASC '
            limit = ' LIMIT 0, ' + str(maxFileIdsPerTerm) #TODO add maximum db results as configuration parameter
            sql = query + where + order + limit
            if debug:
                log.d('Search term: %r', term)
                log.d('Query used: %r, %r', sql, params)
            #print(self.conn.execute('EXPLAIN QUERY PLAN ' + sql, params).fetchall())
            self.db.execute(sql, params)
            resultlist += self.db.fetchall()
        return resultlist
Example 8
 def createOrAlterTable(self, sqlconn):
     updatedTable = False
     #table exists?
     if sqlconn.execute("""SELECT name FROM sqlite_master
         WHERE type='table' AND name=? """, (self.tablename,)).fetchall():
         dbtablelayout = sqlconn.execute("""PRAGMA table_info('%s')""" % self.tablename).fetchall()
         # build a dict mapping column name to column info
         dbtablelayout = dict((col[1], col) for col in dbtablelayout)
         #remove columns from db when not in template
         for columnname in dbtablelayout.keys():
             if columnname not in self.columns:
                 #can't do this in sqlite...
                 #log.i('Dropping column %s from table %s' % (columnname, self.tablename))
                 #sqlconn.execute("""ALTER TABLE %s DROP COLUMN %s"""%(self.tablename, columnname))
                 #updatedTable = True
                 pass
             else:
                 log.d('Column %s in table %s exists and needs no change' % (columnname, self.tablename))
         # add columns from the template that are missing in the db
         for templatecolumnname, templatecolumn in self.columns.items():
             if templatecolumnname not in dbtablelayout.keys():
                 log.i('Adding column %s to table %s' % (templatecolumnname, self.tablename))
                 sqlconn.execute("""ALTER TABLE %s ADD COLUMN %s""" % (self.tablename, templatecolumn.sql()))
                 updatedTable = True
             else:
                 log.d('Column %s in table %s exists and needs no change' % (templatecolumnname, self.tablename))
         #TODO add checks for DEFAULT value and NOT NULL
     else:
         log.i('Creating table %s' % self.tablename)
         sqlconn.execute("""CREATE TABLE %s (%s)""" % (self.tablename, ', '.join(map(lambda x: x.sql(), self.columns.values()))))
         updatedTable = True
     return updatedTable
Example 9
 def addUser(self, username, password, admin):
     if not (username.strip() or password.strip()):
         log.d(_('empty username or password!'))
         return False
     user = User.create(username, password, admin)
     try:
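         # case-insensitive duplicate check before inserting the new user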
         exists = self.conn.execute(
             'SELECT username'
             ' FROM users WHERE lower(username) = lower(?)',
             (username, )).fetchone()
         if (not exists):
             self.conn.execute(
                 '''
             INSERT INTO users
             (username, admin, password, salt)
             VALUES (?,?,?,?)''', (user.name, 1 if user.isadmin else 0,
                                   user.password, user.salt))
         else:
             raise sqlite3.IntegrityError
     except sqlite3.IntegrityError:
         log.e('cannot create user "%s", already exists!' % user.name)
         return False
     self.conn.commit()
     log.i('added user: ' + user.name)
     return True
Example 10
    def fetchFileIds(self, terms, maxFileIdsPerTerm, mode):
        """returns list of ids each packed in a tuple containing the id"""

        assert '' not in terms, _("terms must not contain ''")
        resultlist = []

        for term in terms:
            tprefix, tlast = term[:-1], term[-1]
            query = '''SELECT search.frowid FROM dictionary JOIN search ON search.drowid = dictionary.rowid WHERE '''
            if sys.maxunicode <= ord(tlast):
                where = ''' dictionary.word LIKE ? '''
                params = (term + '%',)
            else:
                where = ''' (dictionary.word >= ? AND dictionary.word < ?) '''
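                # unichr() is the Python 2 spelling; on Python 3 this would be chr()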
                nextchr = unichr(1 + ord(tlast))
                params = (term, tprefix + nextchr)
            order = ' ORDER BY dictionary.occurrences DESC '
            limit = ' LIMIT 0, ' + str(maxFileIdsPerTerm) #TODO add maximum db results as configuration parameter
            sql = query + where + order + limit
            if debug:
                log.d('Search term: %r', term)
                log.d('Query used: %r, %r', sql, params)
            #print(self.conn.execute('EXPLAIN QUERY PLAN ' + sql, params).fetchall())
            self.db.execute(sql, params)
            resultlist += [t[0] for t in self.db.fetchall()]
        return resultlist
Example 11
    def search(self, term):
        reload(cherry.tweak)
        tweaks = cherry.tweak.CherryModelTweaks
        user = cherrypy.session.get('username', None)
        if user:
            log.d(_("%(user)s searched for '%(term)s'"), {'user': user, 'term': term})
        max_search_results = cherry.config['search.maxresults']
        results = self.cache.searchfor(term, maxresults=max_search_results)
        with Performance(_('sorting DB results using ResultOrder')) as perf:
            debug = tweaks.result_order_debug
            order_function = resultorder.ResultOrder(term, debug=debug)
            results = sorted(results, key=order_function, reverse=True)
            results = results[:min(len(results), max_search_results)]
            if debug:
                n = tweaks.result_order_debug_files
                for sortedResults in results[:n]:
                    perf.log(sortedResults.debugOutputSort)
                for sortedResults in results:
                    sortedResults.debugOutputSort = None  # free ram

        with Performance(_('checking and classifying results:')):
            results = list(filter(CherryModel.isValidMediaFile, results))
        if cherry.config['media.show_subfolder_count']:
            for result in results:
                result.count_subfolders_and_files()
        return results
Example 12
    def update_db_recursive(self, fullpath, skipfirst=False):
        '''recursively update the media database for a path in basedir'''

        from collections import namedtuple
        Item = namedtuple('Item', 'infs indb parent progress')
        def factory(fs, db, parent):
            fileobj = fs if fs is not None else db
            name = (fileobj.relpath or fileobj.fullpath) if fileobj else '<path not found in filesystem or database>'
            if parent is None:
                progress = ProgressTree(name=name)
                maxlen = lambda s: util.trim_to_maxlen(50, s)
                progress.reporter = ProgressReporter(lvl=1, namefmt=maxlen)
            else:
                progress = parent.progress.spawnchild(name)
            return Item(fs, db, parent, progress)

        log.d(_('recursive update for %s'), fullpath)
        generator = self.enumerate_fs_with_db(fullpath, itemfactory=factory)
        skipfirst and generator.send(None)
        adds_without_commit = 0
        add = 0
        deld = 0
        try:
            with self.conn:
                for item in generator:
                    infs, indb, progress = (item.infs, item.indb, item.progress)
                    if infs and indb:
                        if infs.isdir != indb.isdir:
                            progress.name = '[±] ' + progress.name
                            deld += self.remove_recursive(indb, progress)
                            self.register_file_with_db(infs)
                            adds_without_commit = 1
                        else:
                            infs.uid = indb.uid
                            progress.name = '[=] ' + progress.name
                    elif indb:
                        progress.name = '[-] ' + progress.name
                        deld += self.remove_recursive(indb, progress)
                        adds_without_commit = 0
                        continue    # progress ticked by remove; don't tick again
                    elif infs:
                        self.register_file_with_db(item.infs)
                        adds_without_commit += 1
                        progress.name = '[+] ' + progress.name
                    else:
                        progress.name = '[?] ' + progress.name
                    if adds_without_commit == AUTOSAVEINTERVAL:
                        self.conn.commit()
                        add += adds_without_commit
                        adds_without_commit = 0
                    progress.tick()
        except Exception as exc:
            log.e(_("error while updating media: %s %s"), exc.__class__.__name__, exc)
            log.e(_("rollback to previous commit."))
            traceback.print_exc()
            raise exc
        finally:
            add += adds_without_commit
            log.i(_('items added %d, removed %d'), add, deld)
            self.load_db_to_memory()
Example 13
    def api_fetchalbumart(self, directory):
        _save_and_release_session()
        default_folder_image = "../res/img/folder.png"

        log.i('Fetching album art for: %s' % directory)
        filepath = os.path.join(cherry.config['media.basedir'], directory)

        if os.path.isfile(filepath):
            # if the given path is a file, try to get the image from ID3
            tag = TinyTag.get(filepath, image=True)
            image_data = tag.get_image()
            if image_data:
                log.d('Image found in tag.')
                header = {'Content-Type': 'image/jpg', 'Content-Length': len(image_data)}
                cherrypy.response.headers.update(header)
                return image_data
            else:
                # if the file does not contain an image, display the image of the
                # parent directory
                directory = os.path.dirname(directory)

        #try getting a cached album art image
        b64imgpath = albumArtFilePath(directory)
        img_data = self.albumartcache_load(b64imgpath)
        if img_data:
            cherrypy.response.headers["Content-Length"] = len(img_data)
            return img_data

        #try getting album art inside local folder
        fetcher = albumartfetcher.AlbumArtFetcher()
        localpath = os.path.join(cherry.config['media.basedir'], directory)
        header, data, resized = fetcher.fetchLocal(localpath)

        if header:
            if resized:
                #cache resized image for next time
                self.albumartcache_save(b64imgpath, data)
            cherrypy.response.headers.update(header)
            return data
        elif cherry.config['media.fetch_album_art']:
            #fetch album art from online source
            try:
                foldername = os.path.basename(directory)
                keywords = foldername
                log.i(_("Fetching album art for keywords {keywords!r}").format(keywords=keywords))
                header, data = fetcher.fetch(keywords)
                if header:
                    cherrypy.response.headers.update(header)
                    self.albumartcache_save(b64imgpath, data)
                    return data
                else:
                    # albumart fetcher failed, so we serve a standard image
                    raise cherrypy.HTTPRedirect(default_folder_image, 302)
            except:
                # albumart fetcher threw exception, so we serve a standard image
                raise cherrypy.HTTPRedirect(default_folder_image, 302)
        else:
            # no local album art found, online fetching deactivated, show default
            raise cherrypy.HTTPRedirect(default_folder_image, 302)
Example 14
 def needed(self):
     """ ``True`` if the database is unversioned or if its version is less
         than the maximum defined.
     """
     self._validate_locked()
     version, target = self._version, self._target
     log.d("%s update check: version=[%s] target=[%s]", self.name, version, target)
     return version is None or version < target
Example 15
 def _init_config(self):
     global config
     defaultcfg = configuration.from_defaults()
     configFilePath = util.configurationFile()
     log.d('loading configuration from %s', configFilePath)
     filecfg = configuration.from_configparser(configFilePath)
     config = defaultcfg + filecfg
     self._check_for_config_updates(filecfg)
Example 16
 def _init_with_version(self, vnum):
     log.d('initializing database %r to version %s', self.name, vnum)
     cxn = self.db.connection()
     cxn.isolation_level = None  # autocommit
     self._runscript(vnum, 'create.sql', cxn)
     self._run_afterscript_if_exists(vnum, cxn)
     self._setversion(vnum, cxn)
     cxn.isolation_level = ''
     cxn.close()
Example 17
 def _init_with_version(self, vnum):
     log.d('initializing database %r to version %s', self.name, vnum)
     cxn = self.db.connection()
     cxn.isolation_level = None  # autocommit
     self._runscript(vnum, 'create.sql', cxn)
     self._run_afterscript_if_exists(vnum, cxn)
     self._setversion(vnum, cxn)
     cxn.isolation_level = ''
     cxn.close()
Example 18
 def _update_to_version(self, vnum):
     log.d('updating database %r to version %d', self.name, vnum)
     cxn = self.db.connection()
     cxn.isolation_level = None  # autocommit
     self._runscript(vnum, 'update.sql', cxn)
     self._run_afterscript_if_exists(vnum, cxn)
     self._setversion(vnum, cxn)
     cxn.isolation_level = ''
     cxn.close()
Example 19
 def _update_to_version(self, vnum):
     log.d('updating database %r to version %d', self.name, vnum)
     cxn = self.db.connection()
     cxn.isolation_level = None  # autocommit
     self._runscript(vnum, 'update.sql', cxn)
     self._run_afterscript_if_exists(vnum, cxn)
     self._setversion(vnum, cxn)
     cxn.isolation_level = ''
     cxn.close()
Example 20
 def __disconnect(self):
     try:
         conn = self.__threadlocal.conn
         del self.__threadlocal.conn
     except AttributeError:
         pass
     else:
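         # close the underlying sqlite3 connection via the base class, bypassing any close() override on the Connection subclass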
         super(conn.__class__, conn).close()
         log.d('thread %s closed connection to database %r', threading.current_thread(), self.__name)
Example 21
 def needed(self):
     """ ``True`` if the database is unversioned or if its version is less
         than the maximum defined.
     """
     self._validate_locked()
     version, target = self._version, self._target
     log.d('%s update check: version=[%s] target=[%s]', self.name, version,
           target)
     return version is None or version < target
Example 22
 def _update_to_version(self, vnum):
     log.d("updating database %r to version %d", self.name, vnum)
     cxn = self.db.connection()
     cxn.isolation_level = None  # autocommit
     cxn.executescript(self.desc[vnum]["update.sql"])
     self._run_afterscript_if_exists(vnum, cxn)
     self._setversion(vnum, cxn)
     cxn.isolation_level = ""
     cxn.close()
Example 23
 def run(self):
     """Update database schema to the highest possible version."""
     self._validate_locked()
     log.i('%r: updating database schema', self.name)
     log.d('from version %r to %r', self._version, self._target)
     if None is self._version:
         self._init_with_version(self._target)
     else:
         for version in self._updates_due:
             self._update_to_version(version)
Example 24
 def run(self):
     """Update database schema to the highest possible version."""
     self._validate_locked()
     log.i('%r: updating database schema', self.name)
     log.d('from version %r to %r', self._version, self._target)
     if None is self._version:
         self._init_with_version(self._target)
     else:
         for version in self._updates_due:
             self._update_to_version(version)
Example 25
 def _init_meta(self):
     content = self.db.execute('SELECT type, name FROM sqlite_master;').fetchall()
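     # ignore the version table itself and sqlite-internal objects when checking for pre-existing content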
     content = [(t, n) for t, n in content if n != '_meta_version' and not n.startswith('sqlite')]
     with self.db.connection() as cxn:
         cxn.isolation_level = "EXCLUSIVE"
         cxn.executescript(self._metatable['create.sql'])
         if content and self._version is None:
             log.d('%s: unversioned content found: %r', self.name, content)
             self._setversion(0, cxn)
     cxn.isolation_level = ''
     cxn.close()
Example 26
 def getBaseUrl(self, redirect_unencrypted=False):
     ipAndPort = parse.urlparse(cherrypy.url()).netloc
     if cherry.config.server.ssl_enabled.bool and not self.issecure(cherrypy.url()):
         log.d('Not secure, redirecting...')
         ip = ipAndPort[:ipAndPort.rindex(':')]
         url = 'https://' + ip + ':' + cherry.config.server.ssl_port.str
         if redirect_unencrypted:
             raise cherrypy.HTTPRedirect(url, 302)
     else:
         url = 'http://' + ipAndPort
     return url
Example 27
    def search(self, term):
        user = cherrypy.session.get('username', None)
        if user:
            log.d(user + ' searched for "' + term + '"')
        results = self.cache.searchfor(term, maxresults=cherry.config.search.maxresults.int)
        with Performance('sorting DB results using ResultOrder'):
            results = sorted(results, key=resultorder.ResultOrder(term), reverse=True)
            results = results[:min(len(results), cherry.config.search.maxresults.int)]

        with Performance('checking and classifying results:'):
            results = list(filter(isValidMediaFile, results))
        return results
Example 28
 def getBaseUrl(self, redirect_unencrypted=False):
     ipAndPort = parse.urlparse(cherrypy.url()).netloc
     is_secure_connection = self.issecure(cherrypy.url())
     ssl_enabled = cherry.config['server.ssl_enabled']
     if ssl_enabled and not is_secure_connection:
         log.d(_('Not secure, redirecting...'))
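          # keep only the host part and rebuild the URL on the configured SSL port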
         ip = ipAndPort[:ipAndPort.rindex(':')]
         url = 'https://' + ip + ':' + str(cherry.config['server.ssl_port'])
         if redirect_unencrypted:
             raise cherrypy.HTTPRedirect(url, 302)
     else:
         url = 'http://' + ipAndPort
     return url
Example 29
 def getBaseUrl(self, redirect_unencrypted=False):
     ipAndPort = parse.urlparse(cherrypy.url()).netloc
     is_secure_connection = self.issecure(cherrypy.url())
     ssl_enabled = cherry.config["server.ssl_enabled"]
     if ssl_enabled and not is_secure_connection:
         log.d(_("Not secure, redirecting..."))
         ip = ipAndPort[: ipAndPort.rindex(":")]
         url = "https://" + ip + ":" + str(cherry.config["server.ssl_port"])
         if redirect_unencrypted:
             raise cherrypy.HTTPRedirect(url, 302)
     else:
         url = "http://" + ipAndPort
     return url
Example 30
 def getBaseUrl(self, redirect_unencrypted=False):
     ipAndPort = parse.urlparse(cherrypy.url()).netloc
     is_secure_connection = self.issecure(cherrypy.url())
     ssl_enabled = cherry.config['server.ssl_enabled']
     if ssl_enabled and not is_secure_connection:
         log.d('Not secure, redirecting...')
         ip = ipAndPort[:ipAndPort.rindex(':')]
         url = 'https://' + ip + ':' + str(cherry.config['server.ssl_port'])
         if redirect_unencrypted:
             raise cherrypy.HTTPRedirect(url, 302)
     else:
         url = 'http://' + ipAndPort
     return url
Example 31
 def addUser(self, username, password, admin):
     if not (username.strip() or password.strip()):
         log.d('empty username or password!')
         return
     user = User.create(username, password, admin)
     self.conn.execute('''
     INSERT INTO users
     (username, admin, password, salt)
     VALUES (?,?,?,?)''',
     (user.name, 1 if user.isadmin else 0, user.password, user.salt))
     self.conn.commit()
     msg = 'added user: ' + user.name
     log.d(msg)
     return msg
Example 32
 def addUser(self, username, password, admin):
     if not (username.strip() or password.strip()):
         log.d('empty username or password!')
         return
     user = User.create(username, password, admin)
     self.conn.execute(
         '''
     INSERT INTO users
     (username, admin, password, salt)
     VALUES (?,?,?,?)''',
         (user.name, 1 if user.isadmin else 0, user.password, user.salt))
     self.conn.commit()
     msg = 'added user: ' + user.name
     log.d(msg)
     return msg
Example 33
 def reset(self):
     """Delete all content from the database along with supporting structures."""
     self._validate_locked()
     version = self._version
     log.i('%s: resetting database', self.name)
     log.d('version: %s', version)
     if None is version:
         log.d('nothing to reset.')
         return
     with self.db.connection() as cxn:
         cxn.executescript(self.desc[version]['drop.sql'])
         cxn.executescript(self._metatable['drop.sql'])
         cxn.executescript(self._metatable['create.sql'])
         self._setversion(None, cxn)
     cxn.close()
Example 34
 def reset(self):
     """Delete all content from the database along with supporting structures."""
     self._validate_locked()
     version = self._version
     log.i('%s: resetting database', self.name)
     log.d('version: %s', version)
     if None is version:
         log.d('nothing to reset.')
         return
     with self.db.connection() as cxn:
         cxn.executescript(self.desc[version]['drop.sql'])
         cxn.executescript(self._metatable['drop.sql'])
         cxn.executescript(self._metatable['create.sql'])
         self._setversion(None, cxn)
     cxn.close()
Example 35
    def fetchFileIds(self, terms, maxFileIds, mode):
        resultlist = []

        query = '''SELECT search.frowid FROM dictionary JOIN search ON search.drowid = dictionary.rowid WHERE '''
        orterms = '(' + ' OR '.join([' dictionary.word LIKE ? '] * len(terms)) + ')'
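        # one LIKE placeholder per term, OR-joined into a single WHERE clause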
        limit = ' LIMIT 0, ' + str(maxFileIds) #TODO add maximum db results as configuration parameter
        #log.d('Search term: ' + term)
        sql = query + orterms + limit
        if debug:
            log.d('Query used: ' + sql)
        #print(self.conn.execute('EXPLAIN QUERY PLAN '+sql, (term+'%',)).fetchall())
        self.db.execute(sql, tuple(map(lambda x: x + '%', terms)))
        resultlist += self.db.fetchall()

        return resultlist
Example 36
 def validate_basedir(self):
     basedir = cherry.config.media.basedir.str
     if not basedir:
         raise AssertionError('basedir is not set')
     if not os.path.isabs(basedir):
         raise AssertionError('basedir must be absolute path: %s' % basedir)
     if not os.path.exists(basedir):
         raise AssertionError("basedir doesn't exist: %s" % basedir)
     if not os.path.isdir(basedir):
         raise AssertionError("basedir is not a directory: %s" % basedir)
     basedir = os.path.normcase(basedir)
     if len(basedir) > 1:
         basedir = basedir.rstrip(os.path.sep)
     cherry.config.media.basedir = basedir
     log.d('media base directory: %r' % basedir)
Example 37
 def addUser(self, username, password, admin):
     if not (username.strip() or password.strip()):
         log.d(_('empty username or password!'))
         return False
     user = User.create(username, password, admin)
     try:
         self.conn.execute('''
         INSERT INTO users
         (username, admin, password, salt)
         VALUES (?,?,?,?)''',
         (user.name, 1 if user.isadmin else 0, user.password, user.salt))
     except sqlite3.IntegrityError:
         log.e('cannot create user "%s", already exists!' % user.name)
         return False
     self.conn.commit()
     log.i('added user: ' + user.name)
     return True
Example 38
 def addUser(self, username, password, admin):
     if not (username.strip() or password.strip()):
         log.d(_('empty username or password!'))
         return False
     user = User.create(username, password, admin)
     try:
         self.conn.execute(
             '''
         INSERT INTO users
         (username, admin, password, salt)
         VALUES (?,?,?,?)''', (user.name, 1 if user.isadmin else 0,
                               user.password, user.salt))
     except sqlite3.IntegrityError:
         log.e('cannot create user "%s", already exists!' % user.name)
         return False
     self.conn.commit()
     log.d('added user: ' + user.name)
     return True
Example 39
    def searchfor(self, value, maxresults=10):
        mode = 'normal'
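        # a leading or trailing '!f' / '!d' restricts the search to files or directories only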
        if value.startswith('!f '):
            mode = 'fileonly'
            value = value[3:]
        elif value.endswith(' !f'):
            mode = 'fileonly'
            value = value[:-3]
        elif value.startswith('!d '):
            mode = 'dironly'
            value = value[3:]
        elif value.endswith(' !d'):
            mode = 'dironly'
            value = value[:-3]

        reload(cherrymusicserver.tweak)
        file_search_limit = cherrymusicserver.tweak.SearchTweaks.normal_file_search_limit

        terms = SQLiteCache.searchterms(value)
        with Performance(_('searching for a maximum of %s files') % str(file_search_limit * len(terms))):
            if debug:
                log.d('searchterms')
                log.d(terms)
            results = []

            maxFileIdsPerTerm = file_search_limit
            with Performance(_('file id fetching')):
                #unpack tuples
                fileids = [t[0] for t in self.fetchFileIds(terms, maxFileIdsPerTerm, mode)]

            if len(fileids) > file_search_limit:
                with Performance(_('sorting results by fileid occurrences')):
                    resultfileids = {}
                    for fileid in fileids:
                        if fileid in resultfileids:
                            resultfileids[fileid] += 1
                        else:
                            resultfileids[fileid] = 1
                    # sort items by occurrences and only return maxresults
                    fileids = sorted(resultfileids.items(), key=itemgetter(1), reverse=True)
                    fileids = [t[0] for t in fileids]
                    fileids = fileids[:min(len(fileids), file_search_limit)]

            if mode == 'normal':
                with Performance(_('querying fullpaths for %s fileIds') % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids)
            else:
                with Performance(_('querying fullpaths for %s fileIds, files only') % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids, mode=mode)

            if debug:
                log.d('resulting paths')
                log.d(results)
            return results
Example 40
    def searchfor(self, value, maxresults=10):
        mode = 'normal'
        if value.startswith('!f '):
            mode = 'fileonly'
            value = value[3:]
        elif value.endswith(' !f'):
            mode = 'fileonly'
            value = value[:-3]
        elif value.startswith('!d '):
            mode = 'dironly'
            value = value[3:]
        elif value.endswith(' !d'):
            mode = 'dironly'
            value = value[:-3]

        reload(cherrymusicserver.tweak)
        file_search_limit = cherrymusicserver.tweak.SearchTweaks.normal_file_search_limit

        terms = SQLiteCache.searchterms(value)
        with Performance(_('searching for a maximum of %s files') % str(file_search_limit * len(terms))):
            if debug:
                log.d('searchterms')
                log.d(terms)
            results = []

            maxFileIdsPerTerm = file_search_limit
            with Performance(_('file id fetching')):
                #unpack tuples
                fileids = [t[0] for t in self.fetchFileIds(terms, maxFileIdsPerTerm, mode)]

            if len(fileids) > file_search_limit:
                with Performance(_('sorting results by fileid occurrences')):
                    resultfileids = {}
                    for fileid in fileids:
                        if fileid in resultfileids:
                            resultfileids[fileid] += 1
                        else:
                            resultfileids[fileid] = 1
                    # sort items by occurrences and only return maxresults
                    fileids = sorted(resultfileids.items(), key=itemgetter(1), reverse=True)
                    fileids = [t[0] for t in fileids]
                    fileids = fileids[:min(len(fileids), file_search_limit)]

            if mode == 'normal':
                with Performance(_('querying fullpaths for %s fileIds') % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids)
            else:
                with Performance(_('querying fullpaths for %s fileIds, files only') % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids, mode=mode)

            if debug:
                log.d('resulting paths')
                log.d(results)
            return results
Example 41
    def searchfor(self, value, maxresults=10):
        mode = 'normal'
        if value.startswith('!f '):
            mode = 'fileonly'
            value = value[3:]
        elif value.endswith(' !f'):
            mode = 'fileonly'
            value = value[:-3]
        elif value.startswith('!d '):
            mode = 'dironly'
            value = value[3:]
        elif value.endswith(' !d'):
            mode = 'dironly'
            value = value[:-3]

        reload(cherrymusicserver.tweak)
        file_search_limit = cherrymusicserver.tweak.SearchTweaks.normal_file_search_limit

        terms = SQLiteCache.searchterms(value)
        with Performance(
                _('searching for a maximum of %s files') %
                str(file_search_limit * len(terms))):
            if debug:
                log.d('searchterms')
                log.d(terms)
            results = []

            maxFileIdsPerTerm = file_search_limit
            with Performance(_('file id fetching')):
                fileids = self.fetchFileIds(terms, maxFileIdsPerTerm, mode)

            if len(fileids) > file_search_limit:
                with Performance(_('sorting results by fileid occurrences')):
                    # sort items by occurrences and only return maxresults
                    fileids = [
                        fid[0] for fid in Counter(fileids).most_common(
                            file_search_limit)
                    ]

            if mode == 'normal':
                with Performance(
                        _('querying fullpaths for %s fileIds') % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids)
            else:
                with Performance(
                        _('querying fullpaths for %s fileIds, files only') %
                        len(fileids)):
                    results += self.musicEntryFromFileIds(fileids, mode=mode)

            if debug:
                log.d('resulting paths')
                log.d(results)
            return results
Example 42
    def searchfor(self, value, maxresults=10):
        mode = 'normal'
        if value.startswith('!f '):
            mode = 'fileonly'
            value = value[3:]
        elif value.endswith(' !f'):
            mode = 'fileonly'
            value = value[:-3]
        elif value.startswith('!d '):
            mode = 'dironly'
            value = value[3:]
        elif value.endswith(' !d'):
            mode = 'dironly'
            value = value[:-3]

        terms = SQLiteCache.searchterms(value)
        with Performance('searching for a maximum of %s files' % str(NORMAL_FILE_SEARCH_LIMIT * len(terms))):
            if debug:
                log.d('searchterms')
                log.d(terms)
            results = []

            maxFileIdsPerTerm = NORMAL_FILE_SEARCH_LIMIT
            with Performance('file id fetching'):
                #unpack tuples
                fileids = [t[0] for t in self.fetchFileIds(terms, maxFileIdsPerTerm, mode)]

            if len(fileids) > NORMAL_FILE_SEARCH_LIMIT:
                with Performance('sorting results by fileid occurrences'):
                    resultfileids = {}
                    for fileid in fileids:
                        if fileid in resultfileids:
                            resultfileids[fileid] += 1
                        else:
                            resultfileids[fileid] = 1
                    # sort items by occurrences and only return maxresults
                    fileids = sorted(resultfileids.items(), key=itemgetter(1), reverse=True)
                    fileids = [t[0] for t in fileids]
                    fileids = fileids[:min(len(fileids), NORMAL_FILE_SEARCH_LIMIT)]

            if mode == 'normal':
                with Performance('querying fullpaths for %s fileIds' % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids)
            else:
                with Performance('querying fullpaths for %s fileIds, files only' % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids, mode=mode)

            if debug:
                log.d('resulting paths')
                log.d(results)
            return results
Example 43
    def searchfor(self, value, maxresults=10):
        mode = 'normal'
        if value.startswith('!f '):
            mode = 'fileonly'
            value = value[3:]
        elif value.endswith(' !f'):
            mode = 'fileonly'
            value = value[:-3]
        elif value.startswith('!d '):
            mode = 'dironly'
            value = value[3:]
        elif value.endswith(' !d'):
            mode = 'dironly'
            value = value[:-3]

        reload(cherrymusicserver.tweak)
        file_search_limit = cherrymusicserver.tweak.SearchTweaks.normal_file_search_limit

        terms = SQLiteCache.searchterms(value)
        with Performance(_('searching for a maximum of %s files') % str(file_search_limit * len(terms))):
            if debug:
                log.d('searchterms')
                log.d(terms)
            results = []

            maxFileIdsPerTerm = file_search_limit
            with Performance(_('file id fetching')):
                fileids = self.fetchFileIds(terms, maxFileIdsPerTerm, mode)

            if len(fileids) > file_search_limit:
                with Performance(_('sorting results by fileid occurrences')):
                    # sort items by occurrences and only return maxresults
                    fileids = [
                        fid[0] for fid in
                        Counter(fileids).most_common(file_search_limit)
                    ]

            if mode == 'normal':
                with Performance(_('querying fullpaths for %s fileIds') % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids)
            else:
                with Performance(_('querying fullpaths for %s fileIds, files only') % len(fileids)):
                    results += self.musicEntryFromFileIds(fileids, mode=mode)

            if debug:
                log.d('resulting paths')
                log.d(results)
            return results
Example 44
def provide(handle, provider, args=(), kwargs={}):
    """ Activate a provider for the service identified by ``handle``,
        replacing a previous provider for the same service.

        If the provider is a ``type``, an instance will be created as the
        actual provider. Instantiation is lazy, meaning it will be deferred
        until the provider is requested (:func:`get`) by some user.

        To use a type as a provider, you need to wrap it into something that is
        not a type.

        handle : str
            The name of the service.
        provider :
            An object that provides the service, or a type that instantiates
            such objects. Instantiation will happen on the first get call.
        args, kwargs :
            Pass on arguments to a type.
    """
    assert isinstance(provider, type) or not (args or kwargs)
    __provider_factories[handle] = _ProviderFactory.get(provider, args, kwargs)
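    # drop any cached instance so the next get() builds one from the newly registered provider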
    __providercache.pop(handle, None)
    log.d('service %r: now provided by %r', handle, provider)
Example 45
def provide(handle, provider, args=(), kwargs={}):
    """ Activate a provider for the service identified by ``handle``,
        replacing a previous provider for the same service.

        If the provider is a ``type``, an instance will be created as the
        actual provider. Instantiation is lazy, meaning it will be deferred
        until the provider is requested (:func:`get`) by some user.

        To use a type as a provider, you need to wrap it into something that is
        not a type.

        handle : str
            The name of the service.
        provider :
            An object that provides the service, or a type that instantiates
            such objects. Instantiation will happen on the first get call.
        args, kwargs :
            Pass on arguments to a type.
    """
    assert isinstance(provider, type) or not (args or kwargs)
    __provider_factories[handle] = _ProviderFactory.get(provider, args, kwargs)
    __providercache.pop(handle, None)
    log.d('service %r: now provided by %r', handle, provider)
Example 46
    def search(self, term):
        reload(cherry.tweak)
        tweaks = cherry.tweak.CherryModelTweaks
        user = cherrypy.session.get('username', None)
        if user:
            log.d(user + ' searched for "' + term + '"')
        max_search_results = cherry.config['search.maxresults']
        results = self.cache.searchfor(term, maxresults=max_search_results)
        with Performance('sorting DB results using ResultOrder'):
            debug = tweaks.result_order_debug
            order_function = resultorder.ResultOrder(term, debug=debug)
            results = sorted(results, key=order_function, reverse=True)
            results = results[:min(len(results), max_search_results)]
            if debug:
                n = tweaks.result_order_debug_files
                for sortedResults in results[:n]:
                    Performance.log(sortedResults.debugOutputSort)
                for sortedResults in results:
                    sortedResults.debugOutputSort = None  # free ram

        with Performance('checking and classifying results:'):
            results = list(filter(isValidMediaFile, results))
        return results
Example 47
    def search(self, term):
        reload(cherry.tweak)
        tweaks = cherry.tweak.CherryModelTweaks
        user = cherrypy.session.get('username', None)
        if user:
            log.d(user + ' searched for "' + term + '"')
        max_search_results = cherry.config['search.maxresults']
        results = self.cache.searchfor(term, maxresults=max_search_results)
        with Performance('sorting DB results using ResultOrder'):
            debug = tweaks.result_order_debug
            order_function = resultorder.ResultOrder(term, debug=debug)
            results = sorted(results, key=order_function, reverse=True)
            results = results[:min(len(results), max_search_results)]
            if debug:
                n = tweaks.result_order_debug_files
                for sortedResults in results[:n]:
                    Performance.log(sortedResults.debugOutputSort)
                for sortedResults in results:
                    sortedResults.debugOutputSort = None  # free ram

        with Performance('checking and classifying results:'):
            results = list(filter(isValidMediaFile, results))
        return results
Example 48
    def api_fetchalbumart(self, directory):
        _save_and_release_session()
        default_folder_image = "../res/img/folder.png"

        log.i('Fetching album art for: %s' % directory)
        filepath = os.path.join(cherry.config['media.basedir'], directory)

        if os.path.isfile(filepath):
            # if the given path is a file, try to get the image from ID3
            tag = TinyTag.get(filepath, image=True)
            image_data = tag.get_image()
            if image_data:
                log.d('Image found in tag.')
                header = {
                    'Content-Type': 'image/jpg',
                    'Content-Length': len(image_data)
                }
                cherrypy.response.headers.update(header)
                return image_data
            else:
                # if the file does not contain an image, display the image of the
                # parent directory
                directory = os.path.dirname(directory)

        #try getting a cached album art image
        b64imgpath = albumArtFilePath(directory)
        img_data = self.albumartcache_load(b64imgpath)
        if img_data:
            cherrypy.response.headers["Content-Length"] = len(img_data)
            return img_data

        #try getting album art inside local folder
        fetcher = albumartfetcher.AlbumArtFetcher()
        localpath = os.path.join(cherry.config['media.basedir'], directory)
        header, data, resized = fetcher.fetchLocal(localpath)

        if header:
            if resized:
                #cache resized image for next time
                self.albumartcache_save(b64imgpath, data)
            cherrypy.response.headers.update(header)
            return data
        elif cherry.config['media.fetch_album_art']:
            #fetch album art from online source
            try:
                foldername = os.path.basename(directory)
                keywords = foldername
                log.i(
                    _("Fetching album art for keywords {keywords!r}").format(
                        keywords=keywords))
                header, data = fetcher.fetch(keywords)
                if header:
                    cherrypy.response.headers.update(header)
                    self.albumartcache_save(b64imgpath, data)
                    return data
                else:
                    # albumart fetcher failed, so we serve a standard image
                    raise cherrypy.HTTPRedirect(default_folder_image, 302)
            except:
                # albumart fetcher threw exception, so we serve a standard image
                raise cherrypy.HTTPRedirect(default_folder_image, 302)
        else:
            # no local album art found, online fetching deactivated, show default
            raise cherrypy.HTTPRedirect(default_folder_image, 302)
Example 49
    def searchfor(self, value, maxresults=10):
        mode = 'normal'
        if value.startswith('!f '):
            mode = 'fileonly'
            value = value[3:]
        elif value.endswith(' !f'):
            mode = 'fileonly'
            value = value[:-3]
        elif value.startswith('!d '):
            mode = 'dironly'
            value = value[3:]
        elif value.endswith(' !d'):
            mode = 'dironly'
            value = value[:-3]

        terms = SQLiteCache.searchterms(value)
        with Performance('searching for a maximum of %s files' %
                         str(NORMAL_FILE_SEARCH_LIMIT * len(terms))):
            if debug:
                log.d('searchterms')
                log.d(terms)
            results = []

            maxFileIdsPerTerm = NORMAL_FILE_SEARCH_LIMIT
            with Performance('file id fetching'):
                #unpack tuples
                fileids = [
                    t[0]
                    for t in self.fetchFileIds(terms, maxFileIdsPerTerm, mode)
                ]

            if len(fileids) > NORMAL_FILE_SEARCH_LIMIT:
                with Performance('sorting results by fileid occurrences'):
                    resultfileids = {}
                    for fileid in fileids:
                        if fileid in resultfileids:
                            resultfileids[fileid] += 1
                        else:
                            resultfileids[fileid] = 1
                    # sort items by occurrences and only return maxresults
                    fileids = sorted(resultfileids.items(),
                                     key=itemgetter(1),
                                     reverse=True)
                    fileids = [t[0] for t in fileids]
                    fileids = fileids[:min(len(fileids), NORMAL_FILE_SEARCH_LIMIT)]

            if mode == 'normal':
                with Performance('querying fullpaths for %s fileIds' %
                                 len(fileids)):
                    results += self.musicEntryFromFileIds(fileids)
            else:
                with Performance(
                        'querying fullpaths for %s fileIds, files only' %
                        len(fileids)):
                    results += self.musicEntryFromFileIds(fileids, mode=mode)

            if debug:
                log.d('resulting paths')
                log.d(results)
            return results
Example 50
 def _setversion(self, value, conn=None):
     del self.__version
     conn = conn or self.db.connection
     log.d('{0}: set version to {1}'.format(self.name, value))
     conn.execute('INSERT INTO _meta_version(version) VALUES (?)',
                  (value, ))
Example 51
    def api_fetchalbumart(self, directory):
        _save_and_release_session()
        default_folder_image = "../res/img/folder.png"

        log.i('Fetching album art for: %s' % directory)
        filepath = os.path.join(cherry.config['media.basedir'], directory)

        if os.path.isfile(filepath):
            # if the given path is a file, try to get the image from ID3
            tag = TinyTag.get(filepath, image=True)
            image_data = tag.get_image()
            if image_data:
                log.d('Image found in tag.')
                header = {'Content-Type': 'image/jpg', 'Content-Length': len(image_data)}
                cherrypy.response.headers.update(header)
                return image_data
            else:
                # if the file does not contain an image, display the image of the
                # parent directory
                directory = os.path.dirname(directory)

        #try getting a cached album art image
        b64imgpath = albumArtFilePath(directory)
        img_data = self.albumartcache_load(b64imgpath)
        if img_data:
            cherrypy.response.headers["Content-Length"] = len(img_data)
            return img_data

        #try getting album art inside local folder
        fetcher = albumartfetcher.AlbumArtFetcher()
        localpath = os.path.join(cherry.config['media.basedir'], directory)
        header, data, resized = fetcher.fetchLocal(localpath)

        if header:
            if resized:
                #cache resized image for next time
                self.albumartcache_save(b64imgpath, data)
            cherrypy.response.headers.update(header)
            return data
        elif cherry.config['media.fetch_album_art']:
            # maximum number of files to inspect for metadata when building albumart keywords
            METADATA_ALBUMART_MAX_FILES = 10
            #fetch album art from online source
            try:
                foldername = os.path.basename(directory)
                keywords = foldername
                # remove any odd characters from the folder name
                keywords = re.sub(r'[^A-Za-z\s]', ' ', keywords)
                # try getting metadata from files in the folder for a more
                # accurate match
                files = os.listdir(localpath)
                for i, filename in enumerate(files):
                    if i >= METADATA_ALBUMART_MAX_FILES:
                        break
                    path = os.path.join(localpath, filename)
                    metadata = metainfo.getSongInfo(path)
                    if metadata.artist and metadata.album:
                        keywords = '{} - {}'.format(metadata.artist, metadata.album)
                        break

                log.i(_("Fetching album art for keywords {keywords!r}").format(keywords=keywords))
                header, data = fetcher.fetch(keywords)
                if header:
                    cherrypy.response.headers.update(header)
                    self.albumartcache_save(b64imgpath, data)
                    return data
                else:
                    # albumart fetcher failed, so we serve a standard image
                    raise cherrypy.HTTPRedirect(default_folder_image, 302)
            except:
                # albumart fetcher threw exception, so we serve a standard image
                raise cherrypy.HTTPRedirect(default_folder_image, 302)
        else:
            # no local album art found, online fetching deactivated, show default
            raise cherrypy.HTTPRedirect(default_folder_image, 302)
Example 52
 def wrapper(*args, **kwargs):
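     # time the wrapped call and log its duration at debug level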
     starttime = time()
     result = func(*args, **kwargs)
     duration = time() - starttime
     log.d('%s.%s: %.4f s', func.__module__, func.__name__, duration)
     return result