def _get_audiodbid(self):
    """Return the theaudiodb.com artist id, cached in self.IDFILEPATH.

    If the id file does not yet exist, the id is extracted from the cached
    artist JSON in self.ARTISTFILEPATH and written to the id file.  The id
    is then read back from the id file, so an empty string is returned when
    no id could be determined (or the id file could not be created).
    All helper log output is accumulated in self.LOGLINES.
    """
    audiodbid = ''
    exists, cloglines = checkPath(self.IDFILEPATH, False)
    self.LOGLINES.extend(cloglines)
    if not exists:
        exists, cloglines = checkPath(self.ARTISTFILEPATH, False)
        self.LOGLINES.extend(cloglines)
        if exists:
            rloglines, rawdata = readFile(self.ARTISTFILEPATH)
            self.LOGLINES.extend(rloglines)
            try:
                gotData = True
                json_data = _json.loads(rawdata)
            except ValueError:
                self.LOGLINES.append(
                    'no valid JSON data returned from theaudiodb.com, setting artist to None')
                gotData = False
            if gotData:
                artist = json_data.get('artists')
            else:
                artist = None
            # FIX: truthiness check instead of `is not None` — an empty
            # 'artists' list would previously raise IndexError on artist[0]
            if artist:
                audiodbid = artist[0].get('idArtist', '')
            if audiodbid:
                success, wloglines = writeFile(audiodbid, self.IDFILEPATH)
                self.LOGLINES.extend(wloglines)
    # always read the id back from the id file (whether pre-existing or
    # just written); readFile yields '' if the file is absent
    rloglines, audiodbid = readFile(self.IDFILEPATH)
    self.LOGLINES.extend(rloglines)
    return audiodbid
def _get_data(self, filepath, cachefilepath, url, url_params):
    """Return artist JSON data, refreshing the on-disk cache when stale.

    When _update_cache signals the cache is stale, the data is fetched from
    `url` via self.JSONURL and written to `filepath`.  The cache file is then
    read back and parsed; a corrupt cache file is deleted so a fresh copy is
    fetched on the next run.  Returns the parsed JSON data, or '' when no
    valid data is available.  Log output accumulates in self.LOGLINES.
    """
    json_data = ''
    if self._update_cache(filepath, cachefilepath):
        success, uloglines, json_data = self.JSONURL.Get(url, params=url_params)
        self.LOGLINES.extend(uloglines)
        if success:
            success, wloglines = writeFile(
                py2_encode(_json.dumps(json_data)), filepath)
            self.LOGLINES.extend(wloglines)
    exists, cloglines = checkPath(filepath, False)
    self.LOGLINES.extend(cloglines)
    if exists:
        self._get_audiodbid()  # this is to generate the id file if it doesn't exist
        rloglines, rawdata = readFile(filepath)
        self.LOGLINES.extend(rloglines)
        try:
            json_data = _json.loads(rawdata)
        except ValueError:
            # corrupt cache: remove it so the next run re-downloads
            success, dloglines = deleteFile(filepath)
            self.LOGLINES.extend(dloglines)
            # FIX: grammar in log message ("will be download" -> "will be downloaded")
            self.LOGLINES.append(
                'Deleted old cache file. New file will be downloaded on next run.')
            json_data = ''
    return json_data
def getMBID(self, mbid_params):
    """Return (musicbrainz_id, loglines) from the cached artist JSON file.

    Reads self.ARTISTFILEPATH (set via _set_filepaths) and extracts
    'strMusicBrainzID' from the first entry of the 'artists' array.
    Returns ('', loglines) when the file is missing, unparsable, or has
    no usable artist entry.
    """
    self.LOGLINES = []
    self._set_filepaths(mbid_params)
    exists, cloglines = checkPath(self.ARTISTFILEPATH, False)
    self.LOGLINES.extend(cloglines)
    if exists:
        cloglines, rawdata = readFile(self.ARTISTFILEPATH)
        self.LOGLINES.extend(cloglines)
        try:
            json_data = _json.loads(rawdata)
        except ValueError:
            self.LOGLINES.append('no valid JSON data returned from ' +
                                 self.ARTISTFILEPATH)
            return '', self.LOGLINES
        self.LOGLINES.append('musicbrainz ID found in %s file' %
                             self.ARTISTFILEPATH)
        try:
            return json_data.get('artists')[0].get('strMusicBrainzID', ''), self.LOGLINES
        # FIX: also catch IndexError — an empty 'artists' list previously
        # escaped as an unhandled exception (TypeError only covers None)
        except (TypeError, IndexError):
            self.LOGLINES.append('error reading musicbrainz ID from ' +
                                 self.ARTISTFILEPATH)
            return '', self.LOGLINES
    else:
        return '', self.LOGLINES
def getMBID(self, mbid_params):
    """Return (musicbrainz_id, loglines) read from a musicbrainz.nfo file.

    Looks for 'musicbrainz.nfo' inside the caller-supplied 'infodir' and
    returns its contents with trailing newlines stripped, or '' when the
    file does not exist.
    """
    self.loglines = []
    nfo_path = os.path.join(mbid_params.get('infodir', ''), 'musicbrainz.nfo')
    exists, check_lines = checkPath(nfo_path, False)
    self.loglines.extend(check_lines)
    if not exists:
        return '', self.loglines
    read_lines, rawdata = readFile(nfo_path)
    self.loglines.extend(read_lines)
    return rawdata.rstrip('\n'), self.loglines
def _update_cache(self, filepath, cachefilepath):
    """Decide whether the fanarttv cache needs to be refreshed.

    Returns False when `filepath` exists and is younger than the configured
    cache timeout.  Otherwise stamps a fresh cache time via _put_cache_time
    and returns its result (truthy on success), signalling the caller to
    re-download the data.
    """
    file_exists, check_lines = checkPath(filepath, False)
    self.LOGLINES.extend(check_lines)
    if not file_exists:
        self.LOGLINES.append('no fanarttv cachetime file found, creating it')
        return self._put_cache_time(cachefilepath)
    file_age = time.time() - xbmcvfs.Stat(filepath).st_mtime()
    if file_age < self._get_cache_time(cachefilepath):
        self.LOGLINES.append('cached artist info found for fanarttv')
        return False
    self.LOGLINES.append('outdated cached artist info found for fanarttv')
    return self._put_cache_time(cachefilepath)
def _get_data(self, filepath, cachefilepath, url_params):
    """Return the raw cached XML, downloading a fresh copy when stale.

    When _update_cache says the cache is stale, fetches self.URL with the
    given params via self.TEXTURL and writes the response to `filepath`.
    The cache file is then read back and its contents returned; '' is
    returned when the file is missing.  Logs accumulate in self.LOGLINES.
    """
    rawxml = ''
    if self._update_cache(filepath, cachefilepath):
        fetched, fetch_lines, payload = self.TEXTURL.Get(self.URL, params=url_params)
        self.LOGLINES.extend(fetch_lines)
        if fetched:
            written, write_lines = writeFile(py2_encode(payload), filepath)
            self.LOGLINES.extend(write_lines)
    file_exists, check_lines = checkPath(filepath, False)
    self.LOGLINES.extend(check_lines)
    if file_exists:
        read_lines, rawxml = readFile(filepath)
        self.LOGLINES.extend(read_lines)
    return rawxml
def _get_cache_time(self, cachefilepath):
    """Return the last.fm cache timeout (in seconds) from the cache-time file.

    Creates the cache-time file via _put_cache_time when it is missing.
    Returns 0 when the file cannot be created or its contents are not a
    valid integer.
    """
    self.LOGLINES.append('getting the cache timeout information for last.fm')
    exists, check_lines = checkPath(cachefilepath, False)
    self.LOGLINES.extend(check_lines)
    raw_value = ''
    # if the file is missing, attempt to create it before reading
    if exists or self._put_cache_time(cachefilepath):
        read_lines, raw_value = readFile(cachefilepath)
        self.LOGLINES.extend(read_lines)
    try:
        return int(raw_value)
    except ValueError:
        return 0
def getMBID(self, mbid_params):
    """Return (musicbrainz_id, loglines) from the cached bio XML file.

    Parses the XML file named self.BIOFILENAME inside the caller-supplied
    'infodir' and returns the text of the first <mbid> element found.
    Returns ('', loglines) when the file is missing, unparsable, or
    contains no <mbid> element.
    """
    self.LOGLINES = []
    filepath = os.path.join(mbid_params.get('infodir', ''), self.BIOFILENAME)
    exists, cloglines = checkPath(filepath, False)
    self.LOGLINES.extend(cloglines)
    if exists:
        rloglines, rawxml = readFile(filepath)
        self.LOGLINES.extend(rloglines)
        try:
            xmldata = _xmltree.fromstring(py2_encode(rawxml))
        except _xmltree.ParseError:
            self.LOGLINES.append('error reading musicbrainz ID from ' + filepath)
            return '', self.LOGLINES
        # FIX: Element.getiterator() was deprecated and removed in Python
        # 3.9; Element.iter() is the supported equivalent (since 2.7/3.2)
        for element in xmldata.iter():
            if element.tag == "mbid":
                return element.text, self.LOGLINES
        # FIX: added the missing space before the filepath in the message
        self.LOGLINES.append('no mbid found in ' + filepath)
        return '', self.LOGLINES
    else:
        return '', self.LOGLINES
def _get_cache_time(self, cachefilepath):
    """Return the fanarttv cache timeout (seconds) from the cache-time file.

    Creates the cache-time file via _put_cache_time when it is missing.
    A donation or client key caps the returned value at the corresponding
    shorter expiry so the shorter period takes effect immediately instead
    of only after the old (longer) cached period runs out.
    """
    raw_value = ''
    self.LOGLINES.append('getting the cache timeout information for fanarttv')
    exists, check_lines = checkPath(cachefilepath, False)
    self.LOGLINES.extend(check_lines)
    # if the file is missing, attempt to create it before reading
    if exists or self._put_cache_time(cachefilepath):
        read_lines, raw_value = readFile(cachefilepath)
        self.LOGLINES.extend(read_lines)
    try:
        cachetime = int(raw_value)
    except ValueError:
        cachetime = 0
    # honor donation or client key cache time immediately instead of
    # waiting for the old cache to expire
    if self.HASDONATION and cachetime > self.CACHEEXPIREWITHDONATION:
        return self.CACHEEXPIREWITHDONATION
    if self.HASCLIENTKEY and cachetime > self.CACHEEXPIREWITHCLIENTKEY:
        return self.CACHEEXPIREWITHCLIENTKEY
    return cachetime