def _get_audiodbid(self):
    """Return the theaudiodb.com artist ID, caching it in self.IDFILEPATH.

    If the ID file does not exist yet, the ID is extracted from the cached
    artist JSON (self.ARTISTFILEPATH) and written to the ID file first.
    Returns the ID as a string ('' if it cannot be determined). All helper
    log lines are accumulated in self.LOGLINES.
    """
    audiodbid = ''
    exists, cloglines = checkPath(self.IDFILEPATH, False)
    self.LOGLINES.extend(cloglines)
    if not exists:
        exists, cloglines = checkPath(self.ARTISTFILEPATH, False)
        self.LOGLINES.extend(cloglines)
        if exists:
            rloglines, rawdata = readFile(self.ARTISTFILEPATH)
            self.LOGLINES.extend(rloglines)
            try:
                gotData = True
                json_data = _json.loads(rawdata)
            except ValueError:
                self.LOGLINES.append(
                    'no valid JSON data returned from theaudiodb.com, setting artist to None')
                gotData = False
            if gotData:
                artist = json_data.get('artists')
            else:
                artist = None
            # BUGFIX: the original tested `artist is not None`, which still
            # raised IndexError on artist[0] when 'artists' was an empty
            # list; a truthiness check covers both None and [].
            if artist:
                audiodbid = artist[0].get('idArtist', '')
            if audiodbid:
                success, wloglines = writeFile(audiodbid, self.IDFILEPATH)
                self.LOGLINES.extend(wloglines)
    # always (re)read the ID file so the returned value reflects disk state
    rloglines, audiodbid = readFile(self.IDFILEPATH)
    self.LOGLINES.extend(rloglines)
    return audiodbid
def getSimilarArtists(self, sim_params):
    """Return ([(name, image_path), ...], loglines) read from a local XML file.

    sim_params is a dict providing 'localartistdir' and 'artist'. Returns an
    empty list when the XML file is missing/empty or contains no similar
    artists.
    """
    self.loglines = []
    similar_artists = []
    filepath = os.path.join(sim_params.get(
        'localartistdir', ''), self.SIMILARFILEPATH)
    local_path = os.path.join(sim_params.get('localartistdir', ''), py2_decode(
        sim_params.get('artist', '')), 'override')
    self.loglines.append('checking ' + filepath)
    rloglines, rawxml = readFile(filepath)
    self.loglines.extend(rloglines)
    if rawxml:
        xmldata = _xmltree.fromstring(py2_encode(rawxml))
    else:
        return [], self.loglines
    # BUGFIX: initialize name so an <image> element appearing before any
    # <name> element cannot raise NameError
    name = ''
    for element in xmldata.iter():
        if element.tag == "name":
            name = py2_encode(element.text)
        elif element.tag == "image":
            image_text = element.text
            if not image_text:
                image = ''
            else:
                image = os.path.join(local_path, 'similar', image_text)
            similar_artists.append((name, image))
    if similar_artists == []:
        self.loglines.append('no similar artists found in local xml file')
        return [], self.loglines
    else:
        return similar_artists, self.loglines
def getAlbumList(self, album_params):
    """Return ([(name, image_path), ...], loglines) read from a local XML file.

    album_params is a dict providing 'localartistdir' and 'artist'. Returns
    an empty list when the XML file is missing/empty or contains no albums.
    """
    self.loglines = []
    albums = []
    filepath = os.path.join(album_params.get(
        'localartistdir', ''), self.ALBUMFILEPATH)
    local_path = os.path.join(album_params.get('localartistdir', ''), py2_decode(
        album_params.get('artist', '')), 'override')
    self.loglines.append('checking ' + filepath)
    rloglines, rawxml = readFile(filepath)
    self.loglines.extend(rloglines)
    if rawxml:
        xmldata = _xmltree.fromstring(py2_encode(rawxml))
    else:
        return [], self.loglines
    # BUGFIX: initialize name so an <image> element appearing before any
    # <name> element cannot raise NameError
    name = ''
    for element in xmldata.iter():
        if element.tag == "name":
            name = py2_encode(element.text)
        elif element.tag == "image":
            image_text = element.text
            if not image_text:
                image = ''
            else:
                image = os.path.join(local_path, 'albums', image_text)
            albums.append((name, image))
    if albums == []:
        self.loglines.append('no albums found in local xml file')
        return [], self.loglines
    else:
        return albums, self.loglines
def _check_cmd_ignore(self, cmd, ignore_for, lastused_file):
    """Return cmd if it should run now, or '' if inside its ignore window.

    cmd: the command string ('' / falsy means nothing to run).
    ignore_for: minutes to suppress repeat runs (0 disables suppression).
    lastused_file: path of the file holding the last-run timestamp.
    When the command is allowed to run, the timestamp file is refreshed.
    """
    if not cmd:
        if lastused_file == self.PRE_LASTUSED_FILE:
            cmd_type = 'pre'
        else:
            cmd_type = 'post'
        self.LW.log(['no %s command to check' % cmd_type])
        return ''
    if ignore_for == 0:
        self.LW.log(['ignoring the cache time and running command'])
        return cmd
    if os.path.isfile(lastused_file):
        exists = True
        loglines, lastused = readFile(lastused_file)
        self.LW.log(loglines)
        # BUGFIX: a corrupt or empty lastused file raised an unhandled
        # ValueError on float(); treat it as if no valid timestamp exists
        try:
            lastused = float(lastused)
        except (TypeError, ValueError):
            exists = False
    else:
        exists = False
    if (not exists) or (time.time() - lastused > ignore_for * 60):
        self.LW.log(['setting lastused and running command'])
        success, loglines = writeFile(str(time.time()), lastused_file, wtype='w')
        self.LW.log(loglines)
        return cmd
    self.LW.log(['ignoring command for now'])
    return ''
def _get_data(self, filepath, cachefilepath, url, url_params):
    """Return parsed JSON data for filepath, refreshing the cache when stale.

    If the cache is due for an update, fetches fresh JSON from url (with
    url_params) and writes it to filepath. The authoritative result is then
    re-read from disk; unparseable cached data is deleted so the next run
    re-downloads it. Returns the parsed JSON object, or '' on failure.
    """
    json_data = ''
    if self._update_cache(filepath, cachefilepath):
        success, uloglines, json_data = self.JSONURL.Get(url, params=url_params)
        self.LOGLINES.extend(uloglines)
        if success:
            # persist the freshly fetched JSON so later runs can use it
            success, wloglines = writeFile(
                py2_encode(_json.dumps(json_data)), filepath)
            self.LOGLINES.extend(wloglines)
    exists, cloglines = checkPath(filepath, False)
    self.LOGLINES.extend(cloglines)
    if exists:
        self._get_audiodbid(
        )  # this is to generate the id file if it doesn't exist
        # re-read from disk so the return value matches the cached file
        rloglines, rawdata = readFile(filepath)
        self.LOGLINES.extend(rloglines)
        try:
            json_data = _json.loads(rawdata)
        except ValueError:
            # cache file is corrupt; remove it so a fresh fetch happens later
            success, dloglines = deleteFile(filepath)
            self.LOGLINES.extend(dloglines)
            self.LOGLINES.append(
                'Deleted old cache file. New file will be download on next run.'
            )
            json_data = ''
    return json_data
def getMBID(self, mbid_params):
    """Return (musicbrainz_id, loglines) from the cached artist JSON file.

    Returns ('', loglines) when the file is missing, holds invalid JSON, or
    the artist entry lacks the expected structure.
    """
    self.LOGLINES = []
    self._set_filepaths(mbid_params)
    exists, cloglines = checkPath(self.ARTISTFILEPATH, False)
    self.LOGLINES.extend(cloglines)
    if not exists:
        return '', self.LOGLINES
    rloglines, rawdata = readFile(self.ARTISTFILEPATH)
    self.LOGLINES.extend(rloglines)
    try:
        parsed = _json.loads(rawdata)
    except ValueError:
        self.LOGLINES.append('no valid JSON data returned from ' + self.ARTISTFILEPATH)
        return '', self.LOGLINES
    self.LOGLINES.append('musicbrainz ID found in %s file' % self.ARTISTFILEPATH)
    try:
        mbid = parsed.get('artists')[0].get('strMusicBrainzID', '')
    except TypeError:
        self.LOGLINES.append('error reading musicbrainz ID from ' + self.ARTISTFILEPATH)
        return '', self.LOGLINES
    return mbid, self.LOGLINES
def getMBID(self, mbid_params):
    """Return (musicbrainz_id, loglines) read from musicbrainz.nfo.

    The nfo file is looked up in mbid_params['infodir']; trailing newlines
    are stripped from its contents. Returns ('', loglines) when absent.
    """
    self.loglines = []
    filename = os.path.join(mbid_params.get(
        'infodir', ''), 'musicbrainz.nfo')
    exists, cloglines = checkPath(filename, False)
    self.loglines.extend(cloglines)
    if not exists:
        return '', self.loglines
    rloglines, rawdata = readFile(filename)
    self.loglines.extend(rloglines)
    return rawdata.rstrip('\n'), self.loglines
def _get_data(self, filepath, cachefilepath, url_params):
    """Return cached raw XML for filepath, refreshing from self.URL when stale.

    When the cache needs updating, the text payload is fetched and written
    to filepath first; whatever is on disk afterwards is returned ('' if
    the file still does not exist).
    """
    rawxml = ''
    if self._update_cache(filepath, cachefilepath):
        success, uloglines, payload = self.TEXTURL.Get(self.URL, params=url_params)
        self.LOGLINES.extend(uloglines)
        if success:
            success, wloglines = writeFile(py2_encode(payload), filepath)
            self.LOGLINES.extend(wloglines)
    exists, cloglines = checkPath(filepath, False)
    self.LOGLINES.extend(cloglines)
    if exists:
        rloglines, rawxml = readFile(filepath)
        self.LOGLINES.extend(rloglines)
    return rawxml
def _mark_one( show_info, mark_type, add_followed, tvmcache, tvmcachefile, tvmazeapi, lw ):
    """Mark one episode on TV Maze and return the (possibly refreshed) cache.

    show_info: dict with at least 'name', 'season', 'episode' for the show.
    mark_type: value passed to TV Maze's markEpisode as marked_as.
    add_followed: when True, the show may be added to followed shows during
        the cache refresh step.
    tvmcache / tvmcachefile: in-memory cache list and its on-disk JSON file.
    tvmazeapi: TV Maze API client; lw: logger.

    Matching is attempted three times with progressively fresher data:
    in-memory cache, cache file on disk, then a live refresh from TV Maze.
    NOTE(review): the early `return` on a failed episode lookup returns None
    rather than tvmcache — callers should tolerate that; confirm intent.
    """
    lw.log( ['starting process to mark show'] )
    tvmazeid = ''
    if show_info:
        lw.log( ['show info found, trying to match with cached TV Maze information first'] )
        tvmazeid = _match_from_followed_shows( show_info, tvmcache, lw )
        if not tvmazeid:
            # second attempt: reload the followed-shows cache from disk
            lw.log( ['no match, loading cache file from disk and trying again'] )
            loglines, results = readFile( tvmcachefile )
            lw.log( loglines )
            if results:
                tvmcache = json.loads( results )
            else:
                tvmcache = []
            tvmazeid = _match_from_followed_shows( show_info, tvmcache, lw )
        if not tvmazeid:
            # third attempt: refresh the followed shows from TV Maze itself
            lw.log( ['no match, getting updated followed shows from TV Maze and trying again'] )
            if add_followed:
                showname = show_info['name']
            else:
                showname = ''
            tvmcache = _update_followed_cache( tvmcachefile, tvmazeapi, lw, showname=showname )
            tvmazeid = _match_from_followed_shows( show_info, tvmcache, lw )
        if tvmazeid:
            lw.log( ['found tvmazeid of %s' % tvmazeid, 'attempting to get episode id'] )
            params = {'season':show_info['season'], 'number':show_info['episode']}
            success, loglines, results = tvmazeapi.getEpisodeBySeasonEpNumber( tvmazeid, params )
            lw.log( loglines )
            if not success:
                lw.log( ['no valid response returned from TV Maze, aborting'] )
                return
            try:
                episodeid = results['id']
            except KeyError:
                episodeid = ''
            if episodeid:
                lw.log( ['got back episode id of %s' % episodeid, 'marking episode on TV Maze'] )
                success, loglines, results = tvmazeapi.markEpisode( episodeid, marked_as=mark_type )
                lw.log( loglines )
                if not success:
                    lw.log( ['no valid response returned from TV Maze, show was not marked'] )
            else:
                lw.log( ['no episode id found'] )
        else:
            lw.log( ['no tvmazeid found'] )
    else:
        lw.log( ['no show information from Kodi'] )
    return tvmcache
def _get_show_ep_info( self, thetype, data ):
    """Resolve show/episode info for a Kodi notification and queue it.

    thetype: one of 'scanned', 'playing', or 'removed' — selects which
        item list the resolved entry is appended to.
    data: the Kodi notification payload dict.

    For library items ('episode' type) details are fetched via JSON-RPC;
    for 'removed' items they come from the on-disk episode cache instead
    (the library entry is already gone). Items missing a name/season/
    episode are silently dropped.
    """
    showid = 0
    epid = 0
    showname = ''
    if data.get( 'item', {} ).get( 'type', '' ) == 'episode':
        epid = data['item'].get( 'id', 0 )
        if epid:
            method = 'VideoLibrary.GetEpisodeDetails'
            params = '{"episodeid":%s, "properties":["season", "episode", "tvshowid"]}' % str( epid )
            r_dict = _get_json( method, params, self.LW )
            season = r_dict.get( 'episodedetails', {} ).get( 'season', 0 )
            episode = r_dict.get( 'episodedetails', {} ).get( 'episode', 0 )
            showid = r_dict.get( 'episodedetails', {} ).get( 'tvshowid', 0 )
            self.LW.log( ['moving on with season of %s, episode of %s, and showid of %s' % (str(season), str(episode), str(showid))] )
        if showid:
            # second JSON-RPC call to translate the show id into its name
            method = 'VideoLibrary.GetTVShowDetails'
            params = '{"tvshowid":%s}' % str( showid )
            r_dict = _get_json( method, params, self.LW )
            showname = r_dict.get( 'tvshowdetails', {} ).get( 'label', '' )
            self.LW.log( ['moving on with TV show name of %s' % showname] )
    elif thetype == 'removed':
        epid = data.get( 'id', 0 )
        loglines, episode_cache = readFile( self.EPISODECACHE )
        self.LW.log( loglines )
        if episode_cache:
            self.LW.log( ['checking in cache for epid of %s' % str( epid )] )
            ep_info = json.loads( episode_cache ).get( str( epid ), {} )
        else:
            ep_info = {}
        showname = ep_info.get( 'name', '' )
        season = ep_info.get( 'season', 0 )
        episode = ep_info.get( 'episode', 0 )
    # NOTE(review): season/episode can be unbound when neither branch above
    # ran, but showname == '' short-circuits the check so no NameError occurs
    if showname and season and episode:
        item = {'epid': epid, 'name':showname, 'season':season, 'episode':episode}
    else:
        item = {}
    if item:
        self.LW.log( ['storing item data of:', item] )
        if thetype == 'scanned':
            self.SCANNEDITEMS.append( item )
        elif thetype == 'playing':
            self.PLAYINGITEMS.append( item )
        elif thetype == 'removed':
            self.REMOVEDITEMS.append( item )
            # drop the removed episode from the on-disk cache as well
            self._update_episode_cache( epid=epid )
def _get_cache_time(self, cachefilepath):
    """Return the last.fm cache timeout stored in cachefilepath.

    Creates the cache-time file via _put_cache_time when missing. Returns 0
    when the stored value is not a valid integer.
    """
    self.LOGLINES.append(
        'getting the cache timeout information for last.fm')
    exists, cloglines = checkPath(cachefilepath, False)
    self.LOGLINES.extend(cloglines)
    success = True if exists else self._put_cache_time(cachefilepath)
    rawdata = ''
    if success:
        rloglines, rawdata = readFile(cachefilepath)
        self.LOGLINES.extend(rloglines)
    try:
        return int(rawdata)
    except ValueError:
        return 0
def _setPID(self):
    """Create the next sequential PID file and remember the previous one.

    The newest existing '*.pid' file in the data directory determines the
    next PID (previous + 1); with no usable previous file the PID restarts
    at 0 and PREVPIDFILE points at a dummy path. The new path is published
    via the module-level `pidfile` global.
    """
    self.LW.log(['setting PID file'])
    try:
        last_pidfile = glob.glob(
            os.path.join(self.ROOTPATH, 'data', '*.pid'))[-1]
        loglines, prev_pid = readFile(last_pidfile)
        self.LW.log(loglines)
        pid = str(int(prev_pid) + 1)
        self.PREVPIDFILE = os.path.join(self.ROOTPATH, 'data',
                                        'iguana-blaster-%s.pid' % prev_pid)
    # BUGFIX: also catch ValueError so a corrupt/empty pid file falls back
    # to the fresh-start path instead of crashing on int()
    except (IndexError, ValueError):
        pid = '0'
        self.PREVPIDFILE = os.path.join(self.ROOTPATH, 'data', 'dummy.pid')
    global pidfile
    pidfile = os.path.join(self.ROOTPATH, 'data',
                           'iguana-blaster-%s.pid' % pid)
    success, loglines = writeFile(pid, pidfile, wtype='w')
    self.LW.log(loglines)
def getBio(self, bio_params):
    """Return (bio_text, loglines) from the local artist bio XML file.

    Returns ('', loglines) when the file is missing/empty or contains no
    <content> element with text.
    """
    self.loglines = []
    filepath = os.path.join(bio_params.get(
        'localartistdir', ''), self.BIOFILEPATH)
    self.loglines.append('checking ' + filepath)
    rloglines, rawxml = readFile(filepath)
    self.loglines.extend(rloglines)
    if not rawxml:
        return '', self.loglines
    root = _xmltree.fromstring(py2_encode(rawxml))
    bio = ''
    for node in root.iter():
        if node.tag == "content":
            bio = node.text
    if bio:
        return bio, self.loglines
    self.loglines.append('no bio found in local xml file')
    return '', self.loglines
def _update_episode_cache( self, epid=None, item=None, items=None ):
    """Load, modify, and persist the on-disk episode cache JSON.

    epid: remove this episode id from the cache (no write if absent).
    item: add or replace a single cached entry keyed by its 'epid'.
    items: add or replace several cached entries.
    The file is only rewritten when something actually changed.
    """
    loglines, raw_cache = readFile( self.EPISODECACHE )
    self.LW.log( loglines )
    cache = json.loads( raw_cache ) if raw_cache else {}
    changed = True
    if epid:
        # membership test instead of try/del: skip the write on a miss
        if str( epid ) in cache:
            del cache[str( epid )]
        else:
            changed = False
    elif item:
        cache[str( item['epid'] )] = item
    elif items:
        for entry in items:
            cache[str( entry['epid'] )] = entry
    if changed:
        success, loglines = writeFile( json.dumps( cache ), self.EPISODECACHE, 'w' )
        self.LW.log( loglines )
def getMBID(self, mbid_params):
    """Return (musicbrainz_id, loglines) from the local bio XML/NFO file.

    The file is looked up as mbid_params['infodir']/self.BIOFILENAME.
    Returns ('', loglines) when the file is missing, unparseable, or has
    no <mbid> element.
    """
    self.LOGLINES = []
    filepath = os.path.join(mbid_params.get('infodir', ''), self.BIOFILENAME)
    exists, cloglines = checkPath(filepath, False)
    self.LOGLINES.extend(cloglines)
    if exists:
        rloglines, rawxml = readFile(filepath)
        self.LOGLINES.extend(rloglines)
        try:
            xmldata = _xmltree.fromstring(py2_encode(rawxml))
        except _xmltree.ParseError:
            self.LOGLINES.append('error reading musicbrainz ID from ' + filepath)
            return '', self.LOGLINES
        # BUGFIX: Element.getiterator() was removed in Python 3.9;
        # iter() is the supported equivalent (and matches the other
        # XML-walking methods in this file)
        for element in xmldata.iter():
            if element.tag == "mbid":
                return element.text, self.LOGLINES
        self.LOGLINES.append('no mbid found in' + filepath)
        return '', self.LOGLINES
    else:
        return '', self.LOGLINES
def _get_cache_time( self, cachefilepath ):
    """Return the fanarttv cache timeout, capped for donation/client keys.

    Creates the cache-time file via _put_cache_time when missing; a value
    that fails int() parsing yields 0. A donation or client key caps the
    returned timeout at its shorter expiry immediately.
    """
    self.LOGLINES.append( 'getting the cache timeout information for fanarttv' )
    exists, cloglines = checkPath( cachefilepath, False )
    self.LOGLINES.extend( cloglines )
    success = True if exists else self._put_cache_time( cachefilepath )
    rawdata = ''
    if success:
        rloglines, rawdata = readFile( cachefilepath )
        self.LOGLINES.extend( rloglines )
    try:
        cachetime = int( rawdata )
    except ValueError:
        cachetime = 0
    # honor a donation or client key cache limit immediately instead of
    # waiting for the old cached value to expire
    if self.HASDONATION and cachetime > self.CACHEEXPIREWITHDONATION:
        return self.CACHEEXPIREWITHDONATION
    if self.HASCLIENTKEY and cachetime > self.CACHEEXPIREWITHCLIENTKEY:
        return self.CACHEEXPIREWITHCLIENTKEY
    return cachetime