def GetAniDBTVDBMapCustom(media, movie):
    """Load a local custom AniDB<->TVDB mapping file (SCHUDLEE_CUSTOM).

    Walks upward from the media folder toward (and including) the library
    root, returning the first successfully parsed mapping file as an XML
    element, or None when no file is found or parsing fails everywhere.
    """
    AniDBTVDBMapCustom = None
    lib, root, path = common.GetLibraryRootPath(common.GetMediaDir(media, movie))
    dir = os.path.join(root, path)
    while dir:
        scudlee_filename_custom = os.path.join(dir, SCHUDLEE_CUSTOM)
        if os.path.exists(scudlee_filename_custom):
            try:
                AniDBTVDBMapCustom = XML.ElementFromString(Core.storage.load(scudlee_filename_custom))
                Log.Info("Local custom mapping file loaded: {}".format(scudlee_filename_custom))
            # BUGFIX: original bare 'except:' logged an undefined name 'e',
            # raising NameError inside the handler; bind the exception.
            except Exception as e:
                Log.Error("Failed to open: '%s', error: '%s'" % (scudlee_filename_custom, e))
            else:
                break  # parsed successfully, stop searching
        # Clear variable if we've just finished processing down to (and including) root
        dir = os.path.dirname(dir) if len(dir) > len(root) else ''
    else:
        Log.Info("Local custom mapping file not present: {}".format(SCHUDLEE_CUSTOM))
    return AniDBTVDBMapCustom
def GetAniDBMovieSets():
    """Fetch the ScudLee anime movie-set list into the module-level cache."""
    global AniDBMovieSets
    movieset_filename = os.path.basename(SCHUDLEE_MOVIESET)
    AniDBMovieSets = common.LoadFile(filename=movieset_filename,
                                     relativeDirectory="AnimeLists",
                                     url=SCHUDLEE_MOVIESET,
                                     cache=CACHE_1MONTH)
    if not AniDBMovieSets:
        Log.Error("GetAniDBMovieSets() - Failed to load core file '%s'" % movieset_filename)
def GetAniDBMovieSets():
    """Download the ScudLee anime movie-set list and store it in the module-level global."""
    global AniDBMovieSets
    ANIME_MOVIESET = 'https://raw.githubusercontent.com/ScudLee/anime-lists/master/anime-movieset-list.xml'
    local_name = os.path.basename(ANIME_MOVIESET)
    AniDBMovieSets = common.LoadFile(filename=local_name,
                                     relativeDirectory="AnimeLists",
                                     url=ANIME_MOVIESET,
                                     cache=CACHE_1WEEK * 4)
    if not AniDBMovieSets:
        Log.Error("GetAniDBMovieSets() - Failed to load core file '%s'" % local_name)
def GetAniDBTVDBMapCustom(media, movie):
    """Load a local custom AniDB<->TVDB mapping file (SCHUDLEE_CUSTOM).

    Walks upward from the media folder until the drive/filesystem root,
    returning the first successfully parsed mapping file as an XML element,
    or None when no file is found or parsing fails everywhere.
    """
    AniDBTVDBMapCustom = None
    lib, root, path = common.GetLibraryRootPath(common.GetMediaDir(media, movie))
    dir = os.path.join(root, path)
    # splitdrive strips 'C:' on Windows so the loop stops at the filesystem root
    while dir and os.path.splitdrive(dir)[1] != os.sep:
        scudlee_filename_custom = os.path.join(dir, SCHUDLEE_CUSTOM)
        if os.path.exists(scudlee_filename_custom):
            try:
                AniDBTVDBMapCustom = XML.ElementFromString(Core.storage.load(scudlee_filename_custom))
                Log.Info("Local custom mapping file loaded: {}".format(scudlee_filename_custom))
            # BUGFIX: original bare 'except:' logged an undefined name 'e',
            # raising NameError inside the handler; bind the exception.
            except Exception as e:
                Log.Error("Failed to open: '%s', error: '%s'" % (scudlee_filename_custom, e))
            else:
                break  # parsed successfully, stop searching
        dir = os.path.dirname(dir)
    else:
        Log.Info("Local custom mapping file not present: {}".format(SCHUDLEE_CUSTOM))
    return AniDBTVDBMapCustom
def Search(results, media, lang, manual, movie):
    '''Search TheTVDB for the series and append scored MetadataSearchResult entries.

    Returns the highest Levenshtein-based score (0-100) among the matches.
    '''
    Log.Info("=== TheTVDB.Search() ===".ljust(157, '='))
    search_title = media.title if movie else media.show
    best_score = 0
    try:
        search_xml = XML.ElementFromURL(TVDB_SERIE_SEARCH + quote(search_title),
                                        headers=common.COMMON_HEADERS,
                                        cacheTime=CACHE_1HOUR * 24)
    except Exception as e:
        Log.Error("TVDB Loading search XML failed, Exception: '%s'" % e)
    else:
        for series_node in search_xml.xpath('Series'):
            series_id = GetXml(series_node, 'seriesid')
            series_name = GetXml(series_node, 'SeriesName')
            candidate = series_name.encode('utf-8')
            if candidate == '** 403: Series Not Permitted **':
                continue
            # Score: 100 for an exact match, otherwise Levenshtein ratio
            if search_title == candidate:
                score = 100
            else:
                score = 100 - 100 * Util.LevenshteinDistance(search_title, candidate) / max(len(search_title), len(candidate))
            if best_score < score:
                best_score = score
            Log.Info("TVDB - score: '%3d', id: '%6s', title: '%s'" % (score, series_id, series_name))
            results.Append(MetadataSearchResult(id="%s-%s" % ("tvdb", series_id),
                                                name="%s [%s-%s]" % (series_name, "tvdb", series_id),
                                                year=None, lang=lang, score=score))
    return best_score
def find_tvdb4_file(file_to_find):
    # Locate `file_to_find` by walking up from the media directory toward the
    # filesystem root; returns the parsed XML element if the file content is
    # valid XML, the raw file content otherwise, or "" when nothing is found
    # or any error occurs.
    # NOTE(review): 'media' and 'movie' are not parameters of this function —
    # presumably module-level globals elsewhere in the file; if they are not,
    # the NameError is swallowed by the broad except below. TODO confirm.
    try:
        folder = common.GetMediaDir(media, movie)
        # Stop once the path has been reduced to a bare root ("/" or "X:\")
        while folder and folder[-1] not in ["/", "\\"]:
            filename = os.path.join(folder, file_to_find)
            if os.path.exists(filename):
                file = Core.storage.load(os.path.realpath(filename))
                # Prefer parsed XML; fall back to raw text on parse failure
                try: return XML.ElementFromString(file)
                except: return file
            folder = os.path.dirname(folder)
        else:
            # while-else: loop exhausted without finding the file
            Log.Info("No '{}' file detected locally".format(file_to_find))
    except Exception as e:
        Log.Error("Issues in finding setup info as directories have most likely changed post scan into Plex, Exception: '%s'" % e)
    return ""
def MakeGraphqlQuery(document, variables):
    """POST a GraphQL query to the AniList API and return its 'data' payload.

    document:  GraphQL query string.
    variables: query variables dict; its first key names the cache
               sub-directory and its value names the cache file
               (assumes a single-entry dict -- TODO confirm with callers).
    Returns the decoded 'data' object, or None when the API reports errors.
    """
    Log.Info("Query: {}".format(document))
    Log.Info("Variables: {}".format(variables))
    # Portability fix: dict.keys()[0] works only on Python 2 (keys() is a
    # list there); list(variables)[0] is equivalent and works on 2 and 3.
    source = list(variables)[0]
    data = JSON.StringFromObject({"query": document, "variables": variables})
    response = common.LoadFile(filename=str(variables[source]) + '.json',
                               relativeDirectory=os.path.join('AniList', 'json', source),
                               url=GRAPHQL_API_URL, data=data, cache=CACHE_1DAY)
    # EX: {"data":null,"errors":[{"message":"Not Found.","hint":"Use POST request to access graphql subdomain.","status":404}]}
    if Dict(response, 'errors', default=[]):
        Log.Error("Got error: {}".format(Dict(response, 'errors')[0]))
        return None
    return Dict(response, "data")
def Search(results, media, lang, manual, movie):
    """Search TheMovieDb and append scored MetadataSearchResult entries.

    Returns the highest Levenshtein-based score (0-100) among the matches.
    """
    Log.Info("=== TheMovieDb.Search() ===".ljust(157, '='))
    # 'Uchiage Hanabi, Shita kara Miru ka? ...' failed with: TypeError: not all arguments converted during string formatting
    # Fixed with: tmdb_url = TMDB_MOVIE_SEARCH.format(query=String.Quote(orig_title))
    # BUGFIX: removed a duplicated fetch block that logged and downloaded
    # 'tmdb_url' before that variable was ever assigned (unhandled NameError).
    orig_title = String.Quote(media.name if manual and movie else media.title if movie else media.show)
    maxi = 0
    Log.Info("TMDB - url: " + TMDB_MOVIE_SEARCH.format(query=orig_title))
    try:
        json = JSON.ObjectFromURL(TMDB_MOVIE_SEARCH.format(query=orig_title), sleep=2.0, headers=common.COMMON_HEADERS, cacheTime=CACHE_1WEEK * 2)
    except Exception as e:
        Log.Error("get_json - Error fetching JSON page '%s', Exception: '%s'" % (TMDB_MOVIE_SEARCH.format(query=orig_title), e))
    else:
        if isinstance(json, dict) and 'results' in json:
            # BUGFIX: loop variable renamed from 'movie' — it shadowed the
            # boolean 'movie' parameter of this function.
            for entry in json['results']:
                a, b = orig_title, entry['title'].encode('utf-8')
                score = 100 - 100 * Util.LevenshteinDistance(a, b) / max(len(a), len(b)) if a != b else 100
                if maxi < score:
                    maxi = score
                Log.Info("TMDB - score: '%3d', id: '%6s', title: '%s'" % (score, entry['id'], entry['title']))
                results.Append(MetadataSearchResult(id="tmdb-" + str(entry['id']),
                                                    name="{} [{}-{}]".format(entry['title'], "tmdb", entry['id']),
                                                    year=None, lang=lang, score=score))
                # BUGFIX: original tested "'' in movie", which can never be
                # true for TMDb result dicts; the intent is the 'adult' flag.
                if 'adult' in entry and entry['adult'] != "null":
                    Log.Info("adult: '{}'".format(entry['adult']))
    return maxi
def GetMetadata(movie, MALid):
    """Fetch MyAnimeList metadata for MALid via the Fribbtastic proxy API.

    Returns a nested dict (title/summary/score/genres/status, per-episode
    title + air date under seasons['1'], and posters/art/banners) — empty
    when MALid is missing or not numeric, or when the download fails.
    """
    Log.Info("=== MyAnimeList.GetMetadata() ===".ljust(157, '='))
    MAL_HTTP_API_URL = "http://fribbtastic-api.net/fribbtastic-api/services/anime?id="
    MAL_PREFIX = "https://myanimelist.cdn-dena.com"  # Some links in the XML will come from TheTVDB, not adding those....
    MyAnimeList_dict = {}
    Log.Info("MALid: '%s'" % MALid)
    if not MALid or not MALid.isdigit():
        return MyAnimeList_dict

    Log.Info("--- series ---".ljust(157, '-'))
    xml = common.LoadFile(filename=MALid + ".xml", relativeDirectory=os.path.join('MyAnimeList', 'xml'), url=MAL_HTTP_API_URL + MALid, cache=CACHE_1DAY * 7)
    if isinstance(xml, str):
        Log.Error('Invalid str returned: "{}"'.format(xml))
    elif xml:
        ### Series-level fields ###
        Log.Info("[ ] title: {}".format(SaveDict(GetXml(xml, 'title'), MyAnimeList_dict, 'title')))
        Log.Info("[ ] summary: {}".format(SaveDict(GetXml(xml, 'synopsis'), MyAnimeList_dict, 'summary')))
        Log.Info("[ ] score: {}".format(SaveDict(GetXml(xml, 'rating'), MyAnimeList_dict, 'score')))
        Log.Info("[ ] originally_available_at: {}".format(SaveDict(GetXml(xml, 'firstAired'), MyAnimeList_dict, 'originally_available_at')))
        if GetXml(xml, '//anime/genres/genre'):
            Log.Info("[ ] genres: {}".format(SaveDict(sorted([item.text for item in xml.xpath('//anime/genres/genre')]), MyAnimeList_dict, 'genres')))
        if GetXml(xml, 'status') == 'Currently Airing':
            Log.Info("[ ] status: {}".format(SaveDict("Continuing", MyAnimeList_dict, 'status')))
        if GetXml(xml, 'status') == 'Finished Airing':
            Log.Info("[ ] status: {}".format(SaveDict("Ended", MyAnimeList_dict, 'status')))

        Log.Info("--- episodes ---".ljust(157, '-'))
        for item in xml.xpath('//anime/episodes/episode') or []:
            # BUGFIX: episode title/air-date were read from 'xml' (the whole
            # document) instead of 'item', so every episode got the first
            # episode's values; read them from the per-episode node.
            ep_number, ep_title, ep_air = GetXml(item, 'episodeNumber'), GetXml(item, 'engTitle'), GetXml(item, 'aired')
            # BUGFIX: log arguments were swapped (air_date showed the title
            # and vice versa).
            Log.Info('[ ] s1e{:>3} air_date: {}, title: "{}"'.format(ep_number, ep_air, ep_title))
            SaveDict(ep_title, MyAnimeList_dict, 'seasons', "1", 'episodes', ep_number, 'title')
            SaveDict(ep_air, MyAnimeList_dict, 'seasons', "1", 'episodes', ep_number, 'originally_available_at')

        Log.Info("--- images ---".ljust(157, '-'))
        # Only keep images hosted on the MAL CDN (MAL_PREFIX); others map to ""
        for item in xml.xpath('//anime/covers/cover'):
            Log.Info("[ ] poster: {}".format(SaveDict(("MyAnimeList/" + "/".join(item.text.split('/')[3:]), 50, None) if item.text.startswith(MAL_PREFIX) else "", MyAnimeList_dict, 'posters', item.text)))
        for item in xml.xpath('//anime/backgrounds/background'):
            Log.Info("[ ] art: {}".format(SaveDict(("MyAnimeList/" + "/".join(item.text.split('/')[3:]), 50, None) if item.text.startswith(MAL_PREFIX) else "", MyAnimeList_dict, 'art', item.text)))
        for item in xml.xpath('//anime/banners/banner'):
            Log.Info("[ ] banner: {}".format(SaveDict(("MyAnimeList/" + "/".join(item.text.split('/')[3:]), 50, None) if item.text.startswith(MAL_PREFIX) else "", MyAnimeList_dict, 'banners', item.text)))

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("MyAnimeList_dict: {}".format(DictString(MyAnimeList_dict, 4)))
    return MyAnimeList_dict
def GetMetadata(media, movie, error_log, source, AniDBid, TVDBid, AniDBMovieSets, mappingList):
    ''' Download metadata to dict_AniDB, ANNid, MALid '''
    # NOTE(review): formatting reconstructed from a collapsed source; the
    # statement order is preserved but nesting of some sections (notably the
    # trailing error_log block) should be verified against upstream HAMA.
    Log.Info("=== AniDB.GetMetadata() ===".ljust(157, '='))
    ANIDB_HTTP_API_URL = 'http://api.anidb.net:9001/httpapi?request=anime&client=hama&clientver=1&protover=1&aid='
    ANIDB_PIC_BASE_URL = 'http://img7.anidb.net/pics/anime/'  # AniDB picture directory
    ANIDB_PIC_THUMB_URL = 'http://img7.anidb.net/pics/anime/thumbs/150/{}.jpg-thumb.jpg'
    AniDB_dict, ANNid, MALid = {}, "", ""
    original = AniDBid  # the AniDBid this call was made for
    language_posters = [language.strip() for language in Prefs['PosterLanguagePriority'].split(',')]
    priority_posters = [provider.strip() for provider in Prefs['posters'].split(',')]

    ### Build the list of anidbids for files present ###
    # multi anidbid required only for tvdb numbering
    if source.startswith("tvdb") or source.startswith("anidb") and not movie and max(map(int, media.seasons.keys())) > 1:
        full_array = [anidbid for season in Dict(mappingList, 'TVDB') or [] for anidbid in Dict(mappingList, 'TVDB', season) if season and 'e' not in season and anidbid.isdigit()]
        AniDB_array = {AniDBid: []} if Dict(mappingList, 'defaulttvdbseason') == '1' or Dict(mappingList, 'TVDB', 'sa') else {}
        for season in sorted(media.seasons, key=common.natural_sort_key) if not movie else []:  # For each season, media, then use metadata['season'][season]...
            for episode in sorted(media.seasons[season].episodes, key=common.natural_sort_key):
                new_season, new_episode, anidbid = AnimeLists.anidb_ep(mappingList, season, episode)
                numbering = 's{}e{}'.format(season, episode)
                if anidbid and not (new_season == '0' and new_episode == '0'):
                    SaveDict([numbering], AniDB_array, anidbid)
                else:
                    continue
    elif source.startswith('anidb') and AniDBid != "":
        full_array, AniDB_array = [AniDBid], {AniDBid: []}
    else:
        full_array, AniDB_array = [], {}

    Log.Info("AniDBid: {}, AniDBids list: {}, source: {}".format(AniDBid, full_array, source))
    for anidbid in AniDB_array:
        Log.Info('[+] {:>5}: {}'.format(anidbid, AniDB_array[anidbid]))
    Log.Info('language_posters: {}'.format(language_posters))

    ### Load anidb xmls in tvdb numbering format if needed ###
    for AniDBid in full_array:
        Log.Info(("--- %s ---" % AniDBid).ljust(157, '-'))
        Log.Info('AniDBid: {}, url: {}'.format(AniDBid, ANIDB_HTTP_API_URL + AniDBid))
        Log.Info(("--- %s.series ---" % AniDBid).ljust(157, '-'))
        xml = common.LoadFile(filename=AniDBid + ".xml", relativeDirectory=os.path.join("AniDB", "xml"), url=ANIDB_HTTP_API_URL + AniDBid)  # AniDB title database loaded once every 2 weeks
        if not xml or isinstance(xml, str):
            # Download failed (possibly banned by AniDB) or an error string came back;
            # fall back to the offline titles database for the title fields.
            if not xml:
                SaveDict(True, AniDB_dict, 'Banned')
            if isinstance(xml, str):
                Log.Error('Invalid str returned: "{}"'.format(xml))
            title, original_title, language_rank = GetAniDBTitle(AniDBTitlesDB.xpath('/animetitles/anime[@aid="{}"]/title'.format(AniDBid)))
            if AniDBid == original or len(full_array) == 1:
                Log.Info("[ ] title: {}".format(SaveDict(title, AniDB_dict, 'title')))
                Log.Info("[ ] original_title: {}".format(SaveDict(original_title, AniDB_dict, 'original_title')))
                Log.Info("[ ] language_rank: {}".format(SaveDict(language_rank, AniDB_dict, 'language_rank')))
        elif xml:
            title, original_title, language_rank = GetAniDBTitle(xml.xpath('/anime/titles/title'))
            if AniDBid == original or len(full_array) == 1:  ### for each main anime AniDBid ###
                Log.Info("[ ] title: {}".format(SaveDict(title, AniDB_dict, 'title')))
                Log.Info("[ ] original_title: {}".format(SaveDict(original_title, AniDB_dict, 'original_title')))
                Log.Info("[ ] language_rank: {}".format(SaveDict(language_rank, AniDB_dict, 'language_rank')))
            if SaveDict(GetXml(xml, 'startdate'), AniDB_dict, 'originally_available_at'):
                Log.Info("[ ] originally_available_at: '{}'".format(AniDB_dict['originally_available_at']))
            # Copy the series summary onto the default TVDB season as well
            if SaveDict(summary_sanitizer(GetXml(xml, 'description')), AniDB_dict, 'summary') and not movie and Dict(mappingList, 'defaulttvdbseason').isdigit() and mappingList['defaulttvdbseason'] in media.seasons:
                SaveDict(AniDB_dict['summary'], AniDB_dict, 'seasons', mappingList['defaulttvdbseason'], 'summary')
            Log.Info("[ ] rating: '{}'".format(SaveDict(GetXml(xml, 'ratings/temporary'), AniDB_dict, 'rating')))

            ### Posters ###
            if GetXml(xml, 'picture'):
                rank = 1
                # rank is shifted by the user's language and provider priorities
                if 'en' in language_posters:
                    rank = (rank // 30) * 30 * language_posters.index('en') + rank % 30
                if 'AniDB' in priority_posters:
                    rank = rank + 6 * priority_posters.index('AniDB')
                AniDB_dict['posters'] = {ANIDB_PIC_BASE_URL + GetXml(xml, 'picture'): (os.path.join('AniDB', 'poster', GetXml(xml, 'picture')), rank, ANIDB_PIC_THUMB_URL.format(GetXml(xml, 'picture').split('.')[0]))}

            ### genre ###
            RESTRICTED_GENRE = {"18 restricted": 'X', "pornography": 'X', "tv censoring": 'TV-MA', "borderline p**n": 'TV-MA'}
            for tag in xml.xpath('tags/tag'):
                if GetXml(tag, 'name') and tag.get('weight', '').isdigit() and int(tag.get('weight', '') or '200') >= int(Prefs['MinimumWeight'] or '200'):
                    SaveDict([string.capwords(GetXml(tag, 'name'), '-')], AniDB_dict, 'genres')
                    if GetXml(tag, 'name').lower() in RESTRICTED_GENRE:
                        AniDB_dict['content_rating'] = RESTRICTED_GENRE[GetXml(tag, 'name').lower()]
            if Dict(AniDB_dict, 'genres'):
                AniDB_dict['genres'].sort()
            SaveDict("Continuing" if GetXml(xml, 'Anime/enddate') == "1970-01-01" else "Ended", AniDB_dict, 'status')
            Log.Info("[ ] genres ({}/{} above {} weight): {}".format(len(Dict(AniDB_dict, 'genres')), len(xml.xpath('tags/tag')), int(Prefs['MinimumWeight'] or 200), Dict(AniDB_dict, 'genres')))

            ### Movie set collections ###
            for element in AniDBMovieSets.xpath("/anime-set-list/set/anime"):
                if element.get('anidbid') == AniDBid or element.get('anidbid') in full_array:
                    node = element.getparent()
                    title, main, language_rank = GetAniDBTitle(node.xpath('titles')[0])
                    if title not in Dict(AniDB_dict, 'collections', default=[]):
                        Log.Info("[ ] title: {}, main: {}, language_rank: {}".format(title, main, language_rank))
                        SaveDict([title], AniDB_dict, 'collections')
                        Log.Info("[ ] collection: AniDBid '%s' is part of movie collection: '%s', related_anime_list: %s" % (AniDBid, title, str(full_array)))
            if not Dict(AniDB_dict, 'collections'):
                Log.Info("[ ] collection: AniDBid '%s' is not part of any collection, related_anime_list: %s" % (AniDBid, str(full_array)))

            #roles ### NEW, NOT IN Plex FrameWork Documentation 2.1.1 ###
            Log.Info(("--- %s.actors ---" % AniDBid).ljust(157, '-'))
            for role in xml.xpath('characters/character[(@type="secondary cast in") or (@type="main character in")]'):
                try:
                    if GetXml(role, 'seiyuu') and GetXml(role, 'name'):
                        role_dict = {'role': role.find('name').text, 'name': role.find('seiyuu').text, 'photo': ANIDB_PIC_BASE_URL + role.find('seiyuu').get('picture')}
                        SaveDict([role_dict], AniDB_dict, 'roles')
                        Log.Info('[ ] role: {:<20}, name: {:<20}, photo: {}'.format(role_dict['role'], role_dict['name'], role_dict['photo']))
                except Exception as e:
                    Log.Info("Seyiuu error: {}".format(e))

            if movie:
                Log.Info("[ ] year: '{}'".format(SaveDict(GetXml(xml, 'startdate')[0:4], AniDB_dict, 'year')))
                Log.Info(("--- %s.summary info ---" % AniDBid).ljust(157, '-'))
            ### Series ###
            else:  ### not listed for serie but is for eps
                roles = {"Animation Work": "studio", "Direction": "directors", "Series Composition": "producers", "Original Work": "writers", "Script": "writers", "Screenplay": "writers"}
                ep_roles = {}
                for creator in xml.xpath('creators/name'):
                    for role in roles:
                        if not role in creator.get('type'):
                            continue
                        if roles[role] == "studio":
                            SaveDict(creator.text, AniDB_dict, 'studio')
                        else:
                            SaveDict([creator.text], ep_roles, roles[role])
                Log.Info("[ ] roles (creators tag): " + str(ep_roles))
                if SaveDict(GetXml(xml, 'type') == 'Movie', AniDB_dict, 'movie'):
                    Log.Info("'movie': '{}'".format(AniDB_dict['movie']))

                ### Translate into season/episode mapping ###
                numEpisodes, totalDuration, mapped_eps, ending_table, op_nb = 0, 0, [], {}, 0
                # special-episode prefix -> [episode number offset, label]
                specials = {'S': [0, 'Special'], 'C': [100, 'Opening/Ending'], 'T': [200, 'Trailer'], 'P': [300, 'Parody'], 'O': [400, 'Other']}
                movie_ep_groups = {}
                missing = {'0': [], '1': []}

                ### Episodes (and specials) not always in right order ###
                Log.Info(("--- %s.episodes ---" % AniDBid).ljust(157, '-'))
                ending_offset = 99
                # sort by (epno type, numeric part of epno)
                for ep_obj in sorted(xml.xpath('episodes/episode'), key=lambda x: [int(x.xpath('epno')[0].get('type')), int(x.xpath('epno')[0].text if x.xpath('epno')[0].text.isdigit() else x.xpath('epno')[0].text[1:])]):
                    ### Title, Season, Episode number, Specials ###
                    title, main, language_rank = GetAniDBTitle(ep_obj.xpath('title'), [language.strip() for language in Prefs['EpisodeLanguagePriority'].split(',')])
                    epNum = ep_obj.xpath('epno')[0]
                    epNumType = epNum.get('type')
                    season = "1" if epNumType == "1" else "0"
                    # type 3 = credits: endings are shifted to 150+ so they don't collide with openings
                    if epNumType == "3" and ep_obj.xpath('title')[0].text.startswith('Ending') and int(epNum.text[1:]) - 1 < ending_offset:
                        ending_offset = int(epNum.text[1:]) - 1
                    if epNumType == "3" and int(epNum.text[1:]) > ending_offset:
                        episode = str(int(epNum.text[1:]) + 150 - ending_offset)  # shifted to 150 for 1st ending
                    elif epNumType == "1":
                        episode = epNum.text
                    else:
                        episode = str(specials[epNum.text[0]][0] + int(epNum.text[1:]))
                    numbering = "s{}e{:>3}".format(season, episode)

                    # If tvdb numbering used, save anidb episode meta using tvdb numbering
                    if source.startswith("tvdb") or source.startswith("anidb") and not movie and max(map(int, media.seasons.keys())) > 1:
                        season, episode = AnimeLists.tvdb_ep(mappingList, season, episode, AniDBid)  ### Broken for tvdbseason='a'
                        # Get season from absolute number OR convert episode number to absolute number
                        if source in ('tvdb3', 'tvdb4') and season not in ('-1', '0'):
                            if season == 'a' or source == 'tvdb4':
                                season = Dict(mappingList, 'absolute_map', episode, default=(season, episode))[0]
                            elif episode != '0':
                                try:
                                    episode = list(Dict(mappingList, 'absolute_map', default={}).keys())[list(Dict(mappingList, 'absolute_map', default={}).values()).index((season, episode))]
                                except Exception as e:
                                    Log.Error("Exception: {}".format(e))
                        if season == '0' and episode == '0' or not season in media.seasons or not episode in media.seasons[season].episodes:
                            Log.Info('[ ] {} => s{:>1}e{:>3} epNumType: {}'.format(numbering, season, episode, epNumType)); continue
                        ### Series poster as season poster ###
                        if GetXml(xml, 'picture') and not Dict(AniDB_dict, 'seasons', season, 'posters', ANIDB_PIC_BASE_URL + GetXml(xml, 'picture')):
                            rank = 1
                            if 'en' in language_posters:
                                rank = (rank // 30) * 30 * language_posters.index('en') + rank % 30
                            if 'AniDB' in priority_posters:
                                rank = rank + 6 * priority_posters.index('AniDB')
                            SaveDict((os.path.join('AniDB', 'poster', GetXml(xml, 'picture')), rank, ANIDB_PIC_THUMB_URL.format(GetXml(xml, 'picture').split('.')[0])), AniDB_dict, 'seasons', season, 'posters', ANIDB_PIC_BASE_URL + GetXml(xml, 'picture'))
                    ### In AniDB numbering, Movie episode group, create key and create key in dict with empty list if doesn't exist ###
                    else:
                        key = ''
                        if epNumType == '1' and GetXml(xml, '/anime/episodecount') == '1' and GetXml(xml, '/anime/type') in ('Movie', 'OVA'):
                            key = '1' if title in ('Complete Movie', 'OVA') else title[-1] if title.startswith('Part ') and title[-1].isdigit() else ''  #'-1'
                            if key:
                                SaveDict([], movie_ep_groups, key)
                        # Episode missing from disk
                        if not season in media.seasons or not episode in media.seasons[season].episodes:
                            Log.Info('[ ] {} => s{:>1}e{:>3} air_date: {}'.format(numbering, season, episode, GetXml(ep_obj, 'airdate')))
                            current_air_date = GetXml(ep_obj, 'airdate').replace('-', '')
                            current_air_date = int(current_air_date) if current_air_date.isdigit() and int(current_air_date) > 10000000 else 99999999
                            # only report as missing if it aired more than a day ago
                            if int(time.strftime("%Y%m%d")) > current_air_date + 1:
                                if epNumType == '1' and key:
                                    SaveDict([numbering], movie_ep_groups, key)
                                elif epNumType in ['1', '2']:
                                    SaveDict([episode], missing, season)
                            continue

                    ### Episodes ###
                    SaveDict(language_rank, AniDB_dict, 'seasons', season, 'episodes', episode, 'language_rank')
                    SaveDict(title, AniDB_dict, 'seasons', season, 'episodes', episode, 'title')
                    Log.Info('[X] {} => s{:>1}e{:>3} air_date: {} language_rank: {}, title: "{}"'.format(numbering, season, episode, GetXml(ep_obj, 'airdate'), language_rank, title))
                    if GetXml(ep_obj, 'length').isdigit():
                        SaveDict(int(GetXml(ep_obj, 'length')) * 1000 * 60, AniDB_dict, 'seasons', season, 'episodes', episode, 'duration')  # AniDB stores it in minutes, Plex save duration in millisecs
                        if season == "1":
                            numEpisodes, totalDuration = numEpisodes + 1, totalDuration + int(GetXml(ep_obj, 'length'))
                    SaveDict(GetXml(ep_obj, 'rating'), AniDB_dict, 'seasons', season, 'episodes', episode, 'rating')
                    SaveDict(GetXml(ep_obj, 'airdate'), AniDB_dict, 'seasons', season, 'episodes', episode, 'originally_available_at')
                    if SaveDict(summary_sanitizer(GetXml(ep_obj, 'summary')), AniDB_dict, 'seasons', season, 'episodes', episode, 'summary'):
                        Log.Info(" - [ ] summary: {}".format(Dict(AniDB_dict, 'seasons', season, 'episodes', episode, 'summary')))
                ### End of for ep_obj... ###

                Log.Info(("--- %s.summary info ---" % AniDBid).ljust(157, '-'))
                if SaveDict(int(totalDuration) / int(numEpisodes) if int(numEpisodes) else 0, AniDB_dict, 'duration'):
                    Log.Info("Duration: {}, numEpisodes: {}, average duration: {}".format(str(totalDuration), str(numEpisodes), AniDB_dict['duration']))

                ### AniDB numbering Missing Episodes ###
                if source.startswith("anidb") and not movie and max(map(int, media.seasons.keys())) <= 1:
                    if movie_ep_groups:
                        Log.Info("Movie/OVA Ep Groups: %s" % movie_ep_groups)  # movie_ep_groups: {'1': ['s1e1'], '3': ['s1e4', 's1e5', 's1e6'], '2': ['s1e3'], '-1': []}
                        SaveDict([value for key in movie_ep_groups for value in movie_ep_groups[key] if 0 < len(movie_ep_groups[key]) < int(key)], missing, '1')
                    for season in sorted(missing):
                        missing_eps = sorted(missing[season], key=common.natural_sort_key)
                        Log.Info('Season: {} Episodes: {} not on disk'.format(season, missing_eps))
                        if missing_eps:
                            error_log['Missing Specials' if season == '0' else 'Missing Episodes'].append("AniDBid: %s | Title: '%s' | Missing Episodes: %s" % (common.WEB_LINK % (common.ANIDB_SERIE_URL + AniDBid, AniDBid), AniDB_dict['title'], str(missing_eps)))
            ### End of if not movie ###

            # Generate relations_map for anidb3/4(tvdb1/6) modes
            for relatedAnime in xml.xpath('/anime/relatedanime/anime'):
                if relatedAnime.get('id') not in Dict(mappingList, 'relations_map', AniDBid, relatedAnime.get('type'), default=[]):
                    SaveDict([relatedAnime.get('id')], mappingList, 'relations_map', AniDBid, relatedAnime.get('type'))

            # External IDs
            ANNid = GetXml(xml, "/anime/resources/resource[@type='1']/externalentity/identifier")
            MALid = GetXml(xml, "/anime/resources/resource[@type='2']/externalentity/identifier")

            # Logs
            if not Dict(AniDB_dict, 'summary'):
                error_log['AniDB summaries missing'].append("AniDBid: %s" % (common.WEB_LINK % (common.ANIDB_SERIE_URL + AniDBid, AniDBid) + " | Title: '%s'" % Dict(AniDB_dict, 'title')))
            if not Dict(AniDB_dict, 'posters'):
                error_log['AniDB posters missing'].append("AniDBid: %s" % (common.WEB_LINK % (common.ANIDB_SERIE_URL + AniDBid, AniDBid) + " | Title: '%s'" % Dict(AniDB_dict, 'title')))

    Log.Info("ANNid: '%s', MALid: '%s', xml loaded: '%s'" % (ANNid, MALid, str(xml is not None)))
    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("relations_map: {}".format(DictString(Dict(mappingList, 'relations_map', default={}), 1)))
    Log.Info("AniDB_dict: {}".format(DictString(AniDB_dict, 4)))
    return AniDB_dict, ANNid, MALid
def GetMetadata(media, movie, source, TVDBid, mappingList, num=0):
    """ [tvdb4.mapping.xml] Attempt to get the ASS's episode mapping data
        [tvdb4.posters.xml] Attempt to get the ASS's image data
    """
    Log.Info('=== tvdb4.GetMetadata() ==='.ljust(157, '='))
    TVDB4_dict, TVDB4_mapping, TVDB4_xml = {}, None, None
    # Only active for series scanned in 'tvdb4' (absolute numbering) mode
    if movie or not source == "tvdb4":
        Log.Info("not tvdb4 mode")
        return TVDB4_dict
    Log.Info("tvdb4 mode")

    def find_tvdb4_file(file_to_find):
        # Walk up from the media directory toward the root looking for a local
        # override file; returns parsed XML, raw text on parse failure, or "".
        try:
            folder = common.GetMediaDir(media, movie)
            while folder and folder[-1] not in ["/", "\\"]:
                filename = os.path.join(folder, file_to_find)
                if os.path.exists(filename):
                    file = Core.storage.load(os.path.realpath(filename))
                    try: return XML.ElementFromString(file)
                    except: return file
                folder = os.path.dirname(folder)
            else:
                Log.Info("No '{}' file detected locally".format(file_to_find))
        except Exception as e:
            Log.Error("Issues in finding setup info as directories have most likely changed post scan into Plex, Exception: '%s'" % e)
        return ""

    Log.Info("--- tvdb4.mapping.xml ---".ljust(157, '-'))
    TVDB4_mapping = find_tvdb4_file("tvdb4.mapping")
    if TVDB4_mapping:
        Log.Debug("'tvdb4.mapping' file detected locally")
    else:
        TVDB4_mapping = TVDB4_mapping or common.LoadFile(filename=os.path.basename(TVDB4_MAPPING_URL), url=TVDB4_MAPPING_URL, cache=CACHE_1DAY * 6)  # AniDB title database loaded once every 2 weeks
    entry = ""
    # A local plain-text override comes back as str; the online file as XML
    if isinstance(TVDB4_mapping, str):
        entry = TVDB4_mapping
    else:
        entry = common.GetXml(TVDB4_mapping, "/tvdb4entries/anime[@tvdbid='%s']" % TVDBid)
    if not entry:
        Log.Error("TVDBid '%s' is not found in mapping file" % TVDBid)
    if entry:
        # Each line: 'season|first_absolute_ep|last_absolute_ep|title...'
        for line in filter(None, entry.strip().splitlines()):
            season = line.strip().split("|")
            for absolute_episode in range(int(season[1]), int(season[2]) + 1):
                SaveDict((str(int(season[0])), str(absolute_episode)), mappingList, 'absolute_map', str(absolute_episode))
            SaveDict(True if "(unknown length)" in season[3] else False, mappingList, 'absolute_map', 'unknown_series_length')
            SaveDict(str(int(season[0])), mappingList, 'absolute_map', 'max_season')

    Log.Info("--- tvdb4.posters.xml ---".ljust(157, '-'))
    TVDB4_xml = find_tvdb4_file(os.path.basename(TVDB4_POSTERS_URL))
    if TVDB4_xml:
        Log.Debug("'tvdb4.posters.xml' file detected locally")
    else:
        TVDB4_xml = TVDB4_xml or common.LoadFile(filename=os.path.basename(TVDB4_POSTERS_URL), url=TVDB4_POSTERS_URL, cache=CACHE_1DAY * 6)  # AniDB title database loaded once every 2 weeks
    if TVDB4_xml:
        seasonposternum = 0
        entry = common.GetXml(TVDB4_xml, "/tvdb4entries/posters[@tvdbid='%s']" % TVDBid)
        if not entry:
            Log.Error("TVDBid '%s' is not found in posters file" % TVDBid)
        # Each line: 'season|poster_url'
        for line in filter(None, entry.strip().splitlines()):
            season, url = line.strip().split("|", 1)
            season = season.lstrip("0") if season.lstrip("0") else "0"
            seasonposternum += 1
            SaveDict(("TheTVDB/seasons/%s-%s-%s" % (TVDBid, season, os.path.basename(url)), 1, None), TVDB4_dict, 'seasons', season, 'posters', url)

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("absolute_map: {}".format(DictString(Dict(mappingList, 'absolute_map', default={}), 0)))
    Log.Info("TVDB4_dict: {}".format(DictString(TVDB4_dict, 4)))
    return TVDB4_dict
def AdjustMapping(source, mappingList, dict_AniDB, dict_TheTVDB):
    """ Adjust the TVDB episode mapping for 'anidb3/tvdb' and 'anidb4/tvdb6' usage.

        Walks each AniDBid's prequel chain to compute where its episodes should land:
        - 'tvdb'  (anidb3): sequels mapped to TVDB season 0 are appended to the end of
          their prequel's season as +100 episode offsets (or pushed into a new season
          past max_season).
        - 'tvdb6' (anidb4): whole new TVDB seasons are inserted, so existing season
          mappings are removed/re-added and dict_TheTVDB season numbers pushed back.
        Mutates mappingList['TVDB'] and dict_TheTVDB in place; returns True if modified.

        EX:
        season_map: {'max_season': 2, '12560': {'max': 1, 'min': 1}, '13950': {'max': 0, 'min': 0}}
        relations_map: {'12560': {'Sequel': ['13950']}, '13950': {'Prequel': ['12560']}}
        TVDB Before: {'s1': {'12560': '0'}, 's0': {'13950': '0'}, '13950': (0, '')}
          's0e5': ('1', '4', '9453')
          's1': {'12560': '0'}
          '13950': (0, '')
    """
    Log.Info("=== anidb34.AdjustMapping() ===".ljust(157, '='))
    is_modified = False
    adjustments = {}          # 'sXeY' -> {'deleted': {...old mapping entries...}, 'added': [season, episode_offset]}
    tvdb6_seasons = {1: 1}    # new season number -> original AniDB 'min' season (0 marks an inserted season)
    is_banned = Dict(dict_AniDB, 'Banned', default=False)  # banned from AniDB => incomplete data expected
    TVDB = Dict(mappingList, 'TVDB', default={})
    season_map = Dict(mappingList, 'season_map', default={})
    relations_map = Dict(mappingList, 'relations_map', default={})
    if not (Dict(mappingList, 'possible_anidb3') or source=='tvdb6'):
        Log.Info("Neither a possible 'anidb3/tvdb' enrty nor 'anidb4/tvdb6' entry"); return is_modified
    Log.Info("adjusting mapping for 'anidb3/tvdb' & 'anidb4/tvdb6' usage")
    #Log.Info("dict_TheTVDB: {}".format(dict_TheTVDB))
    Log.Info("season_map: {}".format(DictString(season_map, 0)))
    Log.Info("relations_map: {}".format(DictString(relations_map, 1)))
    try:
        Log.Info("--- tvdb mapping adjustments ---".ljust(157, '-'))
        Log.Info("TVDB Before: {}".format(DictString(TVDB, 0)))
        for id in sorted(season_map, key=common.natural_sort_key):
            new_season, new_episode = '', ''
            if id == 'max_season': continue  # 'max_season' is metadata, not an AniDBid
            #### Note: Below must match scanner (variable names are different but logic matches) ####
            Log.Info("Checking AniDBid: %s" % id)

            def get_prequel_info(prequel_id):
                # Returns (season, episode_offset) where this title's episodes should start,
                # or ('', '') when the mapping should be left alone.
                Log.Info("-- get_prequel_info(prequel_id): %s, season min: %s, season max: %s" % (prequel_id, season_map[prequel_id]['min'], season_map[prequel_id]['max']))
                if source=="tvdb":
                    if season_map[prequel_id]['min'] == 0 and 'Prequel' in relations_map[prequel_id] and relations_map[prequel_id]['Prequel'][0] in season_map:
                        a, b = get_prequel_info(relations_map[prequel_id]['Prequel'][0])  # Recursively go down the tree following prequels
                        if not str(a).isdigit():  return ('', '')
                        return (a, b+100) if a < season_map['max_season'] else (a+1, 0)  # If the prequel is < max season, add 100 to the episode number offset: Else, add it into the next new season at episode 0
                    if season_map[prequel_id]['min'] == 0:  return ('', '')  # Root prequel is a special so leave mapping alone as special
                    elif season_map[prequel_id]['max'] < season_map['max_season']:  return (season_map[prequel_id]['max'], 100)  # Root prequel season is < max season so add to the end of the Prequel season
                    else:  return (season_map['max_season']+1, 0)  # Root prequel season is >= max season so add to the season after max
                if source=="tvdb6":
                    if season_map[prequel_id]['min'] != 1 and 'Prequel' in relations_map[prequel_id] and relations_map[prequel_id]['Prequel'][0] in season_map:
                        a, b = get_prequel_info(relations_map[prequel_id]['Prequel'][0])  # Recursively go down the tree following prequels
                        #Log.Info("%s+%s+%s-%s" % (a,1,season_map[prequel_id]['max'],season_map[prequel_id]['min']))
                        return (a+1+season_map[prequel_id]['max']-season_map[prequel_id]['min'], 0) if str(a).isdigit() else ('', '')  # Add 1 to the season number and start at episode 0
                    return (2, 0) if season_map[prequel_id]['min'] == 1 else ('', '')  # Root prequel is season 1 so start counting up. Else was a sequel of specials only so leave mapping alone

            if source=="tvdb":
                if season_map[id]['min'] == 0 and 'Prequel' in relations_map[id] and relations_map[id]['Prequel'][0] in season_map:
                    new_season, new_episode = get_prequel_info(relations_map[id]['Prequel'][0])  # Recursively go down the tree following prequels to a TVDB season non-0 AniDB prequel
            if source=="tvdb6":
                if 'Prequel' in relations_map[id] and relations_map[id]['Prequel'][0] in season_map:
                    new_season, new_episode = get_prequel_info(relations_map[id]['Prequel'][0])  # Recursively go down the tree following prequels to the TVDB season 1 AniDB prequel
            if str(new_season).isdigit():  # A new season & episode offset has been assigned
                # As anidb4/tvdb6 does full season adjustments, we need to remove any existing season mapping
                is_modified = True
                removed = {}
                for key in TVDB.keys():  # NOTE: Python 2 .keys() returns a list copy, so deleting entries while iterating is safe here
                    if isinstance(TVDB[key], dict) and id in TVDB[key]:
                        Log.Info("-- Deleted: %s: {'%s': '%s'}" % (key, id, TVDB[key][id]))
                        removed[key] = {id: TVDB[key][id]}
                        del TVDB[key][id]  # Delete season entries for its old anidb non-s0 season entries | 's4': {'11350': '0'}
                    if isinstance(TVDB[key], tuple) and TVDB[key][0] == '1' and TVDB[key][2] == id:
                        Log.Info("-- Deleted: {}: {}".format(key, TVDB[key]))
                        removed[key] = TVDB[key]
                        del TVDB[key]  # Delete episode entries for its old anidb s1 entries | 's0e5': ('1', '4', '9453')
                SaveDict(str(new_episode), TVDB, 's'+str(new_season), id)
                Log.Info("-- Added : {}: {}".format('s'+str(new_season), {id: str(new_episode)}))
                adjustments['s'+str(new_season)+'e'+str(new_episode)] = {'deleted': removed, 'added': [str(new_season), str(new_episode)]}
                tvdb6_seasons[new_season] = season_map[id]['min']  # tvdb6_seasons[New season] = [Old season]
        Log.Info("TVDB After : {}".format(DictString(Dict(mappingList, 'TVDB'), 0)))

        # Push back the 'dict_TheTVDB' season numbers if tvdb6 for the new inserted season
        if source=="tvdb6":
            Log.Info("--- tvdb meta season adjustments ---".ljust(157, '-'))
            top_season, season, adjustment, new_seasons = max(map(int, dict_TheTVDB['seasons'].keys())), 1, 0, {}
            Log.Info("dict_TheTVDB Seasons Before : {}".format(sorted(dict_TheTVDB['seasons'].keys(), key=int)))
            Log.Info("tvdb6_seasons : {}".format(tvdb6_seasons))
            if "0" in dict_TheTVDB['seasons']:  new_seasons["0"] = dict_TheTVDB['seasons'].pop("0")  # season 0 (specials) never moves
            while season <= top_season:
                if Dict(tvdb6_seasons, season + adjustment) == 0:  # an inserted season: reserve the slot and shift all following seasons
                    Log.Info("-- New TVDB season '{}'".format(season + adjustment))
                    adjustment += 1
                else:
                    Log.Info("-- Adjusting season '{}' -> '{}'".format(season, season + adjustment))
                    if str(season) in dict_TheTVDB['seasons']:  new_seasons[str(season + adjustment)] = dict_TheTVDB['seasons'].pop(str(season))
                    season += 1
            SaveDict(new_seasons, dict_TheTVDB, 'seasons')
            Log.Info("dict_TheTVDB Seasons After : {}".format(sorted(dict_TheTVDB['seasons'].keys(), key=int)))

        # Copy in the 'dict_TheTVDB' deleted episode meta into its new added location
        Log.Info("--- tvdb meta episode adjustments ---".ljust(157, '-'))
        Log.Info("adjustments: {}".format(DictString(adjustments, 2)))
        for entry in sorted(adjustments, key=common.natural_sort_key):  # EX: {'s6e0': {'added': ['6', '0'], 'deleted': {'s0e16': ('1', '1', '12909'), 's-1': {'12909': '0'}}}}
            added_season, added_offset = adjustments[entry]['added']  # 'added': ['6', '0']
            Log.Info("added_season: '{}', added_offset: '{}'".format(added_season, added_offset))
            for deleted in sorted(adjustments[entry]['deleted'], key=common.natural_sort_key):
                Log.Info("-- deleted: '{}': {}".format(deleted, adjustments[entry]['deleted'][deleted]))
                if isinstance(adjustments[entry]['deleted'][deleted], dict):  # whole-season entry, EX: {'s0': {'6463': '0'}}
                    deleted_season = deleted[1:]  # {-->'s0'<--: {'6463': '0'}}
                    deleted_offset = adjustments[entry]['deleted'][deleted].values()[0]  # {'s0': {'6463': -->'0'<--}}  (Python 2: .values() is a list)
                    if deleted=='s-1':
                        Log.Info("---- {:<9}: Dead season".format("'%s'" % deleted)); continue  # EX: {'s-1': {'12909': '0'}}
                    if deleted!='s0' and added_offset=='0' and deleted_offset=='0':
                        Log.Info("---- {:<9}: Whole season (s1+) was adjusted in previous section".format("'%s'" % deleted)); continue  # EX: {'s3e0': 'added': ['3', '0'], 'deleted': {'s2': {'7680': '0'}}} == Adjusting season '2' -> '3'
                    # EX: {'s2e0':   'added': ['2', '0'  ], 'deleted': {'s0': {'6463': '0'}}}
                    # EX: {'s1e100': 'added': ['1', '100'], 'deleted': {'s0': {'982': '1'}}}
                    interation = 1
                    Log.Info("---- deleted_season: '{}', deleted_offset: '{}'".format(deleted_season, deleted_offset))
                    while Dict(dict_TheTVDB, 'seasons', deleted_season, 'episodes', str(int(deleted_offset) + interation)):
                        a, b, x = deleted_season, str(int(deleted_offset) + interation), str(int(added_offset) + interation)
                        SaveDict(Dict(dict_TheTVDB, 'seasons', a, 'episodes', b), dict_TheTVDB, 'seasons', added_season, 'episodes', x)
                        Log.Info("---- {:<9}: dict_TheTVDB['seasons']['{}']['episodes']['{}'] => dict_TheTVDB['seasons']['{}']['episodes']['{}']".format("'%s'" % deleted, a, b, added_season, x))
                        interation += 1
                if isinstance(adjustments[entry]['deleted'][deleted], tuple):  # single-episode entry, EX: {'s0e16': ('1', '1', '12909')}
                    a, b = list(filter(None, re.split(r"[se]", deleted)))  # 's0e16' --> ['0', '16']
                    x = str(int(adjustments[entry]['deleted'][deleted][1]) + int(added_offset))  # ('1', -->'1'<--, '12909')
                    Log.Info("---- {:<9}: dict_TheTVDB['seasons']['{}']['episodes']['{}'] => dict_TheTVDB['seasons']['{}']['episodes']['{}']".format("'%s'" % deleted, a, b, added_season, x))
                    SaveDict(Dict(dict_TheTVDB, 'seasons', a, 'episodes', b), dict_TheTVDB, 'seasons', added_season, 'episodes', x)
    except Exception as e:
        # On AniDB ban the maps are known-incomplete, so a KeyError here is expected
        if is_banned:
            Log.Info("Expected exception hit as you were banned from AniDB so you have incomplete data to proceed")
        else:
            Log.Error("Unexpected exception hit")
        Log.Info('Exception: "{}"'.format(e))
        Log.Info("If a key error, look at the 'season_map'/'relations_map' info to see why it is missing")
        if source=="tvdb":
            Log.Info("Source is 'tvdb' so metadata will be loaded but it will not be complete for any 'anidb3' end of season additions")
        if source=="tvdb6":
            Log.Info("Source is 'tvdb6' so removing AniDB & TVDB metadata from memory to prevent incorrect data from being loaded"); dict_AniDB.clear(); dict_TheTVDB.clear()
        is_modified = False
    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("is_modified: {}".format(is_modified))
    return is_modified
def GetMetadata(myanimelistId, type, media):
    """ Fetch details for 'myanimelistId' from the MyAnimeList API and map
        them into a metadata dict.  Returns None when the request fails.
    """
    Log.Info("=== MyAnimeList.GetMetadata() ===".ljust(157, '='))
    detail_url = MYANIMELIST_URL_DETAILS.format(id=myanimelistId)
    Log.Info("URL : " + str(detail_url))

    # Fetch and parse the detail endpoint; give up on any request/parse failure
    try:
        details = JSON.ObjectFromString(HTTP.Request(detail_url, sleep=2.0, cacheTime=MYANIMELIST_CACHE_TIME).content)
    except Exception as e:
        Log.Error("No Detail Information were available " + str(e))
        return

    result = {}
    if details:
        anime_id = Dict(details, "id")
        if anime_id:
            SaveDict(str(anime_id), result, "id")
            Log.Debug("ID: " + str(anime_id))

        title = Dict(details, "title")
        if title:
            SaveDict(str(title), result, "title")
            Log.Debug("Title: " + str(title))

        synopsis = Dict(details, "synopsis")
        if synopsis:
            # Strip any embedded HTML tags before storing the summary
            SaveDict(str(re.sub(re.compile('<.*?>'), '', synopsis)), result, "summary")
            Log.Debug("Summary: " + str(synopsis))

        members_score = Dict(details, "members_score")
        if members_score:
            SaveDict(float(members_score), result, "rating")
            Log.Debug("Rating: " + str(members_score))

        classification = Dict(details, "classification")
        if classification:
            SaveDict(str(classification), result, "content_rating")
            Log.Debug("Content Rating: " + str(classification))

        duration = Dict(details, "duration")
        if duration:
            # The API reports minutes; Plex expects milliseconds
            SaveDict(int(duration * 60000), result, "duration")
            Log.Debug("Duration: " + str(duration * 60000))

        start_date = Dict(details, "start_date")
        if start_date:
            SaveDict(start_date, result, "originally_available_at")
            Log.Debug("Release date: " + str(start_date))

        image_url = Dict(details, "image_url")
        if image_url:
            SaveDict((image_url, 1, None), result, 'poster', image_url)
            Log.Debug("Cover: " + str(image_url))

        genres = Dict(details, "genres")
        for genre in (genres if genres and len(genres) > 0 else []):
            SaveDict([str(genre)], result, "genres")
            Log.Debug("Genres: " + str(genres))

        ### TODO: Switch to Studios when they are available in the API (or add Producers to metadata when this is possible in Plex)

        if type == "tvshow":
            Log.Debug("Adding TV-Show specific data")
            Log.Debug("Episodes: " + str(Dict(details, "episodes")))
            # TODO: fetch per-episode titles/air-dates via MYANIMELIST_URL_EPISODES
            # (paged in chunks of 100) once episode population is re-enabled
        if type == "movie":
            Log.Debug("Adding Movie specific data, nothing specific to add")

    Log.Info("MyAnimeList_dict: {}".format(DictString(result, 4)))
    Log.Info("--- return ---".ljust(157, '-'))
    return result