Example #1
def GetAniDBTVDBMapCustom(media, movie):
    AniDBTVDBMapCustom = None
    lib, root, path = common.GetLibraryRootPath(
        common.GetMediaDir(media, movie))
    dir = os.path.join(root, path)
    while dir:
        scudlee_filename_custom = os.path.join(dir, SCHUDLEE_CUSTOM)
        if os.path.exists(scudlee_filename_custom):
            try:
                AniDBTVDBMapCustom = XML.ElementFromString(
                    Core.storage.load(scudlee_filename_custom))
                Log.Info("Local custom mapping file loaded: {}".format(
                    scudlee_filename_custom))
            except Exception as e:
                Log.Error("Failed to open: '%s', error: '%s'" %
                          (scudlee_filename_custom, e))
            else:
                break
        dir = os.path.dirname(dir) if len(dir) > len(
            root
        ) else ''  # Clear variable if we've just finished processing down to (and including) root
    else:
        Log.Info("Local custom mapping file not present: {}".format(
            SCHUDLEE_CUSTOM))
    return AniDBTVDBMapCustom
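
For reference, here is a minimal standalone sketch of the same upward directory walk without the Plex framework objects (Log, XML, Core.storage); the SCHUDLEE_CUSTOM value below is an assumption standing in for the constant used above.

import os

SCHUDLEE_CUSTOM = 'anime-list-custom.xml'  # assumed value of the constant referenced above

def find_custom_mapping(start_dir, root):
    # Walk from start_dir up to (and including) root, returning the first
    # custom mapping file found, or None if none exists.
    directory = start_dir
    while directory:
        candidate = os.path.join(directory, SCHUDLEE_CUSTOM)
        if os.path.exists(candidate):
            return candidate
        # Clear the variable once root itself has been checked
        directory = os.path.dirname(directory) if len(directory) > len(root) else ''
    return None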
Example #2
def Search(results, media, lang, manual, movie):
    from common import Log  #Import here for startup logging to go to the plex pms log
    orig_title = media.title if movie else media.show
    Log.Open(media=media, movie=movie, search=True)
    Log.Info('=== Search() ==='.ljust(157, '='))
    Log.Info(
        "title: '%s', name: '%s', filename: '%s', manual: '%s', year: '%s'" %
        (orig_title, media.name, media.filename, str(manual), media.year)
    )  #if media.filename is not None: filename = String.Unquote(media.filename) #auto match only
    Log.Info("start: {}".format(
        datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S,%f")))
    Log.Info("".ljust(157, '='))
    if not orig_title: return

    #clear-cache directive
    if orig_title == "clear-cache":
        HTTP.ClearCache()
        results.Append(
            MetadataSearchResult(id='clear-cache',
                                 name='Plex web cache cleared',
                                 year=media.year,
                                 lang=lang,
                                 score=0))
        return

    ### Check if a guid is specified "Show name [anidb-id]" ###
    Log.Info('--- force id ---'.ljust(157, '-'))
    match = re.search(
        r"(?P<show>.*?) ?\[(?P<source>(anidb(|[2-9])|tvdb(|[2-9])|tmdb|tsdb|imdb))-(tt)?(?P<guid>[^\[\]]*)\]",
        orig_title, re.IGNORECASE)
    if match is not None:
        guid = match.group('source') + '-' + match.group('guid')
        if guid.startswith('anidb') and not movie and max(
                map(int, media.seasons.keys())) > 1:
            Log.Info('[!] multiple seasons = tvdb numbering, BAKA!')
        results.Append(
            MetadataSearchResult(id=guid,
                                 name=match.group('show') + " [" + guid + ']',
                                 year=media.year,
                                 lang=lang,
                                 score=100))
        Log.Info("Forced ID - source: {}, id: {}, title: '{}'".format(
            match.group('source'), match.group('guid'), match.group('show')))
    else:  #if media.year is not None:  orig_title = orig_title + " (" + str(media.year) + ")"  ### Year - if present (manual search or from scanner but not mine), include in title ###
        Log.Info('--- source searches ---'.ljust(157, '-'))
        maxi, n = 0, 0
        if movie or max(map(int, media.seasons.keys())) <= 1:
            maxi, n = AniDB.Search(results, media, lang, manual, movie)
        if maxi < 50 and movie:
            maxi = TheMovieDb.Search(results, media, lang, manual, movie)
        if maxi < 80 and not movie or n > 1:
            maxi = max(TheTVDBv2.Search(results, media, lang, manual, movie),
                       maxi)
    Log.Info("".ljust(157, '='))
    Log.Info("end: {}".format(
        datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S,%f")))
    Log.Close()
Example #3
def GetMetadata(metadata, error_log, TVDBid, title):
    Log.Info("=== Plex.GetMetadata() ===".ljust(157, '='))
    url = THEME_URL.format(TVDBid)
    Plex_dict = {}

    Log.Info("Prefs['themes']: '{}', TVDBid: '{}'".format(
        Prefs['themes'], TVDBid))
    Log.Info("--- themes ---".ljust(157, '-'))
    if 'Plex' in Prefs['themes'] and TVDBid.isdigit():
        title = title or TVDBid
        result = '*' if url in metadata.themes else common.GetStatusCode(url)
        Log.Info("result code: '{plex}', url: '{url}'".format(plex=result,
                                                              url=url))
        if result in (200, "*"):
            Log.Info("[ ] theme: {}".format(
                SaveDict(("Plex/%s.mp3" % TVDBid, 2, None), Plex_dict,
                         'themes', url)))
        else:
            error_log['Plex themes missing'].append(
                "TVDBid: '{}' | Title: '{}' | {}".format(
                    common.WEB_LINK % (common.TVDB_SERIE_URL + TVDBid, title),
                    title, common.WEB_LINK %
                    ("mailto:[email protected]?cc=&subject=Missing%%20theme%%20song%%20-%%20&#39;%s%%20-%%20%s.mp3&#39;"
                     % (title, TVDBid), 'Upload')))
    else:
        Log.Info(
            "Not pulling meta - 'Plex' in Prefs['themes']: '{}', TVDBid: '{}'".
            format('Plex' in Prefs['themes'], TVDBid))

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("Plex_dict: {}".format(DictString(Plex_dict, 1)))
    return Plex_dict
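
common.GetStatusCode() is not shown here; below is a hedged sketch of the idea, assuming it simply returns the HTTP status code of the theme URL (the THEME_URL value is likewise an assumption).

import urllib2

THEME_URL = "http://tvthemes.plexapp.com/{}.mp3"  # assumed value of the constant used above

def get_status_code(url):
    # Return the HTTP status code for url, or None on network failure.
    try:
        return urllib2.urlopen(url, timeout=10).getcode()
    except urllib2.HTTPError as e:
        return e.code
    except Exception:
        return None

# Example: a 200 result means the theme exists and can be recorded in Plex_dict
# get_status_code(THEME_URL.format("81797"))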
Example #4
def GetAniDBTVDBMapCustom(media, movie):  
  AniDBTVDBMapCustom = None
  lib, root, path = common.GetLibraryRootPath(common.GetMediaDir(media, movie))
  dir = os.path.join(root, path)
  while dir and os.path.splitdrive(dir)[1] != os.sep:
    scudlee_filename_custom = os.path.join(dir, SCHUDLEE_CUSTOM)
    if os.path.exists( scudlee_filename_custom ):
      try:
        AniDBTVDBMapCustom = XML.ElementFromString(Core.storage.load(scudlee_filename_custom))
        Log.Info("Local custom mapping file loaded: {}".format(scudlee_filename_custom))
      except Exception as e:  Log.Error("Failed to open: '%s', error: '%s'" % (scudlee_filename_custom, e))
      else:    break
    dir = os.path.dirname(dir)
  else:  Log.Info("Local custom mapping file not present: {}".format(SCHUDLEE_CUSTOM))
  return AniDBTVDBMapCustom
Example #5
def GetAniDBTitlesDB():
  ''' Get the AniDB title database
  '''
  global AniDBTitlesDB
  AniDBTitlesDB = common.LoadFile(filename='anime-titles.xml', relativeDirectory="AniDB", url=ANIDB_TITLES)  # AniDB title database loaded once every 2 weeks
  if not AniDBTitlesDB:  raise Exception("Failed to load core file '{url}'".format(url=os.path.splitext(os.path.basename(ANIDB_TITLES))[0]))
  else: Log.Info("Entries loaded: {}, File: {}".format(len(AniDBTitlesDB), ANIDB_TITLES))
Example #6
def Update(metadata, media, lang, force, movie):
  from common import Log  #Import here for startup logging to go to the plex pms log
  Log.Open(media=media, movie=movie, search=False)
  source    = metadata.id.split('-', 1)[0]
  error_log = { 'AniDB summaries missing'   :[], 'AniDB posters missing'      :[], 'anime-list AniDBid missing':[], 'anime-list studio logos'  :[],  
                'TVDB posters missing'      :[], 'TVDB season posters missing':[], 'anime-list TVDBid missing' :[], 'Plex themes missing'      :[],
                'Missing Episodes'          :[], 'Missing Specials'           :[], 'Missing Episode Summaries' :[], 'Missing Special Summaries':[]}
  Log.Info('=== Update() ==='.ljust(157, '='))
  Log.Info("id: {}, title: {}, lang: {}, force: {}, movie: {}".format(metadata.id, metadata.title, lang, force, movie))
  Log.Info("start: {}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S,%f")))
  
  dict_AnimeLists, AniDBid, TVDBid, TMDbid, IMDbid, mappingList =  AnimeLists.GetMetadata(media, movie, error_log, metadata.id)
  dict_TheTVDB,                             IMDbid              =   TheTVDBv2.GetMetadata(media, movie, error_log, lang, source, AniDBid, TVDBid, IMDbid,         mappingList, Dict(AniDB, 'movie'))
  dict_tvdb4                                                    =      common.GetMetadata(media, movie,                  source,          TVDBid,                 mappingList)
  dict_AniDB, ANNid, MALid                                      =       AniDB.GetMetadata(media, movie, error_log,       source, AniDBid, TVDBid, AnimeLists.AniDBMovieSets, mappingList)
  dict_TheMovieDb,          TSDbid, TMDbid, IMDbid              =  TheMovieDb.GetMetadata(media, movie,                                   TVDBid, TMDbid, IMDbid)
  dict_FanartTV                                                 =    FanartTV.GetMetadata(       movie,                                   TVDBid, TMDbid, IMDbid)
  dict_Plex                                                     =        Plex.GetMetadata(metadata, error_log, TVDBid, Dict(dict_TheTVDB, 'title'))
  dict_TVTunes                                                  =     TVTunes.GetMetadata(metadata, Dict(dict_TheTVDB, 'title'), Dict(mappingList, AniDBid, 'name'))  #Sources[m:eval('dict_'+m)]
  dict_OMDb                                                     =        OMDb.GetMetadata(movie, IMDbid)  #TVDBid=='hentai'
  dict_MyAnimeList                                              = MyAnimeList.GetMetadata(movie, MALid )
  dict_Local                                                    =       Local.GetMetadata(media, movie)
  if common.AdjustMapping(source, mappingList, dict_AniDB, dict_TheTVDB):
    dict_AniDB, ANNid, MALid                                    =       AniDB.GetMetadata(media, movie, error_log,       source, AniDBid, TVDBid, AnimeLists.AniDBMovieSets, mappingList)
  Log.Info('=== Update() ==='.ljust(157, '='))
  Log.Info("AniDBid: '{}', TVDBid: '{}', TMDbid: '{}', IMDbid: '{}', ANNid:'{}', MALid: '{}'".format(AniDBid, TVDBid, TMDbid, IMDbid, ANNid, MALid))
  common.write_logs(media, movie, error_log, source, AniDBid, TVDBid)
  common.UpdateMeta(metadata, media, movie, {'AnimeLists': dict_AnimeLists, 'AniDB':       dict_AniDB,       'TheTVDB': dict_TheTVDB, 'TheMovieDb': dict_TheMovieDb, 
                                             'FanartTV':   dict_FanartTV,   'tvdb4':       dict_tvdb4,       'Plex':    dict_Plex,    'TVTunes':    dict_TVTunes, 
                                             'OMDb':       dict_OMDb,       'MyAnimeList': dict_MyAnimeList, 'Local':   dict_Local}, mappingList)
  Log.Info('=== Update() ==='.ljust(157, '='))
  Log.Info("end: {}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S,%f")))
  Log.Close()
Example #7
def GetMetadata(metadata, title1, title2):
    Log.Info("=== TVTunes.GetMetadata() ===".ljust(157, '='))
    THEME_URL = "http://www.televisiontunes.com/uploads/audio/{}.mp3"
    TVTunes_dict = {}

    Log.Info("Prefs['themes']: '{}', title: '{}', title2: '{}'".format(
        Prefs['themes'], title1, title2))
    Log.Info("--- themes ---".ljust(157, '-'))
    if 'TVTunes' in Prefs['themes'] and (title1 or title2):
        for url in [
                THEME_URL.format(String.Quote(t)) for t in (title1, title2)
                if t
        ]:
            result = '*' if url in metadata.themes or Data.Exists(
                url.split('/')[-1]) else common.GetStatusCode(url)
            Log.Info("Return code: '{}', url: '{}'".format(result, url))
            if result in (200, "*"):
                Log.Info("[ ] theme: {}".format(
                    SaveDict(
                        ("TelevisionTunes/" + url.split('/')[-1], 1, None),
                        TVTunes_dict, 'themes', url)))

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("TVTunes_dict: {}".format(DictString(TVTunes_dict, 1)))
    return TVTunes_dict
Example #8
def Search(results,  media, lang, manual, movie):  #if maxi<50:  maxi = tvdb.Search_TVDB(results, media, lang, manual, movie)
  '''Search for the TVDB id of a series
  '''
  Log.Info("=== TheTVDB.Search() ===".ljust(157, '='))
  #series_data = JSON.ObjectFromString(GetResultFromNetwork(TVDB_SEARCH_URL % mediaShowYear, additionalHeaders={'Accept-Language': lang}))['data'][0]
  orig_title = ( media.title if movie else media.show )
  maxi = 0
  try:                    TVDBsearchXml = XML.ElementFromURL( TVDB_SERIE_SEARCH + quote(orig_title), headers=common.COMMON_HEADERS, cacheTime=CACHE_1HOUR * 24)
  except Exception as e:  Log.Error("TVDB Loading search XML failed, Exception: '%s'" % e)
  else:
    for serie in TVDBsearchXml.xpath('Series'):
      a, b = orig_title, GetXml(serie, 'SeriesName').encode('utf-8') #a, b  = cleansedTitle, cleanse_title (serie.xpath('SeriesName')[0].text)
      if b=='** 403: Series Not Permitted **': continue
      score = 100 - 100*Util.LevenshteinDistance(a,b) / max(len(a),len(b)) if a!=b else 100
      if maxi<score:  maxi = score
      Log.Info("TVDB  - score: '%3d', id: '%6s', title: '%s'" % (score, GetXml(serie, 'seriesid'), GetXml(serie, 'SeriesName')))
      results.Append(MetadataSearchResult(id="%s-%s" % ("tvdb", GetXml(serie, 'seriesid')), name="%s [%s-%s]" % (GetXml(serie, 'SeriesName'), "tvdb", GetXml(serie, 'seriesid')), year=None, lang=lang, score=score) )
  return maxi
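
The scoring line above combines an exact-match shortcut with a Levenshtein ratio; a small self-contained illustration of that formula follows, assuming Util.LevenshteinDistance is a standard edit distance.

def levenshtein(a, b):
    # Plain dynamic-programming edit distance (assumed behaviour of Util.LevenshteinDistance)
    previous = range(len(b) + 1)
    for i, ca in enumerate(a, 1):
        current = [i]
        for j, cb in enumerate(b, 1):
            current.append(min(previous[j] + 1,                 # deletion
                               current[j - 1] + 1,              # insertion
                               previous[j - 1] + (ca != cb)))   # substitution
        previous = current
    return previous[-1]

def title_score(a, b):
    # Mirrors: 100 - 100*distance/max(len) when titles differ, 100 on an exact match
    return 100 if a == b else 100 - 100 * levenshtein(a, b) / max(len(a), len(b))

# title_score('Cowboy Bebop', 'Cowboy Bebop')            -> 100
# title_score('Cowboy Bebop', 'Cowboy Bebop: The Movie') -> a noticeably lower score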
Example #9
def MakeGraphqlQuery(document, variables):
    Log.Info("Query: {}".format(document))
    Log.Info("Variables: {}".format(variables))

    source = variables.keys()[0]
    data = JSON.StringFromObject({"query": document, "variables": variables})
    response = common.LoadFile(filename=str(variables[source]) + '.json',
                               relativeDirectory=os.path.join(
                                   'AniList', 'json', source),
                               url=GRAPHQL_API_URL,
                               data=data,
                               cache=CACHE_1DAY)

    # EX: {"data":null,"errors":[{"message":"Not Found.","hint":"Use POST request to access graphql subdomain.","status":404}]}
    if len(Dict(response, 'errors', default=[])) > 0:
        Log.Error("Got error: {}".format(Dict(response, 'errors')[0]))
        return None

    return Dict(response, "data")
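
The Plex helpers (common.LoadFile, JSON, Dict) hide the actual HTTP call and the on-disk caching; a hedged standalone sketch of the same GraphQL POST is shown below, with GRAPHQL_API_URL assumed to be AniList's public endpoint.

import json
import urllib2

GRAPHQL_API_URL = "https://graphql.anilist.co"  # assumed value of the constant used above

def make_graphql_query(document, variables):
    # POST the query document plus variables, return the 'data' payload, or None on errors.
    payload = json.dumps({"query": document, "variables": variables})
    request = urllib2.Request(GRAPHQL_API_URL, payload,
                              {"Content-Type": "application/json", "Accept": "application/json"})
    response = json.load(urllib2.urlopen(request, timeout=20))
    if response.get("errors"):
        return None
    return response.get("data")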
Example #10
def MergeMaps(AniDBTVDBMap, AniDBTVDBMap_fix):
  AniDBTVDBMap_new = copy.deepcopy(AniDBTVDBMap)
  dict_nodes, count = {}, 0  #Log.Info('type1: {}, type2: {}'.format(type(AniDBTVDBMap).__name__ , type(AniDBTVDBMap_fix).__name__))
  if type(AniDBTVDBMap_fix).__name__ == '_Element':
    for node in AniDBTVDBMap_fix or []:  dict_nodes[node.get('anidbid')] = node          # save mod list and nodes
    Log.Info("MergeMaps() - AniDBids concerned: " + str(dict_nodes.keys()))              #
  for node in AniDBTVDBMap_new or []:                                                        # LOOP IN EVERY ANIME IN SCHUDLEE_MASTER FILE
    if node and node.get('anidbid') in dict_nodes:  AniDBTVDBMap_new.remove(node); count+=1  #   if a correction exists: remove old mapping from AniDBTVDBMap
    if count == len(dict_nodes):                    break                                #   if deleted all exit loop
  for key in dict_nodes or {}:  AniDBTVDBMap_new.append( dict_nodes[key] )                   # add all new anidb mapping
  return AniDBTVDBMap_new
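
A minimal sketch of the same merge idea using plain ElementTree (the agent itself works on lxml elements; the node and attribute names mirror the ScudLee list format): correction nodes replace the matching anidbid entries in the master list.

import xml.etree.ElementTree as ET

master = ET.fromstring(
    '<anime-list>'
    '<anime anidbid="1" tvdbid="100"/>'
    '<anime anidbid="2" tvdbid="200"/>'
    '</anime-list>')
fixes = ET.fromstring(
    '<anime-list>'
    '<anime anidbid="2" tvdbid="999"/>'   # this correction overrides the master entry
    '</anime-list>')

corrections = {node.get('anidbid'): node for node in fixes}  # save correction nodes by anidbid
for node in list(master):
    if node.get('anidbid') in corrections:
        master.remove(node)                                  # drop the old mapping
for node in corrections.values():
    master.append(node)                                      # add the corrected mapping
# master now maps anidbid 2 to tvdbid 999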
Example #11
def Search(results, media, lang, manual, movie):
  Log.Info("=== TheMovieDb.Search() ===".ljust(157, '='))
  #'Uchiage Hanabi, Shita kara Miru ka? Yoko kara Miru ka? 打ち上げ花火、下から見るか?横から見るか?' Failed with: TypeError: not all arguments converted during string formatting
  #Fixed with:tmdb_url = TMDB_MOVIE_SEARCH.format(query=String.Quote(orig_title)) Log.Info("TMDB - url: " + tmdb_url) try: json = JSON.ObjectFromURL(tmdb_url, sleep=2.0, headers={'Accept': 'application/json'}, cacheTime=CACHE_1WEEK * 2) except Exception as e: Log.Error("get_json - Error fetching JSON page '%s', Exception: '%s'" % (tmdb_url, e) )
  orig_title = String.Quote(media.name if manual and movie else media.title if movie else media.show)
  maxi = 0
  
  Log.Info("TMDB  - url: " + TMDB_MOVIE_SEARCH.format(query=orig_title))
  try:                    json = JSON.ObjectFromURL(TMDB_MOVIE_SEARCH.format(query=orig_title), sleep=2.0, headers=common.COMMON_HEADERS, cacheTime=CACHE_1WEEK * 2)
  except Exception as e:  Log.Error("get_json - Error fetching JSON page '%s', Exception: '%s'" %( TMDB_MOVIE_SEARCH.format(query=orig_title), e)) # json   = common.get_json(TMDB_MOVIE_SEARCH % orig_title, cache_time=CACHE_1WEEK * 2)
  else:
    if isinstance(json, dict) and 'results' in json:
      for movie in json['results']:
        a, b  = orig_title, movie['title'].encode('utf-8')
        score = 100 - 100*Util.LevenshteinDistance(a,b) / max(len(a),len(b)) if a!=b else 100
        if maxi<score:  maxi = score
        Log.Info("TMDB  - score: '%3d', id: '%6s', title: '%s'" % (score, movie['id'],  movie['title']) )
        results.Append(MetadataSearchResult(id="tmdb-"+str(movie['id']), name="{} [{}-{}]".format(movie['title'], "tmdb", movie['id']), year=None, lang=lang, score=score) )
        if 'adult' in movie and movie['adult']!="null":  Log.Info("adult: '{}'".format(movie['adult']))
  return maxi
Example #12
def GetAniDBTVDBMap():
    global AniDBTVDBMap
    AniDBTVDBMap = common.LoadFile(filename=os.path.basename(SCUDLEE_MASTER),
                                   relativeDirectory="AnimeLists",
                                   url=SCUDLEE_MASTER)  #
    if not AniDBTVDBMap:
        raise Exception(
            "GetAniDBTVDBMap() - Failed to load core file '{file}'".format(
                url=os.path.splitext(os.path.basename(SCUDLEE_MASTER)))
        )  #; AniDB_Movie_Set = XML.ElementFromString("<anime-set-list></anime-set-list>")  #; raise Exception("HAMA Fatal Error Hit")
    else:
        Log.Info("Entries loaded: {}, File: {}".format(len(AniDBTVDBMap),
                                                       SCUDLEE_MASTER))
Example #13
def GetAniDBTVDBMap():
    global AniDBTVDBMap
    MAPPING = 'https://raw.githubusercontent.com/ScudLee/anime-lists/master/anime-list-master.xml'  # ScudLee mapping file url
    MAPPING_FIX = 'https://raw.githubusercontent.com/ZeroQI/Absolute-Series-Scanner/master/anime-list-corrections.xml'  # ScudLee mapping file url online override
    MAPPING_LOCAL = os.path.join(
        common.CachePath, 'AnimeLists', 'anime-list-custom.xml'
    )  # Custom mapping list(PlexRoot, "Plug-in Support", "Data", "com.plexapp.agents.hama", "DataItems", 'AnimeLists', 'anime-list-corrections.xml')
    AniDBTVDBMap = common.LoadFile(filename=os.path.basename(MAPPING),
                                   relativeDirectory="AnimeLists",
                                   url=MAPPING,
                                   cache=CACHE_1DAY * 6)  #
    if not AniDBTVDBMap:
        Log.Critical(
            "GetAniDBTVDBMap() - Failed to load core file '{file}'".format(
                url=os.path.splitext(os.path.basename(MAPPING)))
        )  #; AniDB_Movie_Set = XML.ElementFromString("<anime-set-list></anime-set-list>")  #; raise Exception("HAMA Fatal Error Hit")
    MergeMaps(AniDBTVDBMap,
              common.LoadFile(filename=os.path.basename(MAPPING_FIX),
                              relativeDirectory="AnimeLists",
                              url=MAPPING_FIX,
                              cache=CACHE_1DAY *
                              6))  #Online ScudLee anidb to tvdb mapping list

    if os.path.exists(
            MAPPING_LOCAL):  #Local  ScudLee anidb to tvdb mapping list
        Log.Info("GetAniDBTVDBMap() - Loading local custom mapping - url: " +
                 MAPPING_LOCAL)
        try:
            MergeMaps(AniDBTVDBMap,
                      XML.ElementFromString(Core.storage.load(MAPPING_LOCAL)))
        except Exception as e:
            Log.Info(
                "GetAniDBTVDBMap() - Failed open scudlee_filename_custom, error: '%s'"
                % e)
    else:
        Log.Info(
            "GetAniDBTVDBMap() - Local custom mapping file not present: {}".
            format(MAPPING_LOCAL))
Example #14
 def get_prequel_info(prequel_id):
     Log.Info(
         "-- get_prequel_info(prequel_id): %s, season min: %s, season max: %s"
         % (prequel_id, season_map[prequel_id]['min'],
            season_map[prequel_id]['max']))
     if source == "tvdb":
         if season_map[prequel_id][
                 'min'] == 0 and 'Prequel' in relations_map[
                     prequel_id] and relations_map[prequel_id][
                         'Prequel'][0] in season_map:
             a, b = get_prequel_info(
                 relations_map[prequel_id]['Prequel'][0]
             )  # Recursively go down the tree following prequels
             if not str(a).isdigit(): return ('', '')
             return (
                 a, b + 100
             ) if a < season_map['max_season'] else (
                 a + 1, 0
             )  # If the prequel is < max season, add 100 to the episode number offset: Else, add it into the next new season at episode 0
         if season_map[prequel_id]['min'] == 0:
             return (
                 '', ''
             )  # Root prequel is a special so leave mapping alone as special
         elif season_map[prequel_id]['max'] < season_map[
                 'max_season']:
             return (
                 season_map[prequel_id]['max'], 100
             )  # Root prequel season is < max season so add to the end of the Prequel season
         else:
             return (
                 season_map['max_season'] + 1, 0
             )  # Root prequel season is >= max season so add to the season after max
     if source == "tvdb6":
         if season_map[prequel_id][
                 'min'] != 1 and 'Prequel' in relations_map[
                     prequel_id] and relations_map[prequel_id][
                         'Prequel'][0] in season_map:
             a, b = get_prequel_info(
                 relations_map[prequel_id]['Prequel'][0]
             )  # Recursively go down the tree following prequels
             #Log.Info("%s+%s+%s-%s" % (a,1,season_map[prequel_id]['max'],season_map[prequel_id]['min']))
             return (
                 a + 1 + season_map[prequel_id]['max'] -
                 season_map[prequel_id]['min'], 0
             ) if str(a).isdigit() else (
                 '', ''
             )  # Add 1 to the season number and start at episode 0
         return (2, 0) if season_map[prequel_id]['min'] == 1 else (
             '', ''
         )  # Root prequel is season 1 so start counting up. Else was a sequel of specials only so leave mapping alone
Example #15
def GetMetadata(AniDBid, MALid):
    Log.Info("=== AniList.GetMetadata() ===".ljust(157, '='))
    AniList_dict = {}

    # Try to match the AniDB id to an AniList id as it has a higher chance of being correct
    ALid = Dict(common.LoadFile(filename=AniDBid + '.json',
                                relativeDirectory=os.path.join(
                                    'AniList', 'json', 'AniDBid'),
                                url=ARM_SERVER_URL.format(id=AniDBid)),
                "anilist",
                default=None)

    Log.Info("AniDBid={}, MALid={}, ALid={}".format(AniDBid, MALid, ALid))
    if not MALid or not MALid.isdigit(): return AniList_dict

    Log.Info("--- series ---".ljust(157, "-"))

    # Use the AniList id if we got one, but fall back to the MAL id
    variables = {}
    if ALid is not None: SaveDict(ALid, variables, "id")
    else: SaveDict(int(MALid), variables, "malId")

    # Fetch data
    data = MakeGraphqlQuery(ANIME_DATA_DOCUMENT, variables)

    if data:
        Log.Info("--- images ---".ljust(157, "-"))

        posterUrl = Dict(data, "anime", "coverImage", "url")
        if posterUrl:
            Log.Info("[ ] poster: {}".format(posterUrl))
            SaveDict((os.path.join('AniList', 'poster',
                                   os.path.basename(posterUrl)),
                      common.poster_rank('AniList', 'posters'), None),
                     AniList_dict, 'posters', posterUrl)

        bannerUrl = Dict(data, "anime", "bannerImage")
        if bannerUrl:
            Log.Info("[ ] banner: {}".format(bannerUrl))
            SaveDict((os.path.join('AniList', 'banners',
                                   os.path.basename(bannerUrl)),
                      common.poster_rank('AniList', 'banners'), None),
                     AniList_dict, 'banners', bannerUrl)

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("AniList_dict: {}".format(DictString(AniList_dict, 4)))
    return AniList_dict
Example #16
def GetAniDBMovieSets():
    global AniDBMovieSets
    AniDBMovieSets = common.LoadFile(
        filename=os.path.basename(SCHUDLEE_MOVIESET),
        relativeDirectory="AnimeLists",
        url=SCHUDLEE_MOVIESET,
        cache=CACHE_1MONTH)
    if not AniDBMovieSets:
        raise Exception(
            "GetAniDBMovieSets() - Failed to load core file '%s'" %
            os.path.basename(SCHUDLEE_MOVIESET)
        )  #;  AniDB_Movie_Set = XML.ElementFromString("<anime-set-list></anime-set-list>")
    else:
        Log.Info("Entries loaded: {}, File: {}".format(len(AniDBMovieSets),
                                                       SCHUDLEE_MOVIESET))
Example #17
 def find_tvdb4_file(file_to_find):
     try:
         folder = common.GetMediaDir(media, movie)
         while folder and folder[-1] not in ["/", "\\"]:
             filename = os.path.join(folder, file_to_find)
             if os.path.exists(filename):
                 file = Core.storage.load(os.path.realpath(filename))
                 try:
                     return XML.ElementFromString(file)
                 except:
                     return file
             folder = os.path.dirname(folder)
         else:
             Log.Info("No '{}' file detected locally".format(file_to_find))
     except Exception as e:
         Log.Error(
             "Issues in finding setup info as directories have most likely changed post scan into Plex, Exception: '%s'"
             % e)
     return ""
Example #18
def anidb_ep(mappingList, season, episode):
  debug = False
  if debug:  Log.Info('[?] (#1) season: {}, episode: {}'.format(season, episode))

  # <mapping-list> <mapping anidbseason="0" tvdbseason="0">;1-5;2-6;</mapping>
  # <mapping-list> <mapping anidbseason="1" tvdbseason="5" start="13" end="24" offset="-12"/>
  ep_mapping = Dict(mappingList, 'TVDB', 's'+season+'e'+episode.split('-')[0])
  if ep_mapping:
    if debug:  Log.Info('[?] (#2) Exact mapping: {}'.format(ep_mapping))
    return ep_mapping[0], ep_mapping[1], ep_mapping[2]            #Lvl 3 & 2 direct ep mapping (ep or season with start-end range)
  
  # <mapping-list> <mapping anidbseason="1" tvdbseason="5" offset="-12"/>
  anidbid_list = Dict(mappingList, 'TVDB', 's'+season, default={})
  if debug:  Log.Info('[?] (#3) s{}: {}'.format(season, anidbid_list))
  for offset, anidbid in sorted(zip(anidbid_list.values(), anidbid_list.keys()), key=lambda x: common.natural_sort_key(x[0]), reverse=True):  #reverse value&index and sort per offset
    if debug:  Log.Info("[?] (#3) - offset: {}, anidbid: {}, int(episode.split('-')[0]): {}".format(offset, anidbid, int(episode.split('-')[0])))
    if int(episode.split('-')[0])> int(offset):  return '1', str(int(episode.split('-')[0])-int(offset)), anidbid   #Lvl 1 - defaulttvdbseason + offset
  
  # <anime anidbid="23" tvdbid="76885" defaulttvdbseason="1" episodeoffset="" tmdbid="" imdbid="">
  defaulttvdbseason, episodeoffset, s1e1_mapped = Dict(mappingList, 'defaulttvdbseason'), Dict(mappingList, 'episodeoffset'), Dict(mappingList, 's1e1_mapped')
  if debug:  Log.Info('[?] (#4) defaulttvdbseason: {}, episodeoffset: {}, s1e1_mapped: {}'.format(defaulttvdbseason, episodeoffset, s1e1_mapped))
  if season==defaulttvdbseason and not s1e1_mapped:
    return '1', str(int(episode)-int(episodeoffset)), ''
  
  # Map season 0 episodes directly to tvdb season 0 episodes
  # On condition of being the only anidb id mapped to the tvdbid, its set to season 1, and has no special mappings
  tvdbcount, s1_mapping = Dict(mappingList, 'tvdbcount', default=0), Dict(mappingList, 'TVDB', 's1')
  if debug:  Log.Info('[?] (#5) defaulttvdbseason: {}, episodeoffset: {}, s1e1_mapped: {}'.format(defaulttvdbseason, episodeoffset, s1e1_mapped))
  if season=="0" and tvdbcount==1 and s1_mapping: # Confirm only one entry and its 's1'
    for item in Dict(mappingList, 'TVDB'): # Also that there are no s0 mappings
      if item.startswith("s0"):
        if debug:  Log.Info('[?] (#5) Found: {}'.format(item))
        break
    else:  return season, episode, list(Dict(mappingList, 'TVDB', 's1').keys())[0]
  
  return '0', '0', 'xxxxxxx'
Example #19
def Search(results, media, lang, manual, movie):
    ''' AniDB Search: assign an anidbid to a series or movie
  '''
    Log.Info("=== AniDB.Search() ===".ljust(157, '='))
    FILTER_SEARCH_WORDS = [  ### These are words which cause extra noise due to being uninteresting for doing searches on, Lowercase only ####################################
        'to',
        'wa',
        'ga',
        'no',
        'age',
        'da',
        'chou',
        'super',
        'yo',
        'de',
        'chan',
        'hime',
        'ni',
        'sekai',  # Jp
        'a',
        'of',
        'an',
        'the',
        'motion',
        'picture',
        'special',
        'oav',
        'ova',
        'tv',
        'special',
        'eternal',
        'final',
        'last',
        'one',
        'movie',
        'me',
        'princess',
        'theater',
        'and',  # En Continued
        'le',
        'la',
        'un',
        'les',
        'nos',
        'vos',
        'des',
        'ses',
        'world',
        'in',
        'another',
        'this',
        'story',
        'life',
        'name',  # Fr
        'i',
        'ii',
        'iii',
        'iv',
        'v',
        'vi',
        'vii',
        'viii',
        'ix',
        'x',
        'xi',
        'xii',
        'xiii',
        'xiv',
        'xv',
        'xvi'
    ]  # Roman digits
    SPLIT_CHARS = [
        ';', ':', '*', '?', ',', '.', '~', '-', '\\', '/'
    ]  #Space is implied, characters forbidden by os filename limitations
    orig_title = media.title if movie else media.show
    orig_title_cleansed = common.cleanse_title(orig_title)
    Log.Info("orig_title: '{}', orig_title_cleansed: '{}'".format(
        orig_title, orig_title_cleansed))

    ### Full title search = 1.3s
    Log.Info("--- full title ---".ljust(157, '-'))
    best_aid, best_score, best_title, n = "", 0, "", 0
    start_time = time.time()
    Log.Info('len AniDBTitlesDB: {}'.format(len(AniDBTitlesDB)))
    for element in AniDBTitlesDB.xpath(
            u"/animetitles/anime/title[text()[contains(lower-case(.), '%s')]]"
            % orig_title.lower().replace("'", " ")):
        aid = element.getparent().get('aid', '')
        title = element.text
        if aid == best_aid and best_score >= 100: continue
        if orig_title == title: title_cleansed, score = title, 100
        elif orig_title.lower() == title.lower():
            title_cleansed, score = title.lower(), 99
        else:  #contained in title
            title_cleansed = common.cleanse_title(title)
            score1 = 100 * len(
                String.LongestCommonSubstring(orig_title_cleansed,
                                              title_cleansed)
            ) / max(len(title_cleansed), len(orig_title_cleansed)) - n if max(
                len(title_cleansed), len(orig_title_cleansed)) else 0
            score2 = 100 - 100 * Util.LevenshteinDistance(
                orig_title_cleansed, title_cleansed) / max(
                    len(title_cleansed), len(orig_title_cleansed)) - n if max(
                        len(title_cleansed), len(orig_title_cleansed)) else 0
            score = max(score1, score2)
        if score >= 100 and not aid == best_aid: n += 1
        results.Append(
            MetadataSearchResult(id="%s-%s" % ("anidb", aid),
                                 name="%s [%s-%s]" % (title, "anidb", aid),
                                 year=media.year,
                                 lang=lang,
                                 score=score))
        Log.Info(
            "[+] score: {:>3}, aid: {:>5}, title: '{}', title_cleansed: {}".
            format(score, aid, title, title_cleansed))
        if score > best_score:
            best_score, best_title, best_aid = score, title, aid
    if best_score:
        Log.Info(
            "[=] best_score: {:>3}, best_aid: {:>5}, best_title: {}".format(
                best_score, best_aid, best_title))
    Log.Info("elapsed_time: {:.3f}".format(time.time() - start_time))
    if best_score >= 90: return best_score, n

    ### Keyword match using Xpath
    Log.Info("--- keyword ---".ljust(157, '-'))
    words, words_skipped = [], []
    for i in SPLIT_CHARS:
        orig_title_cleansed = orig_title_cleansed.replace(i, " ")
    orig_title_cleansed = orig_title_cleansed.replace("'", '')
    for word in orig_title_cleansed.split():
        (words_skipped if word in FILTER_SEARCH_WORDS or len(word) <= 3 else
         words).append(word)
    if not words:
        words, words_skipped = orig_title_cleansed.split(), [
        ]  #Prevent CRITICAL Exception in the search function of agent named 'HamaTV', called with keyword arguments {'show': 'No 6', 'id': '20145', 'year': None} (most recent call last):
    Log.Info("Keyword Search - Words: {}, skipped: {}".format(
        str(words), str(words_skipped)))
    type_order = ('main', 'official', 'syn', 'short', '')
    best_score, best_title, best_aid, best_type, best_lang = 0, "", "", "", ""
    last_chance, best_score_entry = [], 0
    start_time = time.time()

    for element in AniDBTitlesDB.xpath(u"/animetitles/anime[title[{}]]".format(
            " or ".join([
                "contains(lower-case(text()), '{}')".format(x.lower())
                for x in words
            ]))):
        aid = element.get('aid', '')
        best_score_entry, best_title_entry, best_type_entry, best_lang_entry = 0, "", "", ""
        for element in element.xpath(u"title[%s]" % " or ".join(
            ["contains(lower-case(text()), '%s')" % x.lower()
             for x in words])):
            title = element.text
            Type = element.get('type', '')
            Lang = element.get('{http://www.w3.org/XML/1998/namespace}lang',
                               '')
            title_cleansed = common.cleanse_title(title)
            if title_cleansed == orig_title_cleansed:
                score = 98 if ';' not in title else 100
            else:
                score = WordsScore(
                    orig_title_cleansed.split(), title_cleansed
                )  # - type_order.index(Type)  #Movies can have same title
            if score > best_score_entry or score == best_score_entry and (
                    not best_type_entry or type_order.index(Type) <
                    type_order.index(best_type_entry)):
                best_score_entry, best_title_entry, best_type_entry, best_lang_entry, best_title_entry_cleansed = score, title, Type, Lang, title_cleansed
        if best_score_entry < 25:
            last_chance.append((best_score_entry, best_title_entry,
                                best_type_entry, best_lang_entry, aid))
            continue
        Log.Info('[-] score: {:>3}, aid: {:>5}, title: "{}"'.format(
            best_score_entry, aid, best_title_entry))
        #Log.Info("levenstein: {}".format(100 - 200 * Util.LevenshteinDistance(title_cleansed, orig_title_cleansed) / (len(title_cleansed) + len(orig_title_cleansed)) ))
        results.Append(
            MetadataSearchResult(
                id="%s-%s" % ("anidb", aid),
                name="{title} [{Type}({Lang})] [anidb-{aid}]".format(
                    title=best_title_entry,
                    aid=aid,
                    Type=best_type_entry,
                    Lang=best_lang_entry),
                year=media.year,
                lang=lang,
                score=best_score_entry))
        if best_score_entry > best_score:
            best_score, best_title, best_type, best_lang, best_aid = best_score_entry, best_title_entry, best_type_entry, best_lang_entry, aid
    if best_score < 50:  # Add back entries that scored below 25 if nothing scored above 50
        for best_score_entry, best_title_entry, best_type_entry, best_lang_entry, aid in last_chance:
            Log.Info('[-] score: {:>3}, aid: {:>5}, title: "{}"'.format(
                best_score_entry, aid, best_title_entry))
            results.Append(
                MetadataSearchResult(
                    id="%s-%s" % ("anidb", aid),
                    name="{title} [{Type}({Lang}): {aid}]".format(
                        title=best_title_entry,
                        aid=aid,
                        Type=best_type_entry,
                        Lang=best_lang_entry),
                    year=media.year,
                    lang=lang,
                    score=best_score_entry))
        if best_score_entry > best_score:
            best_score, best_title, best_type, best_lang, best_aid = best_score_entry, best_title_entry, best_type_entry, best_lang_entry, aid
    #Log.Info('           ---       -----         ---------------------------------------------------')
    #Log.Info('[=] score: {:>3}, aid: {:>5}, title: "{}"'.format(best_score, best_aid, best_title))
    Log.Info("elapsed_time: {:.3f}".format(time.time() - start_time))

    return best_score, n
Example #20
def GetMetadata(media, movie, error_log, source, AniDBid, TVDBid,
                AniDBMovieSets, mappingList):
    ''' Download metadata to dict_AniDB, ANNid, MALid
  '''
    Log.Info("=== AniDB.GetMetadata() ===".ljust(157, '='))
    AniDB_dict, ANNid, MALid = {}, "", ""
    original = AniDBid
    anidb_numbering = source == "anidb" and (
        movie or max(map(int, media.seasons.keys())) <= 1)
    language_posters = [
        language.strip()
        for language in Prefs['PosterLanguagePriority'].split(',')
    ]
    priority_posters = [
        provider.strip() for provider in Prefs['posters'].split(',')
    ]

    ### Build the list of anidbids for files present ####
    if source.startswith("tvdb") or source.startswith(
            "anidb") and not movie and max(map(int, media.seasons.keys(
            ))) > 1:  #multi anidbid required only for tvdb numbering
        full_array = [
            anidbid for season in Dict(mappingList, 'TVDB') or []
            for anidbid in Dict(mappingList, 'TVDB', season)
            if season and 'e' not in season and anidbid.isdigit()
        ]
        AniDB_array = {
            AniDBid: []
        } if Dict(mappingList,
                  'defaulttvdbseason') == '1' and source != 'tvdb4' else {}
        for season in sorted(
                media.seasons, key=common.natural_sort_key
        ) if not movie else []:  # For each season, media, then use metadata['season'][season]...
            for episode in sorted(media.seasons[season].episodes,
                                  key=common.natural_sort_key):
                if int(episode) > 99:
                    continue  # AniDB non-normal special (op/ed/t/o) that is not mappable
                if source == 'tvdb3' and season != 0:
                    new_season, new_episode, anidbid = AnimeLists.anidb_ep(
                        mappingList, season,
                        Dict(mappingList,
                             'absolute_map',
                             episode,
                             default=(None, episode))
                        [1])  # Pull absolute number then try to map
                elif source == 'tvdb4' and season != 0:
                    new_season, new_episode = Dict(mappingList,
                                                   'absolute_map',
                                                   episode,
                                                   default=(season, episode))
                    anidbid = 'UNKN'  # Not TVDB mapping. Use custom ASS mapping to pull season/episode
                else:
                    new_season, new_episode, anidbid = AnimeLists.anidb_ep(
                        mappingList, season, episode)  # Try to map
                numbering = 's{}e{}'.format(season, episode) + (
                    '(s{}e{})'.format(new_season, new_episode)
                    if season != new_season and episode != new_episode else '')
                if anidbid and not (new_season == '0' and new_episode == '0'):
                    SaveDict([numbering], AniDB_array, anidbid)
            else:
                continue
    elif source.startswith('anidb') and AniDBid != "":
        full_array, AniDB_array = [AniDBid], {AniDBid: []}
    else:
        full_array, AniDB_array = [], {}

    active_array = full_array if Dict(
        mappingList, 'possible_anidb3'
    ) or source in ("tvdb4", "tvdb6") else AniDB_array.keys(
    )  # anidb3(tvdb)/anidb4(tvdb6) for full relation_map data | tvdb4 bc the above will not be able to know the AniDBid
    Log.Info(
        "Source: {}, AniDBid: {}, Full AniDBids list: {}, Active AniDBids list: {}"
        .format(source, AniDBid, full_array, active_array))
    for anidbid in sorted(AniDB_array, key=common.natural_sort_key):
        Log.Info('[+] {:>5}: {}'.format(anidbid, AniDB_array[anidbid]))
    Log.Info('language_posters: {}'.format(language_posters))

    ### Build list_abs_eps for tvdb 3/4/5 ###
    list_abs_eps, list_sp_eps = {}, []
    if source in ('tvdb3', 'tvdb4'):
        for s in media.seasons:
            for e in media.seasons[s].episodes:
                if s == '0': list_sp_eps.append(e)
                else: list_abs_eps[e] = s
        Log.Info('Present abs eps: {}'.format(list_abs_eps))

    ### Load anidb xmls in tvdb numbering format if needed ###
    for AniDBid in sorted(active_array, key=common.natural_sort_key):
        is_primary_entry = AniDBid == original or len(active_array) == 1

        Log.Info(("--- %s ---" % AniDBid).ljust(157, '-'))
        Log.Info('AniDBid: {}, IsPrimary: {}, url: {}'.format(
            AniDBid, is_primary_entry, ANIDB_HTTP_API_URL + AniDBid))
        Log.Info(("--- %s.series ---" % AniDBid).ljust(157, '-'))

        xml, cache = None, CACHE_1DAY * 6
        xml_cache = common.LoadFileCache(filename=AniDBid + ".xml",
                                         relativeDirectory=os.path.join(
                                             "AniDB", "xml"))[0]
        if xml_cache:  # Pull the enddate and adjust max cache age based on series enddate in relation to now
            ed = GetXml(
                xml_cache,
                'enddate') or datetime.datetime.now().strftime("%Y-%m-%d")
            enddate = datetime.datetime.strptime(
                "{}-12-31".format(ed) if len(ed) == 4 else "{}-{}".format(
                    ed,
                    ([30, 31] if int(ed[-2:]) <= 7 else [31, 30]
                     )[int(ed[-2:]) %
                       2] if ed[-2:] != '02' else 28) if len(ed) == 7 else ed,
                '%Y-%m-%d')
            days_old = (datetime.datetime.now() - enddate).days
            if days_old > 1825:
                cache = CACHE_1DAY * 365  # enddate > 5 years ago => 1 year cache
            elif days_old > 30:
                cache = (
                    days_old * CACHE_1DAY * 365
                ) / 1825  # enddate > 30 days ago => (days_old/5yrs ended = x/1yrs cache)
        if AniDBBan:
            xml = xml_cache  # Ban has been hit in this process' life span (which is transient)
        else:
            xml = common.LoadFile(filename=AniDBid + ".xml",
                                  relativeDirectory=os.path.join(
                                      "AniDB", "xml"),
                                  url=ANIDB_HTTP_API_URL + AniDBid,
                                  cache=cache,
                                  sleep=6,
                                  throttle=['AniDB', CACHE_1HOUR, 100])
        if isinstance(xml, str) and 'banned' in xml:
            global AniDBBan
            AniDBBan = True  # Set ban hit on process level
        if AniDBBan:
            SaveDict(True, AniDB_dict, 'Banned')  # Set ban hit on series level

        if not xml or isinstance(xml, str):
            title, original_title, language_rank = GetAniDBTitle(
                AniDBTitlesDB.xpath(
                    '/animetitles/anime[@aid="{}"]/title'.format(AniDBid)))
            if is_primary_entry:
                Log.Info("[ ] title: {}".format(
                    SaveDict(title, AniDB_dict, 'title')))
                Log.Info("[ ] original_title: {}".format(
                    SaveDict(original_title, AniDB_dict, 'original_title')))
                Log.Info("[ ] language_rank: {}".format(
                    SaveDict(language_rank, AniDB_dict, 'language_rank')))

        elif xml:
            title, original_title, language_rank = GetAniDBTitle(
                xml.xpath('/anime/titles/title'))
            if is_primary_entry:  ### for each main anime AniDBid ###
                Log.Info("[ ] title: {}".format(
                    SaveDict(title, AniDB_dict, 'title')))
                Log.Info("[ ] original_title: {}".format(
                    SaveDict(original_title, AniDB_dict, 'original_title')))
                Log.Info("[ ] language_rank: {}".format(
                    SaveDict(language_rank, AniDB_dict, 'language_rank')))
                if SaveDict(GetXml(xml, 'startdate'), AniDB_dict,
                            'originally_available_at'):
                    Log.Info("[ ] originally_available_at: '{}'".format(
                        AniDB_dict['originally_available_at']))
                if SaveDict(summary_sanitizer(GetXml(
                        xml, 'description')), AniDB_dict, 'summary'
                            ) and not movie and not anidb_numbering and Dict(
                                mappingList,
                                'defaulttvdbseason').isdigit() and mappingList[
                                    'defaulttvdbseason'] in media.seasons:
                    SaveDict(AniDB_dict['summary'], AniDB_dict, 'seasons',
                             mappingList['defaulttvdbseason'], 'summary')

                Log.Info("[ ] rating: '{}'".format(
                    SaveDict(GetXml(xml, 'ratings/permanent'), AniDB_dict,
                             'rating')))

                ### Posters
                if GetXml(xml, 'picture'):
                    rank = 1
                    if 'en' in language_posters:
                        rank = (rank // 30) * 30 * language_posters.index(
                            'en') + rank % 30
                    if 'AniDB' in priority_posters:
                        rank = rank + 6 * priority_posters.index('AniDB')
                    AniDB_dict['posters'] = {
                        ANIDB_PIC_BASE_URL + GetXml(xml, 'picture'):
                        (os.path.join('AniDB', 'poster',
                                      GetXml(xml, 'picture')), rank, None)
                    }  # ANIDB_PIC_THUMB_URL.format(name=GetXml(xml, 'picture').split('.')[0])

                ### genre ###
                RESTRICTED_GENRE = {
                    "18 restricted": 'X',
                    "pornography": 'X',
                    "tv censoring": 'TV-MA',
                    "borderline p**n": 'TV-MA'
                }
                for tag in xml.xpath('tags/tag'):
                    if GetXml(tag, 'name') and tag.get(
                            'weight', '').isdigit() and int(
                                tag.get('weight', '') or '200') >= int(
                                    Prefs['MinimumWeight'] or '200'):
                        SaveDict([string.capwords(GetXml(tag, 'name'), '-')],
                                 AniDB_dict, 'genres')
                        if GetXml(tag, 'name').lower() in RESTRICTED_GENRE:
                            AniDB_dict['content_rating'] = RESTRICTED_GENRE[
                                GetXml(tag, 'name').lower()]
                if Dict(AniDB_dict, 'genres'): AniDB_dict['genres'].sort()
                SaveDict(
                    "Continuing" if GetXml(xml, 'Anime/enddate')
                    == "1970-01-01" else "Ended", AniDB_dict, 'status')
                Log.Info("[ ] genres ({}/{} above {} weight): {}".format(
                    len(Dict(AniDB_dict, 'genres')),
                    len(xml.xpath('tags/tag')),
                    int(Prefs['MinimumWeight'] or 200),
                    Dict(AniDB_dict, 'genres')))
                for element in AniDBMovieSets.xpath(
                        "/anime-set-list/set/anime"):
                    if element.get('anidbid') == AniDBid or element.get(
                            'anidbid') in full_array:
                        node = element.getparent()
                        title, main, language_rank = GetAniDBTitle(
                            node.xpath('titles')[0])
                        if title not in Dict(AniDB_dict,
                                             'collections',
                                             default=[]):
                            Log.Info(
                                "[ ] title: {}, main: {}, language_rank: {}".
                                format(title, main, language_rank))
                            SaveDict([title], AniDB_dict, 'collections')
                            Log.Info(
                                "[ ] collection: AniDBid '%s' is part of movie collection: '%s', related_anime_list: %s"
                                % (AniDBid, title, str(full_array)))
                if not Dict(AniDB_dict, 'collections'):
                    Log.Info(
                        "[ ] collection: AniDBid '%s' is not part of any collection, related_anime_list: %s"
                        % (AniDBid, str(full_array)))

                #roles  ### NEW, NOT IN Plex FrameWork Documentation 2.1.1 ###
                Log.Info(("--- %s.actors ---" % AniDBid).ljust(157, '-'))
                for role in xml.xpath(
                        'characters/character[(@type="secondary cast in") or (@type="main character in")]'
                ):
                    try:
                        if GetXml(role, 'seiyuu') and GetXml(role, 'name'):
                            role_dict = {
                                'role':
                                role.find('name').text,
                                'name':
                                role.find('seiyuu').text,
                                'photo':
                                ANIDB_PIC_BASE_URL +
                                role.find('seiyuu').get('picture')
                            }
                            SaveDict([role_dict], AniDB_dict, 'roles')
                            Log.Info(
                                '[ ] role: {:<20}, name: {:<20}, photo: {}'.
                                format(role_dict['role'], role_dict['name'],
                                       role_dict['photo']))
                    except Exception as e:
                        Log.Info("Seyiuu error: {}".format(e))

            ### Creators ###
            creator_tags = {
                "Animation Work": "studio",
                "Work": "studio",
                "Direction": "directors",
                "Series Composition": "producers",
                "Original Work": "writers",
                "Script": "writers",
                "Screenplay": "writers"
            }
            studios = {}
            creators = {}
            for creator in xml.xpath('creators/name'):
                for tag in creator_tags:
                    if tag != creator.get('type'): continue
                    if creator_tags[tag] == "studio":
                        studios[tag] = creator.text
                    else:
                        SaveDict([creator.text], creators, creator_tags[tag])
            if is_primary_entry:
                Log.Info("[ ] studio: {}".format(
                    SaveDict(
                        Dict(studios,
                             "Animation Work",
                             default=Dict(studios, "Work")), AniDB_dict,
                        'studio')))

            Log.Info("[ ] movie: {}".format(
                SaveDict(GetXml(xml, 'type') == 'Movie', AniDB_dict, 'movie')))
            ### Movie ###
            if movie:
                Log.Info("[ ] year: '{}'".format(
                    SaveDict(
                        GetXml(xml, 'startdate')[0:4], AniDB_dict, 'year')))

                if is_primary_entry:
                    for creator in creators:
                        Log.Info("[ ] {}: {}".format(
                            creator,
                            SaveDict(creators[creator], AniDB_dict, creator)))

                Log.Info(("--- %s.summary info ---" % AniDBid).ljust(157, '-'))

            ### Series ###
            else:
                ### Translate into season/episode mapping
                numEpisodes, totalDuration, mapped_eps, ending_table, op_nb = 0, 0, [], {}, 0
                specials = {
                    'S': [0, 'Special'],
                    'C': [100, 'Opening/Ending'],
                    'T': [200, 'Trailer'],
                    'P': [300, 'Parody'],
                    'O': [400, 'Other']
                }
                movie_ep_groups = {}
                ending_offset = 99
                missing = {'0': [], '1': []}

                ### Episodes (and specials) not always in right order ###
                Log.Info(("--- %s.episodes ---" % AniDBid).ljust(157, '-'))
                Log.Info("[ ] ep creators (creators tag): " + str(creators))
                for ep_obj in sorted(
                        xml.xpath('episodes/episode'),
                        key=lambda x: [
                            int(x.xpath('epno')[0].get('type')),
                            int(
                                x.xpath('epno')[0].text
                                if x.xpath('epno')[0].text.isdigit() else x.
                                xpath('epno')[0].text[1:])
                        ]):

                    ### Title, Season, Episode number, Specials
                    title, main, language_rank = GetAniDBTitle(
                        ep_obj.xpath('title'), [
                            language.strip() for language in
                            Prefs['EpisodeLanguagePriority'].split(',')
                        ])
                    if not anidb_numbering and title == 'Complete Movie':
                        title = ""  # For mapping use meanningful titles
                    epNum = ep_obj.xpath('epno')[0]
                    epNumType = epNum.get('type')
                    season = "1" if epNumType == "1" else "0"
                    if epNumType == "3" and ep_obj.xpath(
                            'title')[0].text.startswith('Ending') and int(
                                epNum.text[1:]) - 1 < ending_offset:
                        ending_offset = int(epNum.text[1:]) - 1
                    if epNumType == "3" and int(
                            epNum.text[1:]) > ending_offset:
                        episode = str(
                            int(epNum.text[1:]) + 150 -
                            ending_offset)  #shifted to 150 for 1st ending.
                    elif epNumType == "1":
                        episode = epNum.text
                    else:
                        episode = str(specials[epNum.text[0]][0] +
                                      int(epNum.text[1:]))
                    numbering = "s{}e{:>3}".format(season, episode)

                    #If tvdb numbering used, save anidb episode meta using tvdb numbering
                    if source.startswith("tvdb") or source.startswith(
                            "anidb") and not movie and max(
                                map(int, media.seasons.keys())) > 1:
                        season, episode = AnimeLists.tvdb_ep(
                            mappingList, season, episode, AniDBid)

                        # Get episode number to absolute number
                        if source in ('tvdb3',
                                      'tvdb4') and season not in ['-1', '0']:
                            if source == 'tvdb4' or season == '1':
                                ms, usl = (
                                    season, True) if source == 'tvdb3' else (
                                        Dict(mappingList, 'absolute_map',
                                             'max_season'),
                                        Dict(mappingList, 'absolute_map',
                                             'unknown_series_length'))
                                if ms and usl:
                                    season = Dict(mappingList,
                                                  'absolute_map',
                                                  episode,
                                                  default=(ms if usl else
                                                           str(int(ms) + 1),
                                                           None))[0]
                            else:
                                try:
                                    episode = list(
                                        Dict(mappingList,
                                             'absolute_map',
                                             default={}).keys())[list(
                                                 Dict(mappingList,
                                                      'absolute_map',
                                                      default={}).values()
                                             ).index((season, episode))]
                                except:
                                    pass

                        if not(season =='0' and episode in list_sp_eps) and \
                           not(source in ('tvdb3', 'tvdb4') and episode in list_abs_eps) and \
                           not(season in media.seasons and episode in media.seasons[season].episodes):
                            Log.Info(
                                '[ ] {} => s{:>1}e{:>3} epNumType: {}'.format(
                                    numbering, season, episode, epNumType))
                            continue

                        ### Series poster as season poster
                        if GetXml(xml, 'picture') and not Dict(
                                AniDB_dict, 'seasons', season, 'posters',
                                ANIDB_PIC_BASE_URL + GetXml(xml, 'picture')):
                            rank = 1
                            if 'en' in language_posters:
                                rank = (rank //
                                        30) * 30 * language_posters.index(
                                            'en') + rank % 30
                            if 'AniDB' in priority_posters:
                                rank = rank + 6 * priority_posters.index(
                                    'AniDB')
                            SaveDict(
                                (os.path.join('AniDB', 'poster',
                                              GetXml(xml,
                                                     'picture')), rank, None),
                                AniDB_dict, 'seasons', season, 'posters',
                                ANIDB_PIC_BASE_URL + GetXml(xml, 'picture'))

                    ### In AniDB numbering, Movie episode group, create key and create key in dict with empty list if doesn't exist ###
                    else:  #if source.startswith("anidb") and not movie and max(map(int, media.seasons.keys()))<=1:

                        ### Movie episode group, create key and create key in dict with empty list if doesn't exist ###
                        key = ''
                        if epNumType == '1' and GetXml(
                                xml, '/anime/episodecount') == '1' and GetXml(
                                    xml, '/anime/type') in ('Movie', 'OVA'):
                            key = '1' if title in (
                                'Complete Movie', 'OVA'
                            ) else title[-1] if title.startswith(
                                'Part ') and title[-1].isdigit() else ''  #'-1'
                            if key: SaveDict([], movie_ep_groups, key)

                        #Episode missing from disk
                        if not season in media.seasons or not episode in media.seasons[
                                season].episodes:
                            Log.Info(
                                '[ ] {} => s{:>1}e{:>3} air_date: {}'.format(
                                    numbering, season, episode,
                                    GetXml(ep_obj, 'airdate')))
                            current_air_date = GetXml(ep_obj,
                                                      'airdate').replace(
                                                          '-', '')
                            current_air_date = int(
                                current_air_date
                            ) if current_air_date.isdigit() and int(
                                current_air_date) > 10000000 else 99999999
                            if int(time.strftime(
                                    "%Y%m%d")) > current_air_date + 1:
                                if epNumType == '1' and key:
                                    SaveDict([numbering], movie_ep_groups, key)
                                elif epNumType in ['1', '2']:
                                    SaveDict([episode], missing, season)
                            continue

                    ### Episodes
                    SaveDict(language_rank, AniDB_dict, 'seasons', season,
                             'episodes', episode, 'language_rank')
                    SaveDict(title, AniDB_dict, 'seasons', season, 'episodes',
                             episode, 'title')
                    Log.Info(
                        '[X] {} => s{:>1}e{:>3} air_date: {} language_rank: {}, title: "{}"'
                        .format(numbering, season, episode,
                                GetXml(ep_obj, 'airdate'), language_rank,
                                title))

                    if GetXml(ep_obj, 'length').isdigit():
                        SaveDict(
                            int(GetXml(ep_obj, 'length')) * 1000 * 60,
                            AniDB_dict, 'seasons', season, 'episodes', episode,
                            'duration'
                        )  # AniDB stores it in minutes, Plex saves duration in millisecs
                        if season == "1":
                            numEpisodes, totalDuration = numEpisodes + 1, totalDuration + int(
                                GetXml(ep_obj, 'length'))

                    SaveDict(GetXml(ep_obj, 'rating'), AniDB_dict, 'seasons',
                             season, 'episodes', episode, 'rating')
                    SaveDict(GetXml(ep_obj,
                                    'airdate'), AniDB_dict, 'seasons', season,
                             'episodes', episode, 'originally_available_at')
                    ep_summary = SaveDict(
                        summary_sanitizer(GetXml(ep_obj,
                                                 'summary')), AniDB_dict,
                        'seasons', season, 'episodes', episode, 'summary')
                    Log.Info(' - [ ] summary: {}'.format(
                        (ep_summary[:200]
                         ).replace("\n", "\\n").replace("\r", "\\r") +
                        '..' if len(ep_summary) > 200 else ep_summary))
                    for creator in creators:
                        SaveDict(",".join(creators[creator]), AniDB_dict,
                                 'seasons', season, 'episodes', episode,
                                 creator)

                ### End of for ep_obj...
                Log.Info(("--- %s.summary info ---" % AniDBid).ljust(157, '-'))
                if SaveDict((int(totalDuration) / int(numEpisodes)) * 60 *
                            1000 if int(numEpisodes) else 0, AniDB_dict,
                            'duration'):
                    Log.Info(
                        "Duration: {}, numEpisodes: {}, average duration: {}".
                        format(str(totalDuration), str(numEpisodes),
                               AniDB_dict['duration']))

                ### AniDB numbering Missing Episodes ###
                if source.startswith("anidb") and not movie and max(
                        map(int, media.seasons.keys())) <= 1:
                    if movie_ep_groups:
                        Log.Info(
                            "Movie/OVA Ep Groups: %s" % movie_ep_groups
                        )  #movie_ep_groups: {'1': ['s1e1'], '3': ['s1e4', 's1e5', 's1e6'], '2': ['s1e3'], '-1': []}
                        SaveDict([
                            value for key in movie_ep_groups
                            for value in movie_ep_groups[key]
                            if 0 < len(movie_ep_groups[key]) < int(key)
                        ], missing, '1')
                    for season in sorted(missing):
                        missing_eps = sorted(missing[season],
                                             key=common.natural_sort_key)
                        Log.Info('Season: {} Episodes: {} not on disk'.format(
                            season, missing_eps))
                        if missing_eps:
                            error_log[
                                'Missing Specials' if season ==
                                '0' else 'Missing Episodes'].append(
                                    "AniDBid: %s | Title: '%s' | Missing Episodes: %s"
                                    % (common.WEB_LINK %
                                       (common.ANIDB_SERIE_URL + AniDBid,
                                        AniDBid), AniDB_dict['title'],
                                       str(missing_eps)))

            ### End of if not movie ###

            # Generate relations_map for anidb3/4(tvdb1/6) modes
            for relatedAnime in xml.xpath('/anime/relatedanime/anime'):
                if relatedAnime.get('id') not in Dict(mappingList,
                                                      'relations_map',
                                                      AniDBid,
                                                      relatedAnime.get('type'),
                                                      default=[]):
                    SaveDict([relatedAnime.get('id')],
                             mappingList, 'relations_map', AniDBid,
                             relatedAnime.get('type'))

            # External IDs
            ANNid = GetXml(
                xml,
                "/anime/resources/resource[@type='1']/externalentity/identifier"
            )
            MALid = GetXml(
                xml,
                "/anime/resources/resource[@type='2']/externalentity/identifier"
            )
            #ANFOid = GetXml(xml, "/anime/resources/resource[@type='3']/externalentity/identifier"), GetXml(xml, "/anime/resources/resource[@type='3']/externalentity/identifier")

            # Logs
            if not Dict(AniDB_dict, 'summary'):
                error_log['AniDB summaries missing'].append(
                    "AniDBid: %s" %
                    (common.WEB_LINK %
                     (common.ANIDB_SERIE_URL + AniDBid, AniDBid) +
                     " | Title: '%s'" % Dict(AniDB_dict, 'title')))
            if not Dict(AniDB_dict, 'posters'):
                error_log['AniDB posters missing'].append(
                    "AniDBid: %s" %
                    (common.WEB_LINK %
                     (common.ANIDB_SERIE_URL + AniDBid, AniDBid) +
                     " | Title: '%s'" % Dict(AniDB_dict, 'title')))
            #if not Dict(AniDB_dict, 'studio' ):                                                                                          error_log['anime-list studio logos'].append("AniDBid: %s | Title: '%s' | AniDB has studio '%s' and anime-list has '%s' | "    % (common.WEB_LINK % (ANIDB_SERIE_URL % AniDBid, AniDBid), title, metadata.studio, mapping_studio) + common.WEB_LINK % (ANIDB_TVDB_MAPPING_FEEDBACK % ("aid:" + metadata.id + " " + title, String.StripTags( XML.StringFromElement(xml, encoding='utf8'))), "Submit bug report (need GIT account)"))
            #if metadata.studio       and 'studio' in AniDB_dict and AniDB_dict ['studio'] and AniDB_dict ['studio'] != metadata.studio:  error_log['anime-list studio logos'].append("AniDBid: %s | Title: '%s' | AniDB has studio '%s' and anime-list has '%s' | "    % (common.WEB_LINK % (ANIDB_SERIE_URL % AniDBid, AniDBid), title, metadata.studio, mapping_studio) + common.WEB_LINK % (ANIDB_TVDB_MAPPING_FEEDBACK % ("aid:" + metadata.id + " " + title, String.StripTags( XML.StringFromElement(xml, encoding='utf8'))), "Submit bug report (need GIT account)"))
            #if metadata.studio == "" and 'studio' in AniDB_dict and AniDB_dict ['studio'] == "":                                         error_log['anime-list studio logos'].append("AniDBid: %s | Title: '%s' | AniDB and anime-list are both missing the studio" % (common.WEB_LINK % (ANIDB_SERIE_URL % AniDBid, AniDBid), title) )

            Log.Info("ANNid: '%s', MALid: '%s', xml loaded: '%s'" %
                     (ANNid, MALid, str(xml is not None)))

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("relations_map: {}".format(
        DictString(Dict(mappingList, 'relations_map', default={}), 1)))
    Log.Info("AniDB_dict: {}".format(DictString(AniDB_dict, 4)))
    return AniDB_dict, ANNid, MALid
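A note on the special-episode numbering used above: AniDB prefixes specials with S/C/T/P/O, and the snippet maps them into season 0 with the 0/100/200/300/400 offsets from the specials table, shifting endings to the 150+ range once the first "Ending"-titled credit is seen. A minimal standalone sketch of that scheme; special_episode_number and its defaults are illustrative, not part of the plugin:

# Standalone sketch of the AniDB special numbering used above (illustrative only).
# Specials keep their offset from the table; credits ('C') past the first ending
# are shifted to the 150+ range, mirroring the ending_offset logic.
SPECIALS = {'S': 0, 'C': 100, 'T': 200, 'P': 300, 'O': 400}

def special_episode_number(epno, ending_offset=99):
    prefix, number = epno[0], int(epno[1:])
    if prefix == 'C' and number > ending_offset:
        return number + 150 - ending_offset   # 1st ending lands on 151
    return SPECIALS[prefix] + number

print(special_episode_number('S2'))                    # 2   -> s0e2   (special)
print(special_episode_number('C1'))                    # 101 -> s0e101 (opening)
print(special_episode_number('C3', ending_offset=2))   # 151 -> s0e151 (1st ending)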
Example #21
def GetMetadata(movie, MALid):
    Log.Info("=== MyAnimeList.GetMetadata() ===".ljust(157, '='))
    MAL_HTTP_API_URL = "http://fribbtastic-api.net/fribbtastic-api/services/anime?id="
    MAL_PREFIX = "https://myanimelist.cdn-dena.com"  # Some links in the XML will come from TheTVDB, not adding those....
    MyAnimeList_dict = {}

    Log.Info("MALid: '%s'" % MALid)
    if not MALid or not MALid.isdigit(): return MyAnimeList_dict

    Log.Info("--- series ---".ljust(157, '-'))
    xml = common.LoadFile(filename=MALid + ".xml",
                          relativeDirectory=os.path.join('MyAnimeList', 'xml'),
                          url=MAL_HTTP_API_URL + MALid,
                          cache=CACHE_1DAY * 7)
    if isinstance(xml, str):
        Log.Error('Invalid str returned: "{}"'.format(xml))
    elif xml:
        Log.Info("[ ] title: {}".format(
            SaveDict(GetXml(xml, 'title'), MyAnimeList_dict, 'title')))
        Log.Info("[ ] summary: {}".format(
            SaveDict(GetXml(xml, 'synopsis'), MyAnimeList_dict, 'summary')))
        Log.Info("[ ] score: {}".format(
            SaveDict(GetXml(xml, 'rating'), MyAnimeList_dict, 'score')))
        #Log.Info("[ ] rating: {}"                 .format(SaveDict( GetXml(xml, 'content_rating').split(" ")[0], MyAnimeList_dict, 'rating'   )))
        Log.Info("[ ] originally_available_at: {}".format(
            SaveDict(GetXml(xml, 'firstAired'), MyAnimeList_dict,
                     'originally_available_at')))

        #for item in xml.xpath('//anime/genres/genre' or []):  SaveDict([item.text], MyAnimeList_dict, 'genres')
        if GetXml(xml, '//anime/genres/genre'):
            Log.Info("[ ] genres: {}".format(
                SaveDict(
                    sorted([
                        item.text for item in xml.xpath('//anime/genres/genre')
                    ]), MyAnimeList_dict, 'genres')))
        if GetXml(xml, 'status') == 'Currently Airing':
            Log.Info("[ ] status: {}".format(
                SaveDict("Continuing", MyAnimeList_dict, 'status')))
        if GetXml(xml, 'status') == 'Finished Airing':
            Log.Info("[ ] status: {}".format(
                SaveDict("Ended", MyAnimeList_dict, 'status')))

        Log.Info("--- episodes ---".ljust(157, '-'))
        for item in xml.xpath('//anime/episodes/episode') or []:
            ep_number, ep_title, ep_air = GetXml(item, 'episodeNumber'), GetXml(
                item, 'engTitle'), GetXml(item, 'aired')
            Log.Info('[ ] s1e{:>3} air_date: {}, title: "{}"'.format(
                ep_number, ep_air, ep_title))
            SaveDict(ep_title, MyAnimeList_dict, 'seasons', "1", 'episodes',
                     ep_number, 'title')
            SaveDict(ep_air, MyAnimeList_dict, 'seasons', "1", 'episodes',
                     ep_number, 'originally_available_at')

        Log.Info("--- images ---".ljust(157, '-'))
        for item in xml.xpath('//anime/covers/cover'):
            Log.Info("[ ] poster: {}".format(
                SaveDict(
                    ("MyAnimeList/" + "/".join(item.text.split('/')[3:]), 50,
                     None) if item.text.startswith(MAL_PREFIX) else "",
                    MyAnimeList_dict, 'posters', item.text)))
        for item in xml.xpath('//anime/backgrounds/background'):
            Log.Info("[ ] art: {}".format(
                SaveDict(
                    ("MyAnimeList/" + "/".join(item.text.split('/')[3:]), 50,
                     None) if item.text.startswith(MAL_PREFIX) else "",
                    MyAnimeList_dict, 'art', item.text)))
        for item in xml.xpath('//anime/banners/banner'):
            Log.Info("[ ] banner: {}".format(
                SaveDict(
                    ("MyAnimeList/" + "/".join(item.text.split('/')[3:]), 50,
                     None) if item.text.startswith(MAL_PREFIX) else "",
                    MyAnimeList_dict, 'banners', item.text)))

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("MyAnimeList_dict: {}".format(DictString(MyAnimeList_dict, 4)))
    return MyAnimeList_dict
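The SaveDict/Dict helpers from common.py do the heavy lifting in every snippet here: SaveDict writes a value at a nested key path (creating intermediate dicts) and Dict reads one back with a default. The real helpers have more behaviour (e.g. skipping empty values, merging lists), so the following is only a rough sketch of how they are used above:

# Rough sketch of the plugin's nested-dict helpers (the real ones live in common.py).
def SaveDict(value, target, *keys):
    if not value and value != 0:  return value           # the real helper skips empty values
    for key in keys[:-1]:  target = target.setdefault(key, {})
    if isinstance(value, list):  target.setdefault(keys[-1], []).extend(value)  # lists are merged
    else:                        target[keys[-1]] = value
    return value

def Dict(source, *keys, **kwargs):
    for key in keys:
        if isinstance(source, dict) and key in source:  source = source[key]
        else:                                           return kwargs.get('default', '')
    return source

MyAnimeList_dict = {}
SaveDict("The Beginning", MyAnimeList_dict, 'seasons', "1", 'episodes', "1", 'title')
print(Dict(MyAnimeList_dict, 'seasons', "1", 'episodes', "1", 'title'))   # The Beginning
print(Dict(MyAnimeList_dict, 'seasons', "0", default={}))                 # {}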
Example #22
def GetMetadata(media, movie, error_log, source, AniDBid, TVDBid, AniDBMovieSets, mappingList):
  ''' Download metadata to dict_AniDB, ANNid, MALid
  '''
  Log.Info("=== AniDB.GetMetadata() ===".ljust(157, '='))
  ANIDB_HTTP_API_URL       = 'http://api.anidb.net:9001/httpapi?request=anime&client=hama&clientver=1&protover=1&aid='
  ANIDB_PIC_BASE_URL       = 'http://img7.anidb.net/pics/anime/'                                                                # AniDB picture directory
  ANIDB_PIC_THUMB_URL      = 'http://img7.anidb.net/pics/anime/thumbs/150/{}.jpg-thumb.jpg' 
  AniDB_dict, ANNid, MALid = {}, "", ""
  original                 = AniDBid
  language_posters         = [language.strip() for language in Prefs['PosterLanguagePriority'].split(',')]
  priority_posters         = [provider.strip() for provider in Prefs['posters'               ].split(',')]
  
  ### Build the list of anidbids for files present ####
  if source.startswith("tvdb") or source.startswith("anidb") and not movie and max(map(int, media.seasons.keys()))>1:  #multi anidbid required only for tvdb numbering
    full_array  = [ anidbid for season in Dict(mappingList, 'TVDB') or [] for anidbid in Dict(mappingList, 'TVDB', season) if season and 'e' not in season and anidbid.isdigit() ]
    AniDB_array = { AniDBid: [] } if Dict(mappingList, 'defaulttvdbseason')=='1' or Dict(mappingList, 'TVDB', 'sa') else {}
    for season in sorted(media.seasons, key=common.natural_sort_key) if not movie else []:  # For each season, media, then use metadata['season'][season]...
      for episode in sorted(media.seasons[season].episodes, key=common.natural_sort_key):
        new_season, new_episode, anidbid = AnimeLists.anidb_ep(mappingList, season, episode)
        numbering                        = 's{}e{}'.format(season, episode)
        if anidbid and not (new_season=='0' and new_episode=='0'):  SaveDict([numbering], AniDB_array, anidbid)
      else:  continue
  elif source.startswith('anidb') and AniDBid != "":  full_array, AniDB_array = [AniDBid], {AniDBid:[]}
  else:                                               full_array, AniDB_array = [], {}
  Log.Info("AniDBid: {}, AniDBids list: {}, source: {}".format(AniDBid, full_array, source))
  for anidbid in AniDB_array:
    Log.Info('[+] {:>5}: {}'.format(anidbid, AniDB_array[anidbid]))
  Log.Info('language_posters: {}'.format(language_posters))
  
  ### Load anidb xmls in tvdb numbering format if needed ###
  for AniDBid in full_array:
    Log.Info(("--- %s ---" % AniDBid).ljust(157, '-'))
    Log.Info('AniDBid: {}, url: {}'.format(AniDBid, ANIDB_HTTP_API_URL+AniDBid))
    Log.Info(("--- %s.series ---" % AniDBid).ljust(157, '-'))
    xml = common.LoadFile(filename=AniDBid+".xml", relativeDirectory=os.path.join("AniDB", "xml"), url=ANIDB_HTTP_API_URL+AniDBid)  # Per-anime AniDB XML, cached locally by common.LoadFile

    if not xml or isinstance(xml, str):
      if not xml:               SaveDict(True, AniDB_dict, 'Banned')
      if isinstance(xml, str):  Log.Error('Invalid str returned: "{}"'.format(xml))

      title, original_title, language_rank = GetAniDBTitle(AniDBTitlesDB.xpath('/animetitles/anime[@aid="{}"]/title'.format(AniDBid)))
      if AniDBid==original or len(full_array)==1:
        Log.Info("[ ] title: {}"         .format(SaveDict(title,          AniDB_dict, 'title'         )))
        Log.Info("[ ] original_title: {}".format(SaveDict(original_title, AniDB_dict, 'original_title')))
        Log.Info("[ ] language_rank: {}" .format(SaveDict(language_rank,  AniDB_dict, 'language_rank' )))

    elif xml:
      title, original_title, language_rank = GetAniDBTitle(xml.xpath('/anime/titles/title'))
      if AniDBid==original or len(full_array)==1: #Dict(mappingList, 'poster_id_array', TVDBid, AniDBid)[0]in ('1', 'a'):  ### for each main anime AniDBid ###
        Log.Info("[ ] title: {}"         .format(SaveDict(title,          AniDB_dict, 'title'         )))
        Log.Info("[ ] original_title: {}".format(SaveDict(original_title, AniDB_dict, 'original_title')))
        Log.Info("[ ] language_rank: {}" .format(SaveDict(language_rank,  AniDB_dict, 'language_rank' )))
        if SaveDict( GetXml(xml, 'startdate'  ), AniDB_dict, 'originally_available_at'):  Log.Info("[ ] originally_available_at: '{}'".format(AniDB_dict['originally_available_at']))
        if SaveDict(summary_sanitizer(GetXml(xml, 'description')), AniDB_dict, 'summary') and not movie and Dict(mappingList, 'defaulttvdbseason').isdigit() and mappingList['defaulttvdbseason'] in media.seasons:
          SaveDict(AniDB_dict['summary'], AniDB_dict, 'seasons', mappingList['defaulttvdbseason'], 'summary') 
            
        Log.Info("[ ] rating: '{}'".format(SaveDict( GetXml(xml, 'ratings/temporary'), AniDB_dict, 'rating')))
        
        ### Posters
        if GetXml(xml, 'picture'):
          rank = 1
          if 'en'     in language_posters:  rank = (rank//30)*30*language_posters.index('en')+rank%30
          if 'AniDB'  in priority_posters:  rank = rank+ 6*priority_posters.index('AniDB')
          AniDB_dict['posters'] = {ANIDB_PIC_BASE_URL + GetXml(xml, 'picture'): ( os.path.join('AniDB', 'poster', GetXml(xml, 'picture')), rank, ANIDB_PIC_THUMB_URL.format(GetXml(xml, 'picture').split('.')[0]))}
        
        ### genre ###
        RESTRICTED_GENRE     = {"18 restricted": 'X', "pornography": 'X', "tv censoring": 'TV-MA', "borderline p**n": 'TV-MA'}
        for tag in xml.xpath('tags/tag'):
          if GetXml(tag, 'name') and tag.get('weight', '').isdigit() and int(tag.get('weight', '') or '200') >= int(Prefs['MinimumWeight'] or '200'):
            SaveDict( [string.capwords(GetXml(tag, 'name'), '-')], AniDB_dict, 'genres')
            if GetXml(tag, 'name').lower() in RESTRICTED_GENRE:  AniDB_dict['content_rating'] = RESTRICTED_GENRE[ GetXml(tag, 'name').lower() ]
        if Dict(AniDB_dict, 'genres'): AniDB_dict['genres'].sort()
        SaveDict( "Continuing" if GetXml(xml, 'Anime/enddate')=="1970-01-01" else "Ended", AniDB_dict, 'status')
        Log.Info("[ ] genres ({}/{} above {} weight): {}".format(len(Dict(AniDB_dict, 'genres')), len(xml.xpath('tags/tag')), int(Prefs['MinimumWeight'] or 200), Dict(AniDB_dict, 'genres')))
        for element in AniDBMovieSets.xpath("/anime-set-list/set/anime"):
          if element.get('anidbid') == AniDBid or element.get('anidbid') in full_array:
            node              = element.getparent()
            title, main, language_rank = GetAniDBTitle(node.xpath('titles')[0])
            if title not in Dict(AniDB_dict, 'collections', default=[]):
              Log.Info("[ ] title: {}, main: {}, language_rank: {}".format(title, main, language_rank))
              SaveDict([title], AniDB_dict, 'collections')
              Log.Info("[ ] collection: AniDBid '%s' is part of movie collection: '%s', related_anime_list: %s" % (AniDBid, title, str(full_array)))
        if not Dict(AniDB_dict, 'collections'):  Log.Info("[ ] collection: AniDBid '%s' is not part of any collection, related_anime_list: %s" % (AniDBid, str(full_array))) 
      
        #roles  ### NEW, NOT IN Plex FrameWork Documentation 2.1.1 ###
        Log.Info(("--- %s.actors ---" % AniDBid).ljust(157, '-'))
        for role in xml.xpath('characters/character[(@type="secondary cast in") or (@type="main character in")]'):
          try:
            if GetXml(role, 'seiyuu') and GetXml(role, 'name'):  
              role_dict = {'role': role.find('name').text, 'name': role.find('seiyuu').text, 'photo': ANIDB_PIC_BASE_URL + role.find('seiyuu').get('picture')}
              SaveDict([role_dict], AniDB_dict, 'roles')
              Log.Info('[ ] role: {:<20}, name: {:<20}, photo: {}'.format(role_dict['role'], role_dict['name'], role_dict['photo']))
          except Exception as e:  Log.Info("Seyiuu error: {}".format(e))
        
      if  movie:
        Log.Info("[ ] year: '{}'".format(SaveDict(GetXml(xml, 'startdate')[0:4], AniDB_dict, 'year')))
        Log.Info(("--- %s.summary info ---" % AniDBid).ljust(157, '-'))
          
      ### Series ###
      else:
        
        ### not listed for serie but is for eps
        roles    = { "Animation Work":"studio", "Direction":"directors", "Series Composition":"producers", "Original Work":"writers", "Script":"writers", "Screenplay":"writers" }
        ep_roles = {}
        for creator in xml.xpath('creators/name'):
          for role in roles: 
            if not role in creator.get('type'):  continue
            if roles[role]=="studio":  SaveDict(creator.text, AniDB_dict, 'studio')
            else:                      SaveDict([creator.text], ep_roles, roles[role])
        Log.Info("[ ] roles (creators tag): " +str(ep_roles))
        if SaveDict(GetXml(xml, 'type')=='Movie', AniDB_dict, 'movie'):  Log.Info("'movie': '{}'".format(AniDB_dict['movie']))
      
        ### Translate into season/episode mapping
        numEpisodes, totalDuration, mapped_eps, ending_table, op_nb = 0, 0, [], {}, 0 
        specials = {'S': [0, 'Special'], 'C': [100, 'Opening/Ending'], 'T': [200, 'Trailer'], 'P': [300, 'Parody'], 'O': [400, 'Other']}
        movie_ep_groups = {}
        missing={'0': [], '1':[]}
                
        ### Episodes (and specials) not always in right order ###
        Log.Info(("--- %s.episodes ---" % AniDBid).ljust(157, '-'))
        ending_offset = 99
        for ep_obj in sorted(xml.xpath('episodes/episode'), key=lambda x: [int(x.xpath('epno')[0].get('type')), int(x.xpath('epno')[0].text if x.xpath('epno')[0].text.isdigit() else x.xpath('epno')[0].text[1:])]):
          
          ### Title, Season, Episode number, Specials
          title, main, language_rank = GetAniDBTitle (ep_obj.xpath('title'), [language.strip() for language in Prefs['EpisodeLanguagePriority'].split(',')])
          epNum     = ep_obj.xpath('epno')[0]
          epNumType = epNum.get('type')
          season    = "1" if epNumType == "1" else "0"
          if   epNumType=="3" and ep_obj.xpath('title')[0].text.startswith('Ending') and int(epNum.text[1:])-1<ending_offset:  ending_offset = int(epNum.text[1:])-1
          if   epNumType=="3" and int(epNum.text[1:])>ending_offset:  episode = str(int(epNum.text[1:])+150-ending_offset)  #shifted to 150 for 1st ending.  
          elif epNumType=="1":                                        episode = epNum.text
          else:                                                       episode = str( specials[ epNum.text[0] ][0] + int(epNum.text[1:]))
          numbering = "s{}e{:>3}".format(season, episode)
          
          #If tvdb numbering used, save anidb episode meta using tvdb numbering
          if source.startswith("tvdb") or source.startswith("anidb") and not movie and max(map(int, media.seasons.keys()))>1:
            season, episode = AnimeLists.tvdb_ep(mappingList, season, episode, AniDBid) ###Broken for tvdbseason='a'

            # Get season from absolute number OR convert episode number to absolute number
            if source in ('tvdb3', 'tvdb4') and season not in ('-1', '0'):
              if season=='a' or source=='tvdb4':  season = Dict(mappingList, 'absolute_map', episode, default=(season, episode))[0]
              elif episode!='0':
                try:  episode = list(Dict(mappingList, 'absolute_map', default={}).keys())[list(Dict(mappingList, 'absolute_map', default={}).values()).index((season, episode))]
                except Exception as e:  Log.Error("Exception: {}".format(e))

            if season=='0' and episode=='0' or not season in media.seasons or not episode in media.seasons[season].episodes:   Log.Info('[ ] {} => s{:>1}e{:>3} epNumType: {}'.format(numbering, season, episode, epNumType));  continue
            
            ### Series poster as season poster
            if GetXml(xml, 'picture') and not Dict(AniDB_dict, 'seasons', season, 'posters', ANIDB_PIC_BASE_URL + GetXml(xml, 'picture')):
              rank = 1
              if 'en'     in language_posters:  rank = (rank//30)*30*language_posters.index('en')+rank%30
              if 'AniDB'  in priority_posters:  rank = rank+ 6*priority_posters.index('AniDB')
              SaveDict((os.path.join('AniDB', 'poster', GetXml(xml, 'picture')), rank, ANIDB_PIC_THUMB_URL.format(GetXml(xml, 'picture').split('.')[0])), AniDB_dict, 'seasons', season, 'posters', ANIDB_PIC_BASE_URL + GetXml(xml, 'picture'))

          ### In AniDB numbering, Movie episode group, create key and create key in dict with empty list if doesn't exist ###
          else:  #if source.startswith("anidb") and not movie and max(map(int, media.seasons.keys()))<=1:
                     
            ### Movie episode group, create key and create key in dict with empty list if doesn't exist ###
            key = ''
            if epNumType=='1' and GetXml(xml, '/anime/episodecount')=='1' and GetXml(xml, '/anime/type') in ('Movie', 'OVA'):
              key = '1' if title in ('Complete Movie', 'OVA') else title[-1] if title.startswith('Part ') and title[-1].isdigit() else '' #'-1'
              if key:  SaveDict([], movie_ep_groups, key)
            
            #Episode missing from disk
            if not season in media.seasons or not episode in media.seasons[season].episodes:
              Log.Info('[ ] {} => s{:>1}e{:>3} air_date: {}'.format(numbering, season, episode, GetXml(ep_obj, 'airdate')))
              current_air_date = GetXml(ep_obj, 'airdate').replace('-','')
              current_air_date = int(current_air_date) if current_air_date.isdigit() and int(current_air_date) > 10000000 else 99999999
              if int(time.strftime("%Y%m%d")) > current_air_date+1:
                if   epNumType == '1' and key:  SaveDict([numbering], movie_ep_groups, key   )
                elif epNumType in ['1', '2']:   SaveDict([episode],   missing,         season)
              continue
                    
          ### Episodes
          SaveDict(language_rank, AniDB_dict, 'seasons', season, 'episodes', episode, 'language_rank')
          SaveDict(title,         AniDB_dict, 'seasons', season, 'episodes', episode, 'title'        )
          Log.Info('[X] {} => s{:>1}e{:>3} air_date: {} language_rank: {}, title: "{}"'.format(numbering, season, episode, GetXml(ep_obj, 'airdate'), language_rank, title))
          
          if GetXml(ep_obj, 'length').isdigit():
            SaveDict(int(GetXml(ep_obj, 'length'))*1000*60, AniDB_dict, 'seasons', season, 'episodes', episode, 'duration')  # AniDB stores it in minutes, Plex saves duration in millisecs
            if season == "1":  numEpisodes, totalDuration = numEpisodes+1, totalDuration + int(GetXml(ep_obj, 'length'))
          
          SaveDict(GetXml(ep_obj, 'rating' ), AniDB_dict, 'seasons', season, 'episodes', episode, 'rating'                 )
          SaveDict(GetXml(ep_obj, 'airdate'), AniDB_dict, 'seasons', season, 'episodes', episode, 'originally_available_at')
          if SaveDict(summary_sanitizer(GetXml(ep_obj, 'summary')), AniDB_dict, 'seasons', season, 'episodes', episode, 'summary'):  Log.Info(" - [ ] summary: {}".format(Dict(AniDB_dict, 'seasons', season, 'episodes', episode, 'summary')))
          #for role in ep_roles: SaveDict(",".join(ep_roles[role]), AniDB_dict, 'seasons', season, 'episodes', episode, role)
            #Log.Info("role: '%s', value: %s " % (role, str(ep_roles[role])))
                  
        ### End of for ep_obj...
        Log.Info(("--- %s.summary info ---" % AniDBid).ljust(157, '-'))
        if SaveDict(int(totalDuration)/int(numEpisodes)*60*1000 if int(numEpisodes) else 0, AniDB_dict, 'duration'):  # average season-1 duration in millisecs, matching the per-episode conversion
          Log.Info("Duration: {}, numEpisodes: {}, average duration: {}".format(str(totalDuration), str(numEpisodes), AniDB_dict['duration']))

        ### AniDB numbering Missing Episodes ###
        if source.startswith("anidb") and not movie and max(map(int, media.seasons.keys()))<=1:
          if movie_ep_groups:
            Log.Info("Movie/OVA Ep Groups: %s" % movie_ep_groups)  #movie_ep_groups: {'1': ['s1e1'], '3': ['s1e4', 's1e5', 's1e6'], '2': ['s1e3'], '-1': []}
            SaveDict([value for key in movie_ep_groups for value in movie_ep_groups[key] if 0 < len(movie_ep_groups[key]) < int(key)], missing, '1')
          for season in sorted(missing):
            missing_eps = sorted(missing[season], key=common.natural_sort_key)
            Log.Info('Season: {} Episodes: {} not on disk'.format(season, missing_eps))
            if missing_eps:  error_log['Missing Specials' if season=='0' else 'Missing Episodes'].append("AniDBid: %s | Title: '%s' | Missing Episodes: %s" % (common.WEB_LINK % (common.ANIDB_SERIE_URL + AniDBid, AniDBid), AniDB_dict['title'], str(missing_eps)))
          
      ### End of if not movie ###
    
      # Generate relations_map for anidb3/4(tvdb1/6) modes
      for relatedAnime in xml.xpath('/anime/relatedanime/anime'):
        if relatedAnime.get('id') not in Dict(mappingList, 'relations_map', AniDBid, relatedAnime.get('type'), default=[]): SaveDict([relatedAnime.get('id')], mappingList, 'relations_map', AniDBid, relatedAnime.get('type'))

      # External IDs
      ANNid = GetXml(xml, "/anime/resources/resource[@type='1']/externalentity/identifier")
      MALid = GetXml(xml, "/anime/resources/resource[@type='2']/externalentity/identifier")
      #ANFOid = GetXml(xml, "/anime/resources/resource[@type='3']/externalentity/identifier"), GetXml(xml, "/anime/resources/resource[@type='3']/externalentity/identifier")
    
      # Logs
      if not Dict(AniDB_dict, 'summary'):  error_log['AniDB summaries missing'].append("AniDBid: %s" % (common.WEB_LINK % (common.ANIDB_SERIE_URL + AniDBid, AniDBid) + " | Title: '%s'" % Dict(AniDB_dict, 'title')))
      if not Dict(AniDB_dict, 'posters'):  error_log['AniDB posters missing'  ].append("AniDBid: %s" % (common.WEB_LINK % (common.ANIDB_SERIE_URL + AniDBid, AniDBid) + " | Title: '%s'" % Dict(AniDB_dict, 'title')))
      #if not Dict(AniDB_dict, 'studio' ):                                                                                          error_log['anime-list studio logos'].append("AniDBid: %s | Title: '%s' | AniDB has studio '%s' and anime-list has '%s' | "    % (common.WEB_LINK % (ANIDB_SERIE_URL % AniDBid, AniDBid), title, metadata.studio, mapping_studio) + common.WEB_LINK % (ANIDB_TVDB_MAPPING_FEEDBACK % ("aid:" + metadata.id + " " + title, String.StripTags( XML.StringFromElement(xml, encoding='utf8'))), "Submit bug report (need GIT account)"))
      #if metadata.studio       and 'studio' in AniDB_dict and AniDB_dict ['studio'] and AniDB_dict ['studio'] != metadata.studio:  error_log['anime-list studio logos'].append("AniDBid: %s | Title: '%s' | AniDB has studio '%s' and anime-list has '%s' | "    % (common.WEB_LINK % (ANIDB_SERIE_URL % AniDBid, AniDBid), title, metadata.studio, mapping_studio) + common.WEB_LINK % (ANIDB_TVDB_MAPPING_FEEDBACK % ("aid:" + metadata.id + " " + title, String.StripTags( XML.StringFromElement(xml, encoding='utf8'))), "Submit bug report (need GIT account)"))
      #if metadata.studio == "" and 'studio' in AniDB_dict and AniDB_dict ['studio'] == "":                                         error_log['anime-list studio logos'].append("AniDBid: %s | Title: '%s' | AniDB and anime-list are both missing the studio" % (common.WEB_LINK % (ANIDB_SERIE_URL % AniDBid, AniDBid), title) )
    
      Log.Info("ANNid: '%s', MALid: '%s', xml loaded: '%s'" % (ANNid, MALid, str(xml is not None)))
  
  Log.Info("--- return ---".ljust(157, '-'))
  Log.Info("relations_map: {}".format(DictString(Dict(mappingList, 'relations_map', default={}), 1)))
  Log.Info("AniDB_dict: {}".format(DictString(AniDB_dict, 4)))
  return AniDB_dict, ANNid, MALid
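On the duration bookkeeping above: AniDB reports episode length in minutes while Plex stores durations in milliseconds, so each episode gets length*1000*60 and the show-level duration is the season-1 average converted the same way. A tiny worked sketch with made-up lengths:

lengths = [22, 24, 26]                                     # made-up AniDB 'length' values (minutes)
durations_ms = [m * 1000 * 60 for m in lengths]            # per-episode duration for Plex
numEpisodes, totalDuration = len(lengths), sum(lengths)
average_ms = int(totalDuration) / int(numEpisodes) * 60 * 1000 if numEpisodes else 0
print(durations_ms)   # [1320000, 1440000, 1560000]
print(average_ms)     # 1440000 ms, i.e. a 24-minute average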
Example #23
def GetMetadata(media, movie, error_log, id):
    Log.Info("=== AnimeLists.GetMetadata() ===".ljust(157, '='))
    MAPPING_FEEDBACK = 'http://github.com/ScudLee/anime-lists/issues/new?title=%s&body=%s'  # ScudLee mapping file git feedback url
    mappingList, AnimeLists_dict = {}, {}  #mappingList['poster_id_array'] = {}
    found = False
    source, id = id.split('-', 1) if '-' in id else ("", id)
    AniDB_id = id if source.startswith('anidb') else ""
    TVDB_id = id if source.startswith('tvdb') else ""
    TMDB_id = id if source.startswith('tmdb') else ""
    AniDBid = ""
    TVDBid = ""
    TMDBid = ""
    IMDBid = ""
    tvdb_numbering = True if not movie and (
        TVDB_id
        or AniDB_id and max(map(int, media.seasons.keys())) > 1) else False
    tvdbcounts = {}

    ### Search for match ###
    Log.Info("tvdb_numbering: {}".format(tvdb_numbering))
    AniDB_id2, TVDB_id2 = "", ""

    Log.Info("--- AniDBTVDBMap ---".ljust(157, '-'))
    forcedID = {
        'anidbid': AniDB_id,
        'tvdbid': TVDB_id,
        'tmdbid': TMDB_id,
        "imdbid": ""
    }
    for anime in AniDBTVDBMap.iter('anime') if AniDBTVDBMap else []:
        # gather any manually specified source ids
        foundID, wantedID = {}, {}
        for check in forcedID.keys():
            foundID[check] = anime.get(check, "")
            wantedID[check] = True if foundID[check] == forcedID[
                check] and forcedID[check] != '' else False

        # if this row matches our specified source-id
        if True in wantedID.values():
            # save the found values for later use in other GetMetadata that don't depend on AniDB etc.
            IMDBid, TMDBid, TVDBid, AniDBid = foundID['imdbid'], foundID[
                'tmdbid'], foundID['tvdbid'], foundID['anidbid']
            # use the old check to decide whether to proceed
            if TVDBid == '' and AniDBid == '': continue
        # nothing found, skip
        else:
            continue

        # record the number of entries using the same tvdb id
        SaveDict(Dict(tvdbcounts, TVDBid, default=0) + 1, tvdbcounts, TVDBid)

        found = True

        if not tvdb_numbering and not TVDB_id: TVDB_id = TVDBid
        if tvdb_numbering and AniDBid and TVDBid.isdigit(
        ) and anime.get('defaulttvdbseason') in [
                'a', '1'
        ] and anime.get('episodeoffset') in ['', '0'] and len(
                anime.xpath(
                    "mapping-list/mapping[@anidbseason='1'][@tvdbseason='0']")
        ) == 0 and not AniDB_id:
            AniDB_id2 = AniDBid
        Log.Info(
            "[+] AniDBid: {:>5}, TVDBid: {:>6}, defaulttvdbseason: {:>2}, offset: {:>3}, name: {}"
            .format(AniDBid, TVDBid, anime.get('defaulttvdbseason'),
                    anime.get('episodeoffset') or '0', GetXml(anime, 'name')))

        ### Anidb numbered serie ###
        if AniDB_id:  # or defaulttvdbseason=='1':
            SaveDict(anime.get('tmdbid', ""), mappingList, 'tmdbid')
            SaveDict(anime.get('imdbid', ""), mappingList, 'imdbid')
            SaveDict(anime.get('defaulttvdbseason'), mappingList,
                     'defaulttvdbseason')
            SaveDict(
                anime.get('episodeoffset') or '0', mappingList,
                'episodeoffset')
            SaveDict(GetXml(anime, 'name'), mappingList, 'name')
            SaveDict(GetXml(anime, 'studio'), AnimeLists_dict, 'studio')
            SaveDict(GetXml(anime, "supplemental-info/director"),
                     AnimeLists_dict, 'director')
            SaveDict(GetXml(anime, "supplemental-info/credits"),
                     AnimeLists_dict, 'writer')
            for genre in anime.xpath('supplemental-info/genre'):
                SaveDict([genre.text], AnimeLists_dict, 'genres')
            for art in anime.xpath('supplemental-info/fanart/thumb'):
                SaveDict(
                    {
                        art.text: ('/'.join(
                            art.text.split('/')[3:]), 1, art.get('preview'))
                    }, AnimeLists_dict, 'art')

        ### TheTVDB numbered series ###
        if TVDB_id or not movie and max(map(int, media.seasons.keys(
        ))) > 1 and AniDB_id == '':  #In case AniDB guid but multiple seasons
            if TVDBid.isdigit():
                if anime.get('defaulttvdbseason'):
                    if anime.get('defaulttvdbseason') in [
                            'a', '1'
                    ] and anime.get('episodeoffset') in ['', '0'] and len(
                            anime.xpath(
                                "mapping-list/mapping[@anidbseason='1'][@tvdbseason='0']"
                            )) == 0:
                        SaveDict(anime.get('defaulttvdbseason'), mappingList,
                                 'defaulttvdbseason')
                        AniDB_id2 = AniDBid
                    SaveDict(
                        anime.get('episodeoffset') or '0', mappingList, 'TVDB',
                        's-1' if anime.get('defaulttvdbseason') == '0' and len(
                            anime.xpath(
                                "mapping-list/mapping[@anidbseason='1'][@tvdbseason='0']"
                            )) >= 1 else 's' + anime.get('defaulttvdbseason'),
                        AniDBid
                    )  #mappingList['TVDB'][s1][anidbid]=episodeoffset
                    SaveDict(
                        {
                            'min': anime.get('defaulttvdbseason'),
                            'max': anime.get('defaulttvdbseason')
                        }, mappingList, 'season_map', AniDBid
                    )  # Set the min/max season to the 'defaulttvdbseason'
                    if source == "tvdb6" and anime.get(
                            'episodeoffset').isdigit() and int(
                                anime.get('episodeoffset')) > 0:
                        SaveDict(
                            {
                                'min': '0',
                                'max': '0'
                            }, mappingList, 'season_map', AniDBid
                        )  # Force series as special if not starting the TVDB season
                for season in anime.iter(
                        'mapping'
                ):  ### mapping list: <mapping-list> <mapping anidbseason="0" tvdbseason="0">;1-12;2-14;3-16;4-18;</mapping> </mapping-list>
                    anidbseason, tvdbseason, offset, start, end = season.get(
                        'anidbseason'), season.get('tvdbseason'), season.get(
                            'offset') or '0', season.get('start'), season.get(
                                'end')
                    Log.Info(
                        "    - season: [{:>2}],           [{:>2}], range:      [{:>3}-{:>3}], offset: {:>3}, text: {}"
                        .format(anidbseason, tvdbseason, start or '000', end
                                or '000', offset, (season.text
                                                   or '').strip(';')))
                    for ep in range(int(start), int(end) + 1) if start else []:
                        #Log.Info("[?] start: {}, end: {}, ep: {}".format(start, end, ep))
                        if not Dict(
                                mappingList, 'TVDB', 's' + tvdbseason + 'e' +
                                str(ep + int(offset))):
                            SaveDict(
                                (anidbseason, str(ep), AniDBid), mappingList,
                                'TVDB',
                                's' + tvdbseason + 'e' + str(ep + int(offset))
                            )  #mappingList['TVDB'][s1e1]=(AniDB_season, AniDB_episode, AniDBid) for start-end mappings
                        #else: Log.Info("already present")
                    for ep in filter(
                            None,
                            season.text.split(';')) if season.text else []:
                        if not '-' in ep:
                            Log.Info(
                                '[!] MAPPING ERROR, season.text: "{}", ep mapping missing hyphen: "{}"'
                                .format(season.text, ep))
                        elif not Dict(
                                mappingList, 'TVDB',
                                's' + tvdbseason + 'e' + ep.split('-')[1]):
                            SaveDict(
                                (anidbseason, ep.split('-')[0], AniDBid),
                                mappingList, 'TVDB',
                                's' + tvdbseason + 'e' + ep.split('-')[1]
                            )  #mappingList['TVDB'][s1e1]=(AniDB_season, AniDB_episode, AniDBid) for manual mapping like '1-12'
                        #elif '-' not in (mappingList, 'TVDB', 's'+tvdbseason+'e'+ep.split('-')[1]):
                        #  SaveDict((anidbseason, Dict(mappingList, 'TVDB', 's'+tvdbseason+'e'+ep.split('-')[1])[1]+'-'+ep.split('-')[0], AniDBid), mappingList, 'TVDB', 's'+tvdbseason+'e'+ep.split('-')[1])
                        #  Log.Info("already present so converting to range but range not supported")
                    if Dict(mappingList, 'season_map', AniDBid,
                            'max').isdigit() and int(
                                Dict(mappingList, 'season_map', AniDBid,
                                     'max')) < int(season.get("tvdbseason")):
                        SaveDict(
                            season.get("tvdbseason"), mappingList,
                            'season_map', AniDBid, 'max'
                        )  # Update the max season to the largest 'tvdbseason' season seen in 'mapping-list'

            elif TVDBid == "hentai":
                SaveDict("X", AnimeLists_dict, 'content_rating')
            elif TVDBid in ("", "unknown", None):
                link = MAPPING_FEEDBACK % ("aid:%s &#39;%s&#39; TVDBid:" %
                                           (AniDB_id, "title"),
                                           String.StripTags(
                                               XML.StringFromElement(
                                                   anime, encoding='utf8')))
                error_log['anime-list TVDBid missing'].append(
                    'AniDBid: "{}" | Title: "{}" | Has no matching TVDBid "{}" in mapping file | <a href="{}" target="_blank">Submit bug report</a>'
                    .format(AniDB_id, "title", TVDBid, link))
                Log.Info(
                    '"anime-list TVDBid missing.htm" log added as tvdb serie id missing in mapping file: "{}"'
                    .format(TVDBid))

        #AniDB guid need 1 AniDB xml only, not an TheTVDB numbered serie with anidb guid (not anidb2 since seen as TheTVDB)
        if AniDB_id and (movie or max(map(int, media.seasons.keys())) <= 1):
            break

    else:

        # case [tmdb-123]:
        # <anime anidbid="456" tvdbid="" defaulttvdbseason="" episodeoffset="" tmdbid="123" imdbid="">
        # fails the above tvdbid + anidb check, but useful info was still obtained (anidbid=456)
        # <anime tmdbid="123">
        # fails the above tvdbid + anidbid check, so this used to return a blank tmdbid to be later used in
        # TheMovieDB.GetMetadata(), and '' as AniDBid to be used in AniDB.GetMetadata()
        # so, not resetting the AniDBid/TVDBid, and saving found info
        if ((TMDB_id or TMDBid) or IMDBid):
            SaveDict(TMDB_id or TMDBid or '', mappingList, 'tmdbid')
            SaveDict(IMDBid or '', mappingList, 'imdbid')
            Log.Info(
                "Saved possible tmdb/imdb values for later '%s'/'%s' for later, since not in AnimeList."
                % (Dict(mappingList, 'tmdbid'), Dict(mappingList, 'imdbid')))
        elif not found:
            Log.Info("ERROR: Could not find %s: %s" % (source, id))
            # this error only makes sense for AniDB_id; otherwise AniDB_id is always == ""
            # since it can't both be not found and have been set
            if AniDB_id != "":
                error_log['anime-list AniDBid missing'].append(
                    "AniDBid: " + common.WEB_LINK %
                    (common.ANIDB_SERIE_URL + AniDB_id, AniDB_id))
            # keeping this reset since I'm not clear on its purpose.
            AniDBid, TVDBid = '', ''

    Log.Info('             -----          ------')
    Log.Info('             {:>5}          {:>6}'.format(
        AniDB_id or AniDB_id2 or AniDBid, TVDB_id or TVDBid))
    SaveDict(Dict(tvdbcounts, TVDB_id or TVDBid), mappingList, 'tvdbcount')

    ### Update collection
    TVDB_collection, title = [], ''
    for anime in AniDBTVDBMap.iter(
            'anime') if AniDBTVDBMap and TVDB_id.isdigit() else []:
        if anime.get('tvdbid', "") == TVDB_id:
            TVDB_collection.append(anime.get("anidbid", ""))
            if anime.get('defaulttvdbseason') in [
                    'a', '1'
            ] and anime.get('episodeoffset') in ['', '0'] and len(
                    anime.xpath(
                        "mapping-list/mapping[@anidbseason='1'][@tvdbseason='0']"
                    )) == 0:
                title = AniDB.GetAniDBTitle(
                    AniDB.AniDBTitlesDB.xpath(
                        '/animetitles/anime[@aid="{}"]/title'.format(
                            anime.get("anidbid", ""))))[
                                0]  #returns [title, main, language_rank]
    if len(TVDB_collection) > 1 and title:
        SaveDict([title + ' Collection'], AnimeLists_dict, 'collections')
        Log.Info("[ ] collection: TVDBid '%s' is part of collection: '%s'" %
                 (TVDB_id, title))
    else:
        Log.Info("[ ] collection: TVDBid '%s' is not part of any collection" %
                 (TVDB_id))

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info(
        "AniDB_id: '{}', AniDB_id2: '{}', AniDBid: '{}', TVDB_id: '{}', TVDBid: '{}'"
        .format(AniDB_id, AniDB_id2, AniDBid, TVDB_id, TVDBid))
    Log.Info("mappingList: {}".format(DictString(mappingList, 1)))
    Log.Info("AnimeLists_dict: {}".format(DictString(AnimeLists_dict, 1)))
    return AnimeLists_dict, AniDB_id or AniDB_id2 or AniDBid, (
        TVDB_id or TVDBid) if (TVDB_id or TVDBid).isdigit() else "", Dict(
            mappingList, 'tmdbid'), Dict(mappingList, 'imdbid'), mappingList
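The mapping-list handling above flattens each <mapping> element of the anime-list entry into mappingList['TVDB'], keyed by the TVDB 's{season}e{episode}' string and valued with the matching (anidbseason, anidb_episode, AniDBid) tuple. A condensed sketch of the two cases it covers, with made-up start/end/offset attributes and ';anidb-tvdb;' pairs:

mappingList, AniDBid = {}, '23'
anidbseason, tvdbseason = '0', '0'                 # made-up <mapping> attributes
start, end, offset = '1', '3', '5'
text = ';1-12;2-14;'                               # explicit anidb-tvdb episode pairs

tvdb = mappingList.setdefault('TVDB', {})
for ep in range(int(start), int(end) + 1):                                  # start/end/offset ranges
    tvdb.setdefault('s%se%s' % (tvdbseason, ep + int(offset)), (anidbseason, str(ep), AniDBid))
for pair in filter(None, text.split(';')):                                  # ';1-12;' style pairs
    anidb_ep, tvdb_epnum = pair.split('-')
    tvdb.setdefault('s%se%s' % (tvdbseason, tvdb_epnum), (anidbseason, anidb_ep, AniDBid))
print(mappingList['TVDB'])
# {'s0e6': ('0', '1', '23'), 's0e7': ('0', '2', '23'), 's0e8': ('0', '3', '23'),
#  's0e12': ('0', '1', '23'), 's0e14': ('0', '2', '23')}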
Example #24
def tvdb_ep(mappingList, season, episode, anidbid=''):
    '''
  <anime anidbid="23" tvdbid="76885" defaulttvdbseason="1" episodeoffset="" tmdbid="" imdbid="">
    defaulttvdbseason = Dict(mappingList, 'defaulttvdbseason')
    episodeoffset     = Dict(mappingList, 'episodeoffset', default="0")
                        Dict(mappingList, 's'+season+'e'+episode.split('-')[0]
    <name>Cowboy Bebop</name>
    <mapping-list>
      <mapping anidbseason="0" tvdbseason="0">;1-5;2-6;</mapping>
      <mapping anidbseason="1" tvdbseason="5" start="13" end="24" offset="-12"/>
      <mapping anidbseason="1" tvdbseason="6" start="25" end="36" offset="-24"/>
      <mapping anidbseason="1" tvdbseason="7" start="37" end="48" offset="-36"/>
    </mapping-list> 
    <before>;1-25;</before>
    <supplemental-info replace="true"> 
      <studio>Sunrise</studio>
      <genre>Comedy</genre>
      <genre>Music</genre>
      <actor /> /not used
      <director>Tetsuya Nomura</director>
      <credits>Kazuhito Yamamoto</credits> =writer
      <fanart>
        <thumb dim="1280x720" colors="" preview="http://www.thetvdb.com/banners/_cache/fanart/original/191221-1.jpg">http://www.thetvdb.com/banners/fanart/original/191221-1.jpg</thumb>
      </fanart>
    </supplemental-info>
</anime>
  '''
    mapping = ('0', '0')  #(season or '0', episode)
    debug = False
    if debug:
        Log.Info('[?] (#1) season: {}, episode: {}, anidbid: {}'.format(
            season, episode, anidbid))

    defaulttvdbseason = Dict(mappingList, 'defaulttvdbseason')
    episodeoffset = Dict(mappingList, 'episodeoffset')
    for item in Dict(mappingList, 'TVDB') or {}:
        # mappingList: {'TVDB': {'s3': {'13485': '0'}, 's2': {'12233': '0'}, 's1': {'11739': '0'}, 's0': {'12344': '0'}}, 'defaulttvdbseason': '1'}
        if Dict(mappingList, 'TVDB', item, anidbid):
            episodeoffset = Dict(mappingList, 'TVDB', item, anidbid)
            defaulttvdbseason = item[1:]
            break
    else:
        Log.Info('[!] anidbid {} not found in mappingList: {}'.format(
            anidbid, mappingList))

    # <mapping anidbseason="x" tvdbseason="x" start="13" end="24" offset="-12"> ;1-5;2-6; </mapping>
    value = (season, episode, anidbid)
    tvdbList = Dict(mappingList, 'TVDB', default={})
    if value in tvdbList.values():
        mapping = list(tvdbList.keys())[list(
            tvdbList.values()).index(value)][1:].split('e')
        if debug:
            Log.Info('[?] (#2) value "{}" in mappingList "{}"'.format(
                value, mappingList))

    # if not mapped with mapping, specials are not mapped with tvdb
    elif season == '0':
        mapping = ('0', '0')
        if debug:
            Log.Info(
                '[?] (#3) value "{}" not in mappingList "{}" and season 0'.
                format(value, mappingList))

    # <anime anidbid="xxxxx" tvdbid="xxxxx" defaulttvdbseason="x" episodeoffset="x">
    elif season == '1':
        if debug:
            Log.Info(
                '[?] (#4) value "{}" not in mappingList "{}" and season 1, defaulttvdbseason: {}, episodeoffset: {}'
                .format(value, mappingList, defaulttvdbseason, episodeoffset))
        mapping = (defaulttvdbseason, str(int(episode) + int(episodeoffset)))
    else:
        Log.Info('[!] error {}'.format(value))

    return mapping
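A self-contained restatement of the three branches of tvdb_ep above, omitting the per-anidbid offset lookup and the logging; tvdb_ep_sketch and the mappingList data are illustrative, shaped after the Cowboy Bebop entry in the docstring (anidbid 23):

# Simplified, self-contained restatement of the branches above (made-up mapping data).
def tvdb_ep_sketch(mappingList, season, episode, anidbid=''):
    tvdbList = mappingList.get('TVDB', {})
    value = (season, episode, anidbid)
    if value in tvdbList.values():                                   # explicit <mapping> entry
        return list(tvdbList.keys())[list(tvdbList.values()).index(value)][1:].split('e')
    if season == '0':                                                # unmapped special
        return ('0', '0')
    if season == '1':                                                # default season + offset
        return (mappingList.get('defaulttvdbseason', '1'),
                str(int(episode) + int(mappingList.get('episodeoffset', '0'))))
    return ('0', '0')

mappingList = {'defaulttvdbseason': '1', 'episodeoffset': '0',
               'TVDB': {'s5e1': ('1', '13', '23'), 's0e5': ('0', '1', '23')}}
print(tvdb_ep_sketch(mappingList, '1', '5',  '23'))   # ('1', '5')  - default season, offset applied
print(tvdb_ep_sketch(mappingList, '1', '13', '23'))   # ['5', '1']  - covered by a <mapping> range
print(tvdb_ep_sketch(mappingList, '0', '9',  '23'))   # ('0', '0')  - special without explicit mapping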
Example #25
def GetMetadata(media, movie, source, TVDBid, mappingList, num=0):
    """ [tvdb4.mapping.xml] Attempt to get the ASS's episode mapping data
      [tvdb4.posters.xml] Attempt to get the ASS's image data
  """
    Log.Info('=== tvdb4.GetMetadata() ==='.ljust(157, '='))
    TVDB4_dict, TVDB4_mapping, TVDB4_xml = {}, None, None

    if movie or not source == "tvdb4":
        Log.Info("not tvdb4 mode")
        return TVDB4_dict
    Log.Info("tvdb4 mode")

    def find_tvdb4_file(file_to_find):
        try:
            folder = common.GetMediaDir(media, movie)
            while folder and folder[-1] not in ["/", "\\"]:
                filename = os.path.join(folder, file_to_find)
                if os.path.exists(filename):
                    file = Core.storage.load(os.path.realpath(filename))
                    try:
                        return XML.ElementFromString(file)
                    except:
                        return file
                folder = os.path.dirname(folder)
            else:
                Log.Info("No '{}' file detected locally".format(file_to_find))
        except Exception as e:
            Log.Error(
                "Issues in finding setup info as directories have most likely changed post scan into Plex, Exception: '%s'"
                % e)
        return ""

    Log.Info("--- tvdb4.mapping.xml ---".ljust(157, '-'))
    TVDB4_mapping = find_tvdb4_file("tvdb4.mapping")

    if TVDB4_mapping: Log.Debug("'tvdb4.mapping' file detected locally")
    else:
        TVDB4_mapping = TVDB4_mapping or common.LoadFile(
            filename=os.path.basename(TVDB4_MAPPING_URL),
            url=TVDB4_MAPPING_URL,
            cache=CACHE_1DAY *
            6)  # tvdb4 mapping file cached locally for 6 days
    entry = ""
    if isinstance(TVDB4_mapping, str): entry = TVDB4_mapping
    else:
        entry = common.GetXml(TVDB4_mapping,
                              "/tvdb4entries/anime[@tvdbid='%s']" % TVDBid)
        if not entry:
            Log.Error("TVDBid '%s' is not found in mapping file" % TVDBid)
    if entry:
        for line in filter(None, entry.strip().splitlines()):
            season = line.strip().split("|")
            for absolute_episode in range(int(season[1]), int(season[2]) + 1):
                SaveDict((str(int(season[0])), str(absolute_episode)),
                         mappingList, 'absolute_map', str(absolute_episode))
            SaveDict(True if "(unknown length)" in season[3] else False,
                     mappingList, 'absolute_map', 'unknown_series_length')
            SaveDict(str(int(season[0])), mappingList, 'absolute_map',
                     'max_season')

    Log.Info("--- tvdb4.posters.xml ---".ljust(157, '-'))
    TVDB4_xml = find_tvdb4_file(os.path.basename(TVDB4_POSTERS_URL))

    if TVDB4_xml: Log.Debug("'tvdb4.posters.xml' file detected locally")
    else:
        TVDB4_xml = TVDB4_xml or common.LoadFile(
            filename=os.path.basename(TVDB4_POSTERS_URL),
            url=TVDB4_POSTERS_URL,
            cache=CACHE_1DAY * 6)  # tvdb4 posters file refreshed every 6 days
    if TVDB4_xml:
        seasonposternum = 0
        entry = common.GetXml(TVDB4_xml,
                              "/tvdb4entries/posters[@tvdbid='%s']" % TVDBid)
        if not entry:
            Log.Error("TVDBid '%s' is not found in posters file" % TVDBid)
        for line in filter(None, entry.strip().splitlines()):
            season, url = line.strip().split("|", 1)
            season = season.lstrip("0") if season.lstrip("0") else "0"
            seasonposternum += 1
            SaveDict(("TheTVDB/seasons/%s-%s-%s" %
                      (TVDBid, season, os.path.basename(url)), 1, None),
                     TVDB4_dict, 'seasons', season, 'posters', url)

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("absolute_map: {}".format(
        DictString(Dict(mappingList, 'absolute_map', default={}), 0)))
    Log.Info("TVDB4_dict: {}".format(DictString(TVDB4_dict, 4)))
    return TVDB4_dict
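The mapping loop above flattens each 'season|first_abs|last_abs|note' line of a tvdb4 entry into an absolute-episode lookup. Below is a standalone sketch of that parsing, assuming the same line format and using a plain dict in place of the plugin's SaveDict/mappingList; the entry text is made up for illustration.

entry = "01|1|26|\n02|27|52|(unknown length)"

absolute_map = {}
for line in filter(None, entry.strip().splitlines()):
    season = line.strip().split("|")
    for absolute_episode in range(int(season[1]), int(season[2]) + 1):
        absolute_map[str(absolute_episode)] = (str(int(season[0])), str(absolute_episode))
    absolute_map['unknown_series_length'] = "(unknown length)" in season[3]
    absolute_map['max_season'] = str(int(season[0]))

print(absolute_map['30'])                     # -> ('2', '30')
print(absolute_map['max_season'])             # -> '2'
print(absolute_map['unknown_series_length'])  # -> True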
Example No. 26
0
def GetMetadata(movie, IMDbid):  # Returns 200 but not downloaded correctly - IMDb has a single poster, downloaded through OMDb, preferred by the mapping file
  Log.Info("=== OMDb.GetMetadata() ===".ljust(157, '='))
  url       = OMDB_HTTP_API_URL.format(api_key=Prefs['OMDbApiKey']) #'
  OMDb_dict = {}

  if Prefs['OMDbApiKey'] in ('None', '', 'N/A'):  Log.Info("No api key found - Prefs['OMDbApiKey']: '{}'".format(Prefs['OMDbApiKey']));  return OMDb_dict
  
  Log.Info("IMDbid: '%s'" % IMDbid)
  for imdbid_single in IMDbid.split(",") if IMDbid.startswith("tt") else []:
    Log.Info(("--- %s.series ---" % imdbid_single).ljust(157, '-'))
    json = common.LoadFile(filename=imdbid_single+".json", relativeDirectory=os.path.join('OMDb', 'json'), url=url + imdbid_single)
    if json:
      Log.Info("[ ] title: {}"                  .format(SaveDict( Dict(json,'title')     , OMDb_dict, 'title'                  )))
      Log.Info("[ ] summary: {}"                .format(SaveDict( Dict(json,'Plot')      , OMDb_dict, 'summary'                )))
      Log.Info("[ ] originally_available_at: {}".format(SaveDict( Dict(json,'Released')  , OMDb_dict, 'originally_available_at')))
      Log.Info("[ ] countries: {}"              .format(SaveDict( Dict(json,'Country')   , OMDb_dict, 'countries'              )))
      Log.Info("[ ] directors: {}"              .format(SaveDict( Dict(json,'Director')  , OMDb_dict, 'directors'              )))
      Log.Info("[ ] genres: {}"                 .format(SaveDict( sorted([x.strip() for x in Dict(json,'Genre').split(',')]), OMDb_dict, 'genres')))
      Log.Info("[ ] writers: {}"                .format(SaveDict( Dict(json,'Writer')    , OMDb_dict, 'writers'                )))
      SaveDict( Dict(json,'imdbRating'), OMDb_dict, 'rating')
      if Dict(json,'Metascore').isdigit() and not Dict(OMDb_dict,'rating'):
        SaveDict( float(json['Metascore'])/10, OMDb_dict, 'rating')
      Log.Info("[ ] rating: {}".format(Dict(OMDb_dict,'rating')))
      if SaveDict( Dict(json,'Rated'), OMDb_dict, 'content_rating') in common.Movie_to_Serie_US_rating and not movie and Dict(json,'Type')=="movie":
        Log.Info("[ ] content_rating: {}".format(SaveDict(common.Movie_to_Serie_US_rating[json['Rated']], OMDb_dict, 'content_rating')))
      if Dict(json,'Poster'):  Log.Info("[ ] poster: {}".format(json['Poster'])); SaveDict((os.path.join('OMDb', 'poster', imdbid_single+'.jpg'), common.poster_rank('OMDb', 'posters'), None), OMDb_dict, 'posters', json['Poster'])
      try:     Log.Info("[ ] duration: {}".format(SaveDict( int(Dict(json,'Runtime').replace(' min','')) * 60 * 1000, OMDb_dict, 'duration')))  # Plex save duration in millisecs
      except:  pass

  Log.Info("--- return ---".ljust(157, '-'))
  Log.Info("OMDb_dict: {}".format(DictString(OMDb_dict, 4)))
  return OMDb_dict
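Two small conversions in the OMDb example above are easy to miss: the Metascore/10 rating fallback and the 'Runtime' string converted to Plex's millisecond duration. A short sketch with a hypothetical OMDb-style payload; field names follow the code above, and the 'N/A' handling here is simplified relative to the SaveDict-based original.

omdb_json = {'imdbRating': 'N/A', 'Metascore': '74', 'Runtime': '24 min'}

rating = omdb_json.get('imdbRating')
if rating in (None, '', 'N/A') and omdb_json.get('Metascore', '').isdigit():
    rating = float(omdb_json['Metascore']) / 10                          # 74 -> 7.4

duration_ms = int(omdb_json['Runtime'].replace(' min', '')) * 60 * 1000  # Plex stores durations in ms

print("rating: {}, duration_ms: {}".format(rating, duration_ms))         # -> rating: 7.4, duration_ms: 1440000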
Example No. 27
0
def GetMetadata(media, movie):
  Log.Info("=== Local.GetMetadata() ===".ljust(157, '='))
  Local_dict          = {}
  dir                 = GetMediaDir(media, movie)
  library, root, path = GetLibraryRootPath(dir)

  if movie: return Local_dict

  Log.Info("dir:     {}".format(dir))
  Log.Info("library: {}".format(library))
  Log.Info("root:    {}".format(root))
  Log.Info("path:    {}".format(path))

  if not path in ('_unknown_folder', '.'):
  
    series_root_folder = os.path.join(root, path.split(os.sep, 1)[0])
    Log.Info("series_root_folder:  {}".format(series_root_folder))
    Log.Info("Grouping folder:     {}".format(os.path.basename(series_root_folder)))
    if not os.path.exists(series_root_folder):
      Log.Info('files are currently inaccessible')
      return Local_dict
    subfolder_count    = len([file for file in os.listdir(series_root_folder) if os.path.isdir(os.path.join(series_root_folder, file))])
    Log.Info("subfolder_count:     {}".format(subfolder_count))
    
    ### Strip season and transparent folders to reduce complexity and use the remaining folder as the series name ###
    reverse_path, season_folder_first = list(reversed(path.split(os.sep))), False
    for folder in reverse_path[:-1]:                 # Remove root folder from the test; [:-1] doesn't throw errors but gives an empty list if items don't exist, which might not be what you want in other cases
      for rx in SEASON_RX:                           # In anime there are more specials folders than season folders, so test those first
        if re.search(rx, folder, re.IGNORECASE):     # Get the season number but skip the last SEASON_RX entry (skipped folders)
          reverse_path.remove(folder)                # Iterating over a slice ([:] or [:-1]) so removal doesn't hinder iteration. Other ways to remove: reverse_path.pop(-1), reverse_path.remove(thing|array[0])
          if rx!=SEASON_RX[-1] and len(reverse_path)>=2 and folder==reverse_path[-2]:  season_folder_first = True
          break
   
    Log.Info("reverse_path:        {}".format(reverse_path))
    Log.Info("season_folder_first: {}".format(season_folder_first))
    if len(reverse_path)>1 and not season_folder_first and subfolder_count>1:  ### grouping folders only ###
      Log.Info("[ ] collection (Grouping folder): {}".format(SaveDict([reverse_path[-1]], Local_dict, 'collections')))
    else:  Log.Info("Grouping folder not found")
       
  Log.Info("--- return ---".ljust(157, '-'))
  Log.Info("Local_dict: {}".format(DictString(Local_dict, 1)))
  return Local_dict
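The path handling above decides whether the top-level folder is a grouping folder that should become a collection. A simplified sketch of the same idea follows, with a reduced, assumed SEASON_RX list and without the season_folder_first/subfolder_count checks of the original.

import re

SEASON_RX = [r'^Specials?$', r'^Season\s*\d+$']     # assumed, reduced pattern list
path = 'One Piece Collection/One Piece/Season 02'   # path relative to the library root

reverse_path = list(reversed(path.split('/')))      # deepest folder first
for folder in reverse_path[:-1]:                    # keep the top folder out of the test
    if any(re.search(rx, folder, re.IGNORECASE) for rx in SEASON_RX):
        reverse_path.remove(folder)                 # drop season/specials folders

if len(reverse_path) > 1:
    print("collection: {}".format(reverse_path[-1]))  # -> collection: One Piece Collection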
Example No. 28
0
def AdjustMapping(source, mappingList, dict_AniDB, dict_TheTVDB):
  """ EX:
  season_map: {'max_season': 2, '12560': {'max': 1, 'min': 1}, '13950': {'max': 0, 'min': 0}}
  relations_map: {'12560': {'Sequel': ['13950']}, '13950': {'Prequel': ['12560']}}
  TVDB Before: {'s1': {'12560': '0'}, 's0': {'13950': '0'}, '13950': (0, '')}
    's0e5': ('1', '4', '9453')
    's1': {'12560': '0'}
    '13950': (0, '')
  """
  Log.Info("=== anidb34.AdjustMapping() ===".ljust(157, '=')) 
  is_modified   = False
  adjustments   = {}
  tvdb6_seasons = {1: 1}
  is_banned     = Dict(dict_AniDB,  'Banned',        default=False)
  TVDB          = Dict(mappingList, 'TVDB',          default={})
  season_map    = Dict(mappingList, 'season_map',    default={})
  relations_map = Dict(mappingList, 'relations_map', default={})
  
  if not (Dict(mappingList, 'possible_anidb3') or source=='tvdb6'):  Log.Info("Neither a possible 'anidb3/tvdb' entry nor 'anidb4/tvdb6' entry");  return is_modified
  Log.Info("adjusting mapping for 'anidb3/tvdb' & 'anidb4/tvdb6' usage") 

  #Log.Info("dict_TheTVDB: {}".format(dict_TheTVDB))
  Log.Info("season_map: {}".format(DictString(season_map, 0)))
  Log.Info("relations_map: {}".format(DictString(relations_map, 1)))

  try:
    Log.Info("--- tvdb mapping adjustments ---".ljust(157, '-'))
    Log.Info("TVDB Before: {}".format(DictString(TVDB, 0)))
    for id in sorted(season_map, key=common.natural_sort_key):
      new_season, new_episode = '', ''
      if id == 'max_season':  continue
      #### Note: Below must match scanner (variable names are different but logic matches) ####
      Log.Info("Checking AniDBid: %s" % id)
      def get_prequel_info(prequel_id):
        Log.Info("-- get_prequel_info(prequel_id): %s, season min: %s, season max: %s" % (prequel_id, season_map[prequel_id]['min'], season_map[prequel_id]['max']))
        if source=="tvdb":
          if season_map[prequel_id]['min'] == 0 and 'Prequel' in relations_map[prequel_id] and relations_map[prequel_id]['Prequel'][0] in season_map:
            a, b = get_prequel_info(relations_map[prequel_id]['Prequel'][0])             # Recursively go down the tree following prequels
            if not str(a).isdigit():  return ('', '')
            return (a, b+100) if a < season_map['max_season'] else (a+1, 0)  # If the prequel is < max season, add 100 to the episode number offset: Else, add it into the next new season at episode 0
          if season_map[prequel_id]['min'] == 0:                          return ('', '')                              # Root prequel is a special so leave mapping alone as special
          elif season_map[prequel_id]['max'] < season_map['max_season']:  return (season_map[prequel_id]['max'], 100)  # Root prequel season is < max season so add to the end of the Prequel season
          else:                                                           return (season_map['max_season']+1, 0)       # Root prequel season is >= max season so add to the season after max
        if source=="tvdb6":
          if season_map[prequel_id]['min'] != 1 and 'Prequel' in relations_map[prequel_id] and relations_map[prequel_id]['Prequel'][0] in season_map:
            a, b = get_prequel_info(relations_map[prequel_id]['Prequel'][0])             # Recursively go down the tree following prequels
            #Log.Info("%s+%s+%s-%s" % (a,1,season_map[prequel_id]['max'],season_map[prequel_id]['min']))
            return (a+1+season_map[prequel_id]['max']-season_map[prequel_id]['min'], 0) if str(a).isdigit() else ('', '') # Add 1 to the season number and start at episode 0
          return (2, 0) if season_map[prequel_id]['min'] == 1 else ('', '')              # Root prequel is season 1 so start counting up. Else was a sequel of specials only so leave mapping alone
      if source=="tvdb":
        if season_map[id]['min'] == 0 and 'Prequel' in relations_map[id] and relations_map[id]['Prequel'][0] in season_map:
          new_season, new_episode = get_prequel_info(relations_map[id]['Prequel'][0])    # Recursively go down the tree following prequels to a non-season-0 TVDB AniDB prequel
      if source=="tvdb6":
        if 'Prequel' in relations_map[id] and relations_map[id]['Prequel'][0] in season_map:
          new_season, new_episode = get_prequel_info(relations_map[id]['Prequel'][0])    # Recursively go down the tree following prequels to the TVDB season 1 AniDB prequel

      if str(new_season).isdigit():  # A new season & episode offset has been assigned # As anidb4/tvdb6 does full season adjustments, we need to remove any existing season mapping
        is_modified = True
        removed = {}
        for key in TVDB.keys():
          if isinstance(TVDB[key], dict)  and id in TVDB[key]:
            Log.Info("-- Deleted: %s: {'%s': '%s'}" % (key, id, TVDB[key][id]))
            removed[key] = {id: TVDB[key][id]}
            del TVDB[key][id]  # Delete season entries for its old anidb non-s0 season entries | 's4': {'11350': '0'}
          if isinstance(TVDB[key], tuple) and TVDB[key][0] == '1' and TVDB[key][2] == id:
            Log.Info("-- Deleted: {}: {}".format(key, TVDB[key]))
            removed[key] = TVDB[key]
            del TVDB[key]      # Delete episode entries for its old anidb s1 entries           | 's0e5': ('1', '4', '9453')
        SaveDict(str(new_episode), TVDB, 's'+str(new_season), id)
        Log.Info("-- Added  : {}: {}".format('s'+str(new_season), {id: str(new_episode)}))
        
        adjustments['s'+str(new_season)+'e'+str(new_episode)] = {'deleted': removed, 'added': [str(new_season), str(new_episode)]}
        tvdb6_seasons[new_season] = season_map[id]['min']  # tvdb6_seasons[New season] = [Old season]

    Log.Info("TVDB After : {}".format(DictString(Dict(mappingList, 'TVDB'), 0)))
    
    # Push back the 'dict_TheTVDB' season numbers if tvdb6 for the newly inserted season
    if source=="tvdb6":
      Log.Info("--- tvdb meta season adjustments ---".ljust(157, '-'))
      top_season, season, adjustment, new_seasons = max(map(int, dict_TheTVDB['seasons'].keys())), 1, 0, {}
      Log.Info("dict_TheTVDB Seasons Before : {}".format(sorted(dict_TheTVDB['seasons'].keys(), key=int)))
      Log.Info("tvdb6_seasons : {}".format(tvdb6_seasons))
      if "0" in dict_TheTVDB['seasons']:  new_seasons["0"] = dict_TheTVDB['seasons'].pop("0")
      while season <= top_season:
        if Dict(tvdb6_seasons, season + adjustment) == 0:
          Log.Info("-- New TVDB season  '{}'".format(season + adjustment))
          adjustment += 1
        else:
          Log.Info("-- Adjusting season '{}' -> '{}'".format(season, season + adjustment))
          if str(season) in dict_TheTVDB['seasons']:  new_seasons[str(season + adjustment)] = dict_TheTVDB['seasons'].pop(str(season))
          season += 1
      SaveDict(new_seasons, dict_TheTVDB, 'seasons')
      Log.Info("dict_TheTVDB Seasons After  : {}".format(sorted(dict_TheTVDB['seasons'].keys(), key=int)))

    # Copy in the 'dict_TheTVDB' deleted episode meta into its new added location
    Log.Info("--- tvdb meta episode adjustments ---".ljust(157, '-'))
    Log.Info("adjustments: {}".format(DictString(adjustments, 2)))
    for entry in sorted(adjustments, key=common.natural_sort_key):
      # EX: {'s6e0': {'added': ['6', '0'], 'deleted': {'s0e16': ('1', '1', '12909'), 's-1': {'12909': '0'}}}}
      added_season, added_offset = adjustments[entry]['added']  # 'added': ['6', '0']
      Log.Info("added_season: '{}', added_offset: '{}'".format(added_season, added_offset))
      for deleted in sorted(adjustments[entry]['deleted'], key=common.natural_sort_key):
        Log.Info("-- deleted: '{}': {}".format(deleted, adjustments[entry]['deleted'][deleted]))
        if isinstance(adjustments[entry]['deleted'][deleted], dict):
          deleted_season = deleted[1:]                                         # {-->'s0'<--: {'6463': '0'}}
          deleted_offset = adjustments[entry]['deleted'][deleted].values()[0]  # {'s0': {'6463': -->'0'<--}}
          if deleted=='s-1':
            Log.Info("---- {:<9}: Dead season".format("'%s'" % deleted))
            continue  # EX: {'s-1': {'12909': '0'}}
          if deleted!='s0' and added_offset=='0' and deleted_offset=='0':
            Log.Info("---- {:<9}: Whole season (s1+) was adjusted in previous section".format("'%s'" % deleted))
            continue  # EX: {'s3e0': 'added': ['3', '0'], 'deleted': {'s2': {'7680': '0'}}} == Adjusting season '2' -> '3'
          # EX: {'s2e0': 'added': ['2', '0' ], 'deleted': {'s0': {'6463': '0'}}}
          # EX: {'s1e100': 'added': ['1', '100'], 'deleted': {'s0': {'982': '1'}}}
          iteration = 1
          Log.Info("---- deleted_season: '{}', deleted_offset: '{}'".format(deleted_season, deleted_offset))
          while Dict(dict_TheTVDB, 'seasons', deleted_season, 'episodes', str(int(deleted_offset) + iteration)):
            a, b, x = deleted_season, str(int(deleted_offset) + iteration), str(int(added_offset) + iteration)
            SaveDict(Dict(dict_TheTVDB, 'seasons', a, 'episodes', b), dict_TheTVDB, 'seasons', added_season, 'episodes', x)
            Log.Info("---- {:<9}: dict_TheTVDB['seasons']['{}']['episodes']['{}'] => dict_TheTVDB['seasons']['{}']['episodes']['{}']".format("'%s'" % deleted, a, b, added_season, x))
            iteration += 1
        if isinstance(adjustments[entry]['deleted'][deleted], tuple):
          a, b = list(filter(None, re.split(r"[se]", deleted)))                        # 's0e16' --> ['0', '16']
          x = str(int(adjustments[entry]['deleted'][deleted][1]) + int(added_offset))  # ('1', -->'1'<--, '12909')
          Log.Info("---- {:<9}: dict_TheTVDB['seasons']['{}']['episodes']['{}'] => dict_TheTVDB['seasons']['{}']['episodes']['{}']".format("'%s'" % deleted, a, b, added_season, x))
          SaveDict(Dict(dict_TheTVDB, 'seasons', a, 'episodes', b), dict_TheTVDB, 'seasons', added_season, 'episodes', x)

  except Exception as e:
    if is_banned:  Log.Info("Expected exception hit as you were banned from AniDB so you have incomplete data to proceed")
    else:          Log.Error("Unexpected exception hit")
    Log.Info('Exception: "{}"'.format(e))
    Log.Info("If a key error, look at the 'season_map'/'relations_map' info to see why it is missing")
    if source=="tvdb":   Log.Info("Source is 'tvdb' so metadata will be loaded but it will not be complete for any 'anidb3' end of season additions")
    if source=="tvdb6":  Log.Info("Source is 'tvdb6' so removing AniDB & TVDB metadata from memory to prevent incorrect data from being loaded"); dict_AniDB.clear(); dict_TheTVDB.clear()
    is_modified = False

  Log.Info("--- return ---".ljust(157, '-'))
  Log.Info("is_modified: {}".format(is_modified))
  return is_modified
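get_prequel_info() above walks the Prequel relations to decide where an anidb3/anidb4 entry should be re-attached in TVDB numbering. Below is a trimmed, standalone sketch of just the 'tvdb' branch, run against the season_map/relations_map example from the docstring; helper names are kept but the Plex framework calls are removed, so this is illustrative only.

season_map    = {'max_season': 2, '12560': {'max': 1, 'min': 1}, '13950': {'max': 0, 'min': 0}}
relations_map = {'12560': {'Sequel': ['13950']}, '13950': {'Prequel': ['12560']}}

def get_prequel_info(prequel_id):
    entry = season_map[prequel_id]
    if entry['min'] == 0 and 'Prequel' in relations_map.get(prequel_id, {}) and relations_map[prequel_id]['Prequel'][0] in season_map:
        a, b = get_prequel_info(relations_map[prequel_id]['Prequel'][0])        # walk further down the prequel chain
        if not str(a).isdigit():  return ('', '')
        return (a, b + 100) if a < season_map['max_season'] else (a + 1, 0)
    if entry['min'] == 0:                        return ('', '')                # root prequel is itself a special
    if entry['max'] < season_map['max_season']:  return (entry['max'], 100)     # append after the prequel's last season
    return (season_map['max_season'] + 1, 0)                                    # otherwise open the next season

# AniDBid '13950' is specials-only and its prequel '12560' maps to TVDB season 1:
print(get_prequel_info('12560'))   # -> (1, 100), i.e. appended to the end of TVDB season 1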
Example No. 29
0
def GetMetadata(media, movie, error_log, lang, metadata_source, AniDBid,
                TVDBid, IMDbid, mappingList):
    ''' TVDB - Load series JSON
    '''
    Log.Info("=== TheTVDB.GetMetadata() ===".ljust(157, '='))
    TheTVDB_dict = {}
    max_season = 0
    anidb_numbering = metadata_source == "anidb" and (
        movie or max(map(int, media.seasons.keys())) <= 1)
    anidb_prefered = anidb_numbering and Dict(mappingList,
                                              'defaulttvdbseason') != '1'
    language_series = [
        language.strip()
        for language in Prefs['SerieLanguagePriority'].split(',')
        if language.strip() not in ('x-jat', 'zh-Hans', 'zh-Hant', 'zh-x-yue',
                                    'zh-x-cmn', 'zh-x-nan', 'main')
    ]
    language_episodes = [
        language.strip()
        for language in Prefs['EpisodeLanguagePriority'].split(',')
        if language.strip() not in ('x-jat', 'zh-Hans', 'zh-Hant', 'zh-x-yue',
                                    'zh-x-cmn', 'zh-x-nan', 'main')
    ]
    Log.Info(
        "TVDBid: '{}', IMDbid: '{}', language_series : {}, language_episodes: {}"
        .format(TVDBid, IMDbid, language_series, language_episodes))

    if not TVDBid.isdigit():
        Log.Info('TVDBid non-digit')
        return TheTVDB_dict, IMDbid

    ### TVDB Series JSON ###
    Log.Info("--- series ---".ljust(157, '-'))
    json = {}
    if lang not in language_series:
        language_series.insert(
            0, lang)  #for summary in lang (library setting) language
    if 'en' not in language_series:
        language_series.insert(0, 'en')  #for failover title
    if lang not in language_episodes:
        language_episodes.append(
            lang)  #for summary in lang (library setting) language
    if 'en' not in language_episodes:
        language_episodes.append('en')  #for failover title
    for language in language_series:
        json[language] = Dict(
            LoadFileTVDB(id=TVDBid,
                         filename='series_{}.json'.format(language),
                         url=TVDB_SERIES_URL.format(id=TVDBid) + '?' +
                         language,
                         headers={'Accept-Language': language}), 'data')
        if Dict(json[language],
                'seriesName'):  # and not Dict(TheTVDB_dict, 'language_rank'):
            SaveDict(
                language_series.index(language) if not anidb_prefered else
                len(language_series), TheTVDB_dict, 'language_rank')
            Log.Info("[ ] language_rank: {}".format(
                Dict(TheTVDB_dict, 'language_rank')))
            Log.Info("[ ] title: {}".format(
                SaveDict(
                    Dict(json[language], 'seriesName')
                    or Dict(serie2_json, 'seriesName'), TheTVDB_dict,
                    'title')))
            Log.Info("[ ] original_title: {}".format(
                SaveDict(
                    Dict(json[language], 'seriesName')
                    or Dict(serie2_json, 'seriesName'), TheTVDB_dict,
                    'original_title')))
        if Dict(json, lang) and (Dict(json, lang, 'overview')
                                 or Dict(TheTVDB_dict, 'language_rank')):
            break  #only need json in lang for summary, in 'en' for most things
    if not anidb_prefered:
        SaveDict(
            Dict(json, lang, 'overview').strip(" \n\r")
            or Dict(json, 'en', 'overview').strip(" \n\r"), TheTVDB_dict,
            'summary')
    if Dict(json, lang):
        #format:   { "id","seriesId", "airsDayOfWeek", "imdbId", "zap2itId", "added", "addedBy", "lastUpdated", "seriesName", "aliases", "banner", "status",
        #             "firstAired", "network", "networkId", "runtime", "genre, "overview", "airsTime", "rating" , "siteRating", "siteRatingCount" }
        Log.Info("[ ] IMDbid: {}".format(
            SaveDict(Dict(json[lang], 'imdbId') or IMDbid, TheTVDB_dict,
                     'IMDbid')))
        Log.Info("[ ] zap2itId: {}".format(
            SaveDict(Dict(json[lang], 'zap2it_id'), TheTVDB_dict, 'zap2itId')))
        Log.Info("[ ] content_rating: {}".format(
            SaveDict(Dict(json[lang], 'rating'), TheTVDB_dict,
                     'content_rating')))
        Log.Info("[ ] originally_available_at: {}".format(
            SaveDict(Dict(json[lang], 'firstAired'), TheTVDB_dict,
                     'originally_available_at')))
        Log.Info("[ ] studio: {}".format(
            SaveDict(Dict(json[lang], 'network'), TheTVDB_dict, 'studio')))
        Log.Info("[ ] rating: {}".format(
            SaveDict(Dict(json[lang], 'siteRating'), TheTVDB_dict, 'rating')))
        Log.Info("[ ] status: {}".format(
            SaveDict(Dict(json[lang], 'status'), TheTVDB_dict, 'status')))
        Log.Info("[ ] genres: {}".format(
            SaveDict(sorted(Dict(json[lang], 'genre')), TheTVDB_dict,
                     'genres')))
        if Dict(json[lang], 'runtime').isdigit():
            Log.Info('[ ] duration: {}'.format(
                SaveDict(
                    int(Dict(json[lang], 'runtime')) * 60 * 1000, TheTVDB_dict,
                    'duration')))  #in ms in plex

        series_images = {  # Pull the primary images used for the series entry
            'poster': Dict(json[language], 'poster'),
            'banner': Dict(json[language], 'banner'),
            'fanart': Dict(json[language], 'fanart'),
            'seasonwide': Dict(json[language], 'seasonwide'),
            'series': Dict(json[language], 'series')
        }

        ### TVDB Series Actors JSON ###
        Log.Info("--- actors ---".ljust(157, '-'))
        actor_json = Dict(LoadFileTVDB(id=TVDBid,
                                       filename='actors_{}.json'.format(lang),
                                       url=TVDB_ACTORS_URL.format(id=TVDBid),
                                       headers={'Accept-Language': lang}),
                          'data',
                          default=[])
        if actor_json:  #JSON format: 'data': [{"seriesId", "name", "image", "lastUpdated", "imageAuthor", "role", "sortOrder", "id", "imageAdded", },...]
            for role in actor_json:
                try:
                    role_dict = {
                        'role':
                        Dict(role, 'role'),
                        'name':
                        Dict(role, 'name'),
                        'photo':
                        TVDB_IMG_ROOT +
                        role['image'] if Dict(role, 'image') else ''
                    }
                    SaveDict([role_dict], TheTVDB_dict, 'roles')
                    Log.Info(
                        "[ ] role: {:<50}, name: {:<20}, photo: {}".format(
                            role_dict['role'], role_dict['name'],
                            role_dict['photo']))
                except Exception as e:
                    Log.Info(" role: {}, error: '{}'".format(
                        str(role), str(e)))
            #common.DisplayDict(actor_json, ['role', 'name', 'image'])

        ### Load pages of episodes ###
        Log.Info("--- episodes ---".ljust(157, '-'))
        episodes_json, sorted_episodes_json, next_page = [], {}, 1
        while next_page not in (None, '', 'null'):
            episodes_json_page = LoadFileTVDB(
                id=TVDBid,
                filename='episodes_page{}_{}.json'.format(next_page, lang),
                url=TVDB_EPISODE_PAGE_URL.format(id=TVDBid, page=next_page),
                headers={'Accept-Language': lang})
            next_page = Dict(episodes_json_page, 'links', 'next')
            episodes_json.extend(Dict(episodes_json_page, 'data'))
        for episode_json in episodes_json:
            sorted_episodes_json['s{:02d}e{:03d}'.format(
                Dict(episode_json, 'airedSeason'),
                Dict(episode_json, 'airedEpisodeNumber'))] = episode_json

        ### Build list_abs_eps for tvdb 3/4/5 ###
        list_abs_eps, list_sp_eps = {}, []
        if metadata_source in ('tvdb3', 'tvdb4'):
            for s in media.seasons:
                for e in media.seasons[s].episodes:
                    if s == '0': list_sp_eps.append(e)
                    else: list_abs_eps[e] = s
            Log.Info('Present abs eps: {}'.format(list_abs_eps))

        ### episode loop ###
        tvdb_special_missing, summary_missing_special, summary_missing, summary_present, episode_missing, episode_missing_season, episode_missing_season_all, abs_number, ep_count = [], [], [], [], [], [], True, 0, 0
        for key in sorted(sorted_episodes_json):

            # Episode and Absolute number calculation engine, episode translation
            episode_json = sorted_episodes_json[key]
            episode = str(Dict(episode_json, 'airedEpisodeNumber'))
            season = str(Dict(episode_json, 'airedSeason'))
            numbering = "s{}e{}".format(season, episode)

            # Replace all the individual episodes reported as missing with a single season 'sX' entry
            if episode == "1":
                if not episode_missing_season_all:
                    episode_missing.extend(episode_missing_season)
                elif episode_missing_season:
                    first_entry, last_entry = episode_missing_season[
                        0], episode_missing_season[-1]
                    fm = re.match(
                        r'((?P<abs>\d+) \()?s(?P<s>\d+)e(?P<e>\d+)\)?',
                        first_entry).groupdict()
                    lm = re.match(
                        r'((?P<abs>\d+) \()?s(?P<s>\d+)e(?P<e>\d+)\)?',
                        last_entry).groupdict()
                    episode_missing.append(
                        "s{}e{}-{}".format(fm['s'], fm['e'], lm['e'])
                        if fm['abs'] is None else "{}-{} (s{}e{}-{})".format(
                            fm['abs'], lm['abs'], fm['s'], fm['e'], lm['e']))
                episode_missing_season, episode_missing_season_all = [], True

            # Get the max season number from TVDB API
            if int(season) > max_season: max_season = int(season)

            ### ep translation [Thetvdb absolute numbering followed, including in specials to stay accurate with scudlee's mapping]
            anidbid = ""
            abs_number = Dict(episode_json,
                              'absoluteNumber',
                              default=0 if season == '0' else abs_number + 1)
            if anidb_numbering:
                if Dict(mappingList, 'defaulttvdbseason_a'):
                    season, episode = '1', str(abs_number)
                else:
                    season, episode, anidbid = AnimeLists.anidb_ep(
                        mappingList, season, episode)
            elif metadata_source in ('tvdb3', 'tvdb4'):
                for s in media.seasons:  #if abs id exists on disk, leave specials with no translation
                    if str(abs_number) in list_abs_eps and str(
                            abs_number
                    ) in media.seasons[s].episodes and s != "0":
                        season, episode = s, str(abs_number)
                        break
            elif metadata_source == 'tvdb5':
                if abs_number: season, episode = '1', str(abs_number)

            # Record absolute number mapping for AniDB metadata pull
            if metadata_source == 'tvdb3':
                SaveDict((str(Dict(episode_json, 'airedSeason')),
                          str(Dict(episode_json, 'airedEpisodeNumber'))),
                         mappingList, 'absolute_map', str(abs_number))

            ### Missing summaries logs ###
            if Dict(episode_json, 'overview'):
                summary_present.append(numbering)
            elif season != '0':
                summary_missing.append(numbering)
            else:
                summary_missing_special.append(numbering)

            ### Check for Missing Episodes ###
            is_missing = False
            if not(str(Dict(episode_json, 'airedSeason'))=='0' and str(Dict(episode_json, 'airedEpisodeNumber')) in list_sp_eps) and \
               not(metadata_source in ('tvdb3', 'tvdb4') and str(abs_number) in list_abs_eps) and \
               not(not movie and season in media.seasons and episode in media.seasons[season].episodes):
                is_missing = True
                Log.Info(
                    '[ ] {:>7} s{:0>2}e{:0>3} anidbid: {:>7} air_date: {}'.
                    format(numbering, season, episode, anidbid,
                           Dict(episode_json, 'firstAired')))
                air_date = Dict(episode_json, 'firstAired')
                air_date = int(air_date.replace(
                    '-', '')) if air_date.replace('-', '').isdigit() and int(
                        air_date.replace('-', '')) > 10000000 else 99999999
                if int(time.strftime("%Y%m%d")) <= air_date + 1:
                    pass  #Log.Info("TVDB - Episode '{}' missing but not aired/missing '{}'".format(numbering, air_date))
                elif season == '0':
                    tvdb_special_missing.append(episode)
                elif metadata_source != 'tvdb6':
                    episode_missing_season.append(
                        str(abs_number) + " (" + numbering +
                        ")" if metadata_source in ('tvdb3',
                                                   'tvdb4') else numbering)

            ### File present on disk
            if not is_missing or Dict(
                    mappingList, 'possible_anidb3'
            ) or metadata_source == "tvdb6":  # Only pull all if anidb3(tvdb)/anidb4(tvdb6) usage for tvdb ep/season adjustments
                episode_missing_season_all = False
                if not is_missing:
                    Log.Info(
                        '[X] {:>7} s{:0>2}e{:0>3} anidbid: {:>7} air_date: {} abs_number: {}, title: {}'
                        .format(numbering, season, episode, anidbid,
                                Dict(episode_json, 'firstAired'), abs_number,
                                Dict(episode_json, 'episodeName')))
                if not anidb_numbering:
                    SaveDict(abs_number, TheTVDB_dict, 'seasons', season,
                             'episodes', episode, 'absolute_index')
                SaveDict(Dict(json[lang], 'rating'), TheTVDB_dict, 'seasons',
                         season, 'episodes', episode, 'content_rating')
                SaveDict(Dict(TheTVDB_dict, 'duration'), TheTVDB_dict,
                         'seasons', season, 'episodes', episode, 'duration')
                SaveDict(Dict(episode_json,
                              'firstAired'), TheTVDB_dict, 'seasons', season,
                         'episodes', episode, 'originally_available_at')

                # Title from series page
                rank, title = len(language_episodes) + 1, ''
                if Dict(episode_json, 'episodeName'):
                    rank = language_episodes.index(
                        lang) if lang in language_episodes else len(
                            language_episodes)
                    title = Dict(episode_json, 'episodeName')
                    Log.Info(" - [1] title:   [{}] {}".format(
                        language_episodes[rank], title))

                #Summary from series page
                if Dict(episode_json, 'overview').strip(" \n\r"):
                    SaveDict(
                        Dict(episode_json,
                             'overview').strip(" \n\r"), TheTVDB_dict,
                        'seasons', season, 'episodes', episode, 'summary')
                    Log.Info(' - [1] summary: [{}] {}'.format(
                        lang,
                        Dict(TheTVDB_dict, 'seasons', season, 'episodes',
                             episode, 'summary')))

                ### Ep advance information ###
                ep_count += 1
                lang2 = 'en' if len(
                    language_episodes) <= 1 else language_episodes[1]
                episode_details_json = Dict(
                    LoadFileTVDB(id=TVDBid,
                                 filename='episode_{}_{}.json'.format(
                                     Dict(episode_json, 'id'), lang2),
                                 url=TVDB_EPISODE_URL.format(
                                     id=str(Dict(episode_json, 'id'))),
                                 headers={'Accept-Language': lang2}), 'data')
                if episode_details_json:

                    # Std ep info loaded for the library language, then details for the 1st language, loading other languages if needed
                    if lang2 in language_episodes and language_episodes.index(
                            lang2) < rank and Dict(
                                episode_details_json, 'language',
                                'episodeName') == lang2 and Dict(
                                    episode_details_json, 'episodeName'):
                        rank = language_episodes.index(lang2)
                        title = Dict(episode_details_json, 'episodeName')
                        Log.Info(" - [2] title:   [{}] {}".format(
                            language_episodes[rank], title))

                    #Summary
                    if not Dict(TheTVDB_dict, 'seasons', season, 'episodes',
                                episode, 'summary') and Dict(
                                    episode_details_json, 'overview'):
                        SaveDict(
                            Dict(episode_details_json,
                                 'overview').strip(" \n\r"), TheTVDB_dict,
                            'seasons', season, 'episodes', episode, 'summary')
                        Log.Info(' - [2] summary: [{}] {}'.format(
                            lang2,
                            Dict(TheTVDB_dict, 'seasons', season, 'episodes',
                                 episode, 'summary')))

                    SaveDict(Dict(episode_details_json,
                                  'writers'), TheTVDB_dict, 'seasons', season,
                             'episodes', episode, 'writers')
                    SaveDict(Dict(episode_details_json,
                                  'directors'), TheTVDB_dict, 'seasons',
                             season, 'episodes', episode, 'directors')
                    SaveDict(Dict(episode_details_json,
                                  'siteRating'), TheTVDB_dict, 'seasons',
                             season, 'episodes', episode, 'rating')
                    #SaveDict( Dict(episode_details_json, 'guestStars'         ), TheTVDB_dict, 'seasons', season, 'episodes', episode, 'guest_stars')

                    # Episode screenshot/Thumbnail
                    if Dict(episode_details_json, 'filename'):
                        SaveDict((os.path.join(
                            "TheTVDB", "episodes",
                            os.path.basename(
                                Dict(episode_details_json,
                                     'filename'))), 1, None), TheTVDB_dict,
                                 'seasons', season, 'episodes', episode,
                                 'thumbs',
                                 str(TVDB_IMG_ROOT +
                                     Dict(episode_details_json, 'filename')))
                        Log.Info(' - [ ] thumb: {}'.format(
                            TVDB_IMG_ROOT +
                            Dict(episode_details_json, 'filename') if Dict(
                                episode_details_json, 'filename') else ''))

                #Ep title fallback (first lang title comes from ep list, second from ep details)
                for lang_rank, language in enumerate(
                        language_episodes[2:rank -
                                          1] if len(language_episodes) > 1
                        and rank >= 2 and not title else []):
                    if not language: continue
                    episode_details_json = Dict(LoadFileTVDB(
                        id=TVDBid,
                        filename='episode_{}_{}.json'.format(
                            Dict(episode_json, 'id'), language),
                        url=TVDB_EPISODE_URL.format(
                            id=str(Dict(episode_json, 'id'))),
                        headers={'Accept-Language': lang}),
                                                'data',
                                                default={})
                    if Dict(episode_details_json, 'episodeName'):
                        title = Dict(episode_details_json, 'episodeName')
                        rank = lang_rank
                        Log.Info(" - [3] title:   [{}] {}".format(
                            language_episodes[rank], title))
                    if not Dict(TheTVDB_dict, 'seasons', season, 'episodes',
                                episode, 'summary') and Dict(
                                    episode_details_json, 'overview'):
                        SaveDict(
                            Dict(episode_details_json,
                                 'overview')[:160].strip(" \n\r"),
                            TheTVDB_dict, 'seasons', season, 'episodes',
                            episode, 'summary')
                        Log.Info(' - [3] summary: [{}] {}'.format(
                            language_episodes[lang_rank],
                            Dict(TheTVDB_dict, 'seasons', season, 'episodes',
                                 episode, 'summary')))
                    if title and Dict(TheTVDB_dict, 'seasons', season,
                                      'episodes', episode, 'summary'):
                        break
                SaveDict(title, TheTVDB_dict, 'seasons', season, 'episodes',
                         episode, 'title')
                SaveDict(rank, TheTVDB_dict, 'seasons', season, 'episodes',
                         episode, 'language_rank')

        # (last season) Replace all the individual episodes reported as missing with a single season 'sX' entry
        if not episode_missing_season_all:
            episode_missing.extend(episode_missing_season)
        elif episode_missing_season:
            first_entry, last_entry = episode_missing_season[
                0], episode_missing_season[-1]
            fm = re.match(r'((?P<abs>\d+) \()?s(?P<s>\d+)e(?P<e>\d+)\)?',
                          first_entry).groupdict()
            lm = re.match(r'((?P<abs>\d+) \()?s(?P<s>\d+)e(?P<e>\d+)\)?',
                          last_entry).groupdict()
            episode_missing.append(
                "s{}e{}-{}".format(fm['s'], fm['e'], lm['e'])
                if fm['abs'] is None else "{}-{} (s{}e{}-{})".
                format(fm['abs'], lm['abs'], fm['s'], fm['e'], lm['e']))

        # Set the min/max season to ints & update max value to the next min-1 to handle multi tvdb season anidb entries
        map_min_values = [
            int(Dict(mappingList, 'season_map')[x]['min'])
            for x in Dict(mappingList, 'season_map', default={})
            for y in Dict(mappingList, 'season_map')[x] if y == 'min'
        ]
        for entry in Dict(mappingList, 'season_map', default={}):
            entry_min, entry_max = int(
                mappingList['season_map'][entry]['min']), int(
                    mappingList['season_map'][entry]['max'])
            while entry_min != 0 and entry_max + 1 not in map_min_values and entry_max < max_season:
                entry_max += 1
            mappingList['season_map'][entry] = {
                'min': entry_min,
                'max': entry_max
            }
        SaveDict(max_season, mappingList, 'season_map', 'max_season')

        ### Logging ###
        if not movie:
            if summary_missing:
                error_log['Missing Episode Summaries'].append(
                    "TVDBid: %s | Title: '%s' | Missing Episode Summaries: %s"
                    % (common.WEB_LINK %
                       (common.TVDB_SERIE_URL + TVDBid, TVDBid),
                       Dict(TheTVDB_dict, 'title'), str(summary_missing)))
            if summary_missing_special:
                error_log['Missing Special Summaries'].append(
                    "TVDBid: %s | Title: '%s' | Missing Special Summaries: %s"
                    % (common.WEB_LINK %
                       (common.TVDB_SERIE_URL + TVDBid, TVDBid),
                       Dict(TheTVDB_dict,
                            'title'), str(summary_missing_special)))
        if metadata_source.startswith("tvdb") or metadata_source.startswith(
                "anidb") and not movie and max(map(int,
                                                   media.seasons.keys())) > 1:
            if episode_missing:
                error_log['Missing Episodes'].append(
                    "TVDBid: %s | Title: '%s' | Missing Episodes: %s" %
                    (common.WEB_LINK %
                     (common.TVDB_SERIE_URL + TVDBid, TVDBid),
                     Dict(TheTVDB_dict, 'title'), str(episode_missing)))
            if tvdb_special_missing:
                error_log['Missing Specials'].append(
                    "TVDBid: %s | Title: '%s' | Missing Specials: %s" %
                    (common.WEB_LINK %
                     (common.TVDB_SERIE_URL + TVDBid, TVDBid),
                     Dict(TheTVDB_dict, 'title'), str(tvdb_special_missing)))
            #Log.Debug("Episodes without Summary: " + str(sorted(summary_missing, key=common.natural_sort_key)))

        ### Picture types JSON download ###
        Log.Info("--- images ---".ljust(157, '-'))
        languages = [
            language.strip()
            for language in Prefs['PosterLanguagePriority'].split(',')
        ]
        Log.Info('languages: {}'.format(languages))
        for language in languages:
            try:
                bannerTypes = Dict(LoadFileTVDB(
                    id=TVDBid,
                    filename='images_{}.json'.format(language),
                    url=TVDB_SERIES_IMG_INFO_URL.format(id=TVDBid),
                    headers={'Accept-Language': language}),
                                   'data',
                                   default={})
            except:
                Log.Info("Invalid image JSON from url: " +
                         TVDB_SERIES_IMG_INFO_URL % TVDBid)
            else:  #JSON format = {"fanart", "poster", "season", "seasonwide", "series"}
                metanames = {
                    'fanart': "art",
                    'poster': "posters",
                    'series': "banners",
                    'season': "seasons",
                    'seasonwide': 'seasonwide'
                }
                count_valid = {key: 0 for key in metanames}
                Log.Info("bannerTypes: {}".format(bannerTypes))

                #Loop per banner type ("fanart", "poster", "season", "series") skip 'seasonwide' - Load bannerType images list JSON
                for bannerType in bannerTypes:
                    if bannerTypes[
                            bannerType] == 0 or bannerType == 'seasonwide' or movie and not bannerType in (
                                'fanart', 'poster'):
                        continue  #Loop if no images
                    #if anidb_numbering and Dict(mappingList, 'defaulttvdbseason') != '1' and bannerType=='poster':  continue  #skip if anidb numbered serie mapping to season 0 or 2+

                    Log.Info(
                        ("--- images.%s ---" % bannerType).ljust(157, '-'))
                    try:
                        images = Dict(LoadFileTVDB(
                            id=TVDBid,
                            filename='images_{}_{}.json'.format(
                                bannerType, language),
                            url=TVDB_SERIES_IMG_QUERY_URL.format(
                                id=TVDBid, type=bannerType),
                            headers={'Accept-Language': language}),
                                      'data',
                                      default={})
                    except:
                        images = {}
                        Log.Info(
                            "Bad image type query data for TVDB id: %s (bannerType: %s)"
                            % (TVDBid, bannerType))
                    else:
                        images = sorted(
                            images,
                            key=lambda x: Dict(
                                x, "ratingsInfo", "average", default=0),
                            reverse=True)
                        for image in images:  #JSON format = {"data": [{"id", "keyType", "subKey"(season/graphical/text), "fileName", "resolution", "ratingsInfo": {"average", "count"}, "thumbnail"}]}
                            if not Dict(image, 'fileName'):
                                continue  #Avoid "IOError: [Errno 21] Is a directory: u'/var/lib/plexmediaserver/Library/Application Support/Plex Media Server/Plug-in Support/Data/com.plexapp.agents.hama/DataItems/TheTVDB'" if filename empty
                            count_valid[
                                bannerType] = count_valid[bannerType] + 1

                            ### Adding picture ###
                            rank = common.poster_rank(
                                'TheTVDB', metanames[bannerType], language,
                                0 if Dict(image, 'fileName') == Dict(
                                    series_images,
                                    bannerType) else count_valid[bannerType])
                            fileNamePath = os.path.join(
                                'TheTVDB',
                                Dict(image, 'fileName').replace('/', os.sep))
                            fileNameURL = TVDB_IMG_ROOT + Dict(
                                image, 'fileName')
                            thumbnail = TVDB_IMG_ROOT + Dict(
                                image, 'thumbnail') if Dict(
                                    image, 'thumbnail') else None
                            subKey = str(Dict(
                                image, 'subKey'))  # Convert to string once
                            if bannerType == 'season':  #tvdb season posters or anidb specials and defaulttvdb season  ## season 0 and empty + season == defaulttvdbseason(a=1)
                                if not anidb_numbering:
                                    SaveDict((fileNamePath, rank, thumbnail),
                                             TheTVDB_dict, 'seasons', subKey,
                                             'posters', fileNameURL)
                                else:
                                    if subKey == Dict(
                                            mappingList, 'defaulttvdbseason'
                                    ):  # If the TVDB season is the AniDB default season, add season poster as series poster
                                        SaveDict(
                                            (fileNamePath, rank, thumbnail),
                                            TheTVDB_dict, 'posters',
                                            fileNameURL)
                                    if subKey in [
                                            '0',
                                            Dict(mappingList,
                                                 'defaulttvdbseason')
                                    ]:  # If the TVDB season is the season 0 OR AniDB default season, add season poster
                                        SaveDict(
                                            (fileNamePath, rank, thumbnail),
                                            TheTVDB_dict, 'seasons',
                                            '0' if subKey == '0' else '1',
                                            'posters', fileNameURL)
                            else:
                                if anidb_numbering and Dict(
                                        mappingList,
                                        'defaulttvdbseason') != '1':
                                    rank = rank + 10
                                SaveDict((fileNamePath, rank, thumbnail),
                                         TheTVDB_dict, metanames[bannerType],
                                         fileNameURL)
                            Log.Info(
                                "[!] bannerType: {:>7} subKey: {:>9} rank: {:>3} filename: {} thumbnail: {} resolution: {} average: {} count: {}"
                                .format(metanames[bannerType], subKey, rank,
                                        fileNameURL, thumbnail,
                                        Dict(image, 'resolution'),
                                        Dict(image, 'ratingsInfo', 'average'),
                                        Dict(image, 'ratingsInfo', 'count')))

                #Log.Info("Posters : {}/{}, Season posters: {}/{}, Art: {}/{}".format(count_valid['poster'], Dict(bannerTypes, 'poster'), count_valid['season'], Dict(bannerTypes, 'season') or 0, count_valid['fanart'], Dict(bannerTypes, 'fanart')))
                if not Dict(bannerTypes, 'poster'):
                    error_log['TVDB posters missing'].append(
                        "TVDBid: %s | Title: '%s'" %
                        (common.WEB_LINK %
                         (common.TVDB_SERIE_URL + TVDBid, TVDBid),
                         Dict(TheTVDB_dict, 'title')))
                if not Dict(bannerTypes, 'season'):
                    error_log['TVDB season posters missing'].append(
                        "TVDBid: %s | Title: '%s'" %
                        (common.WEB_LINK %
                         (common.TVDB_SERIE_URL + TVDBid, TVDBid),
                         Dict(TheTVDB_dict, 'title')))

        Log.Info("--- final summary info ---".ljust(157, '-'))
        Log.Info(
            "url: '{}', IMDbid: {}, Present episodes: {}, Missing: {}".format(
                TVDB_SERIES_URL.format(id=TVDBid), IMDbid, ep_count,
                sorted(episode_missing, key=common.natural_sort_key)))

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info("absolute_map: {}".format(
        DictString(Dict(mappingList, 'absolute_map', default={}), 0)))
    Log.Info("season_map: {}".format(
        DictString(Dict(mappingList, 'season_map', default={}), 0)))
    Log.Info("TheTVDB_dict: {}".format(DictString(TheTVDB_dict, 4)))
    return TheTVDB_dict, IMDbid
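One detail worth isolating from the episode loop above is how runs of consecutive missing episodes are collapsed into a single log entry. A standalone sketch of that compaction follows; the sample list is made up, the regex is the one used above.

import re

episode_missing_season = ['27 (s2e1)', '28 (s2e2)', '29 (s2e3)']   # one season's worth of missing eps

first_entry, last_entry = episode_missing_season[0], episode_missing_season[-1]
fm = re.match(r'((?P<abs>\d+) \()?s(?P<s>\d+)e(?P<e>\d+)\)?', first_entry).groupdict()
lm = re.match(r'((?P<abs>\d+) \()?s(?P<s>\d+)e(?P<e>\d+)\)?', last_entry).groupdict()
summary = ("s{}e{}-{}".format(fm['s'], fm['e'], lm['e']) if fm['abs'] is None
           else "{}-{} (s{}e{}-{})".format(fm['abs'], lm['abs'], fm['s'], fm['e'], lm['e']))
print(summary)   # -> 27-29 (s2e1-3)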
Example No. 30
0
def GetMetadata(media, movie, error_log, id):
    Log.Info("=== AnimeLists.GetMetadata() ===".ljust(157, '='))
    mappingList, AnimeLists_dict = {}, {}
    found = False
    source, id = id.split('-', 1) if '-' in id else ("", id)
    AniDB_id = id if source.startswith('anidb') else ""
    TVDB_id = id if source.startswith('tvdb') else ""
    TMDB_id = id if source.startswith('tmdb') else ""
    IMDB_id = id if source.startswith('imdb') else ""
    AniDBid = ""
    TVDBid = ""
    TMDBid = ""
    IMDBid = ""
    tvdb_numbering = True if not movie and (
        TVDB_id
        or AniDB_id and max(map(int, media.seasons.keys())) > 1) else False
    tvdbcounts = {}

    ### Search for match ###
    Log.Info("tvdb_numbering: {}".format(tvdb_numbering))
    AniDB_id2, TVDB_id2 = "", ""

    AniDBTVDBMapCustom = GetAniDBTVDBMapCustom(media, movie)
    if AniDBTVDBMapCustom:
        AniDBTVDBMapFull = MergeMaps(AniDBTVDBMap, AniDBTVDBMapCustom)
    else:
        AniDBTVDBMapFull = AniDBTVDBMap

    def anime_core(anime):
        defaulttvdbseason = anime.get('defaulttvdbseason') if anime.get(
            'defaulttvdbseason'
        ) and anime.get('defaulttvdbseason') != 'a' else '1'
        episodeoffset = anime.get('episodeoffset') if anime.get(
            'episodeoffset') else '0'
        s1_mapping_count = len(
            anime.xpath(
                "mapping-list/mapping[@anidbseason='1'][@tvdbseason='0' or @tvdbseason='1']"
            ))
        s1e1_mapping = True if anime.xpath(
            "mapping-list/mapping[@anidbseason='1'][@tvdbseason='1'][contains(text(), '-1;')]"
        ) else False
        is_primary_series = True if defaulttvdbseason == '1' and episodeoffset == '0' and (
            s1_mapping_count == 0 or s1e1_mapping) else False
        return defaulttvdbseason, episodeoffset, s1_mapping_count, is_primary_series

    Log.Info("--- AniDBTVDBMap ---".ljust(157, '-'))
    forcedID = {
        'anidbid': AniDB_id,
        'tvdbid': TVDB_id,
        'tmdbid': TMDB_id,
        'imdbid': IMDB_id
    }
    for anime in AniDBTVDBMapFull.iter('anime') if AniDBTVDBMapFull else []:
        # gather any manually specified source ids
        foundID, wantedID = {}, {}
        for check in forcedID.keys():
            foundID[check] = anime.get(check, "")
            wantedID[check] = True if foundID[check] == forcedID[
                check] and forcedID[check] != '' else False

        # if this row matches our specified source-id
        if True in wantedID.values():
            # save the found values for later use in other GetMetadata calls that don't depend on AniDB etc.
            IMDBid, TMDBid, TVDBid, AniDBid = foundID['imdbid'], foundID[
                'tmdbid'], foundID['tvdbid'], foundID['anidbid']
            # use the old check to decide whether to proceed
            if TVDBid == '' and AniDBid == '': continue
        # nothing found, skip
        else:
            continue
        found = True

        # record the number of entries using the same tvdb id
        SaveDict(Dict(tvdbcounts, TVDBid, default=0) + 1, tvdbcounts, TVDBid)

        defaulttvdbseason, episodeoffset, s1_mapping_count, is_primary_series = anime_core(
            anime)

        if not tvdb_numbering and not TVDB_id: TVDB_id2 = TVDBid
        if tvdb_numbering and AniDBid and TVDBid.isdigit(
        ) and is_primary_series and not AniDB_id:
            AniDB_id2 = AniDBid
        Log.Info(
            "[+] AniDBid: {:>5}, TVDBid: {:>6}, defaulttvdbseason: {:>4}, offset: {:>3}, TMDBid: {:>7}, IMDBid: {:>10}, name: {}"
            .format(
                AniDBid, TVDBid,
                ("({})".format(anime.get('defaulttvdbseason')) if
                 anime.get('defaulttvdbseason') != defaulttvdbseason else '') +
                defaulttvdbseason, episodeoffset, TMDBid, IMDBid,
                GetXml(anime, 'name')))

        ### AniDB/TMDB/IMDB numbered series ###
        if AniDB_id or TMDB_id or IMDB_id:
            AniDB_id2 = AniDBid  # Needs to be set if TMDB/IMDB
            TVDB_id2 = TVDBid
            SaveDict(TMDBid, mappingList, 'tmdbid')
            SaveDict(IMDBid, mappingList, 'imdbid')
            SaveDict(defaulttvdbseason, mappingList, 'defaulttvdbseason')
            SaveDict(True if anime.get('defaulttvdbseason') == 'a' else False,
                     mappingList, 'defaulttvdbseason_a')
            SaveDict(episodeoffset, mappingList, 'episodeoffset')
            SaveDict(GetXml(anime, 'name'), mappingList, 'name')
            SaveDict(GetXml(anime, "supplemental-info/studio"),
                     AnimeLists_dict, 'studio')
            SaveDict(GetXml(anime, "supplemental-info/director"),
                     AnimeLists_dict, 'director')
            SaveDict(GetXml(anime, "supplemental-info/credits"),
                     AnimeLists_dict, 'writer')
            for genre in anime.xpath('supplemental-info/genre'):
                SaveDict([genre.text], AnimeLists_dict, 'genres')
            for art in anime.xpath('supplemental-info/fanart/thumb'):
                SaveDict(
                    {
                        art.text: ('/'.join(
                            art.text.split('/')[3:]), 1, art.get('preview'))
                    }, AnimeLists_dict, 'art')

        ### TheTVDB/multi-season numbered series and the Primary/Starting(s1e1) AniDB id ###
        if (TVDB_id or not movie and max(map(int, media.seasons.keys())) > 1
                and AniDB_id == '') and TVDBid.isdigit() and is_primary_series:
            AniDB_id2 = AniDBid
            SaveDict(TMDBid, mappingList, 'tmdbid')
            SaveDict(IMDBid, mappingList, 'imdbid')
            SaveDict(defaulttvdbseason, mappingList, 'defaulttvdbseason')
            SaveDict(anime.get('defaulttvdbseason') == 'a', mappingList,
                     'defaulttvdbseason_a')

        ### Season/episode mapping tables (entries with a numeric TVDBid) ###
        if TVDBid.isdigit():
            SaveDict(episodeoffset, mappingList, 'TVDB',
                     's-1' if defaulttvdbseason == '0'
                     and s1_mapping_count >= 1 else 's' + defaulttvdbseason,
                     AniDBid)  #mappingList['TVDB'][s1][anidbid]=episodeoffset
            SaveDict(
                {
                    'min': defaulttvdbseason,
                    'max': defaulttvdbseason
                }, mappingList, 'season_map',
                AniDBid)  # Set the min/max season to the 'defaulttvdbseason'
            if source == "tvdb6" and int(episodeoffset) > 0:
                SaveDict(
                    {
                        'min': '0',
                        'max': '0'
                    }, mappingList, 'season_map', AniDBid
                )  # Force series as special if not starting the TVDB season

            for season in anime.iter(
                    'mapping'
            ):  ### mapping list: <mapping-list> <mapping anidbseason="0" tvdbseason="0">;1-12;2-14;3-16;4-18;</mapping> </mapping-list>
                anidbseason, tvdbseason, offset, start, end = season.get(
                    'anidbseason'), season.get(
                        'tvdbseason'), season.get('offset') or '0', season.get(
                            'start'), season.get('end')
                Log.Info(
                    "    - season: [{:>2}],           [{:>2}], range:       [{:>3}-{:>3}], offset: {:>3}, text: {}"
                    .format(anidbseason, tvdbseason, start or '000', end
                            or '000', offset, (season.text or '').strip(';')))
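                # 'start'/'end' map a contiguous AniDB episode range onto TVDB episodes shifted by 'offset'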
                for ep in range(int(start), int(end) + 1) if start else []:
                    #Log.Info("[?] start: {}, end: {}, ep: {}".format(start, end, ep))
                    if not Dict(mappingList, 'TVDB', 's' + tvdbseason + 'e' +
                                str(ep + int(offset))):
                        SaveDict(
                            (anidbseason, str(ep), AniDBid), mappingList,
                            'TVDB',
                            's' + tvdbseason + 'e' + str(ep + int(offset))
                        )  #mappingList['TVDB'][s1e1]=(AniDB_season, AniDB_episode, AniDBid) for start-end mappings
                    #else: Log.Info("already present")
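                # the mapping text holds explicit 'anidb_ep-tvdb_ep' pairs separated by ';'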
                for ep in filter(
                        None, season.text.split(';')) if season.text else []:
                    if not '-' in ep:
                        Log.Info(
                            '[!] MAPPING ERROR, season.text: "{}", ep mapping missing hyphen: "{}"'
                            .format(season.text, ep))
                    elif not Dict(mappingList, 'TVDB',
                                  's' + tvdbseason + 'e' + ep.split('-')[1]):
                        SaveDict(
                            (anidbseason, ep.split('-')[0], AniDBid),
                            mappingList, 'TVDB',
                            's' + tvdbseason + 'e' + ep.split('-')[1]
                        )  #mappingList['TVDB'][s1e1]=(AniDB_season, AniDB_episode, AniDBid) for manual mapping like '1-12'
                    #elif '-' not in (mappingList, 'TVDB', 's'+tvdbseason+'e'+ep.split('-')[1]):
                    #  SaveDict((anidbseason, Dict(mappingList, 'TVDB', 's'+tvdbseason+'e'+ep.split('-')[1])[1]+'-'+ep.split('-')[0], AniDBid), mappingList, 'TVDB', 's'+tvdbseason+'e'+ep.split('-')[1])
                    #  Log.Info("already present so converting to range but range not supported")
                if int(Dict(mappingList, 'season_map', AniDBid, 'max')) < int(
                        season.get("tvdbseason")):
                    SaveDict(
                        season.get("tvdbseason"), mappingList, 'season_map',
                        AniDBid, 'max'
                    )  # Update the max season to the largest 'tvdbseason' season seen in 'mapping-list'

        ### Content rating and missing-TVDBid error logging ###
        if TVDBid == "hentai": SaveDict("X", AnimeLists_dict, 'content_rating')
        elif TVDBid in ("", "unknown", None):
            link = SCHUDLEE_FEEDBACK.format(
                title="aid:%s &#39;%s&#39; TVDBid:" % (AniDB_id, "title"),
                body=String.StripTags(
                    XML.StringFromElement(anime, encoding='utf8')))
            error_log['anime-list TVDBid missing'].append(
                'AniDBid: "{}" | Title: "{}" | Has no matching TVDBid "{}" in mapping file | <a href="{}" target="_blank">Submit bug report</a>'
                .format(AniDB_id, "title", TVDBid, link))
            Log.Info(
                '"anime-list TVDBid missing.htm" log added as tvdb series id missing in mapping file: "{}"'
                .format(TVDBid))

        # a forced guid needs only one matching entry, except a TheTVDB-numbered series reached through an anidb guid (multi-season), which keeps scanning
        if (AniDB_id or TMDB_id
                or IMDB_id) and (movie
                                 or max(map(int, media.seasons.keys())) <= 1):
            break

    else:
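        # for/else: this branch runs only when the loop above completed without a break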
        # That happens when no entry matched the forced id, or when a TVDB id required scanning every entry.
        if not found:
            Log.Info("ERROR: Could not find %s: %s" % (source, id))
            if AniDB_id != "":
                error_log['anime-list AniDBid missing'].append(
                    "AniDBid: " + common.WEB_LINK %
                    (common.ANIDB_SERIE_URL + AniDB_id, AniDB_id))
            # Reset the variables used for matching so it does not just keep the value of the last entry in the loop
            IMDBid, TMDBid, TVDBid, AniDBid = '', '', '', ''

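    # prefer the ids carried by the guid; fall back to the ids discovered while scanning the mapping file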
    AniDB_winner = AniDB_id or AniDB_id2
    TVDB_winner = TVDB_id or TVDB_id2

    Log.Info('             -----          ------')
    Log.Info('             {:>5}          {:>6}'.format(
        AniDB_winner, TVDB_winner))

    SaveDict(Dict(tvdbcounts, TVDB_winner), mappingList, 'tvdbcount')

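    # possible_anidb3: a tvdb-numbered library with any episode number above 100 may be using absolute numbering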
    if source == "tvdb":
        for s in media.seasons:
            for e in media.seasons[s].episodes:
                if int(e) > 100:
                    SaveDict(True, mappingList, 'possible_anidb3')
                    break
        else:
            SaveDict(False, mappingList, 'possible_anidb3')
    else:
        SaveDict(False, mappingList, 'possible_anidb3')

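    # s1e1_mapped: True when some mapped TVDB episode corresponds to AniDB season 1 episode 1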
    for values in Dict(mappingList, 'TVDB', default={}).values():
        if isinstance(values, tuple) and values[0] == '1' and values[1] == '1':
            SaveDict(True, mappingList, 's1e1_mapped')
            break
    else:
        SaveDict(False, mappingList, 's1e1_mapped')

    ### Update collection/studio
    TVDB_collection, title, studio = [], '', ''
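    # gather every AniDB entry mapped to the winning TVDB id; the primary series supplies the collection title and studio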
    for anime in AniDBTVDBMapFull.iter(
            'anime') if AniDBTVDBMapFull and TVDB_winner.isdigit() else []:
        if anime.get('tvdbid', "") == TVDB_winner:
            TVDB_collection.append(anime.get("anidbid", ""))
            if anime_core(anime)[3]:  #[3]==is_primary_series
                title = AniDB.GetAniDBTitle(
                    AniDB.AniDBTitlesDB.xpath(
                        '/animetitles/anime[@aid="{}"]/title'.format(
                            anime.get("anidbid", ""))))[
                                0]  #returns [title, main, language_rank]
                studio = GetXml(anime, "supplemental-info/studio")
    if len(
            TVDB_collection
    ) > 1 and title:  # Require that there be at least 2 anidb mappings for a collection
        Log.Info(
            "[ ] collection: TVDBid '%s' is part of collection: '%s', related_anime_list: %s"
            % (TVDB_winner,
               SaveDict([title + ' Collection'], AnimeLists_dict,
                        'collections'), TVDB_collection))
    else:
        Log.Info("[ ] collection: TVDBid '%s' is not part of any collection" %
                 TVDB_winner)
    Log.Info("[ ] studio: {}".format(
        SaveDict(studio, AnimeLists_dict, 'studio')))

    Log.Info("--- return ---".ljust(157, '-'))
    Log.Info(
        "AniDB_id: '{}', AniDB_id2: '{}', AniDBid: '{}', TVDB_id: '{}', TVDB_id2: '{}', TVDBid: '{}'"
        .format(AniDB_id, AniDB_id2, AniDBid, TVDB_id, TVDB_id2, TVDBid))
    Log.Info("mappingList: {}".format(DictString(mappingList, 1)))
    Log.Info("AnimeLists_dict: {}".format(DictString(AnimeLists_dict, 1)))
    return AnimeLists_dict, AniDB_winner, TVDB_winner if TVDB_winner.isdigit(
    ) else "", Dict(mappingList, 'tmdbid'), Dict(mappingList,
                                                 'imdbid'), mappingList