def buildList(query, opts):
    # TEMPORARY FIX:
    # replace all dashes in the query to work around the search behaviour of
    # treating a dash as negating all text that comes after it
    query = query.replace('-', ' ')

    import sys
    from MythTV.tmdb3 import searchMovie
    from MythTV import VideoMetadata
    from lxml import etree

    results = searchMovie(query)
    tree = etree.XML(u'<metadata></metadata>')
    mapping = [['runtime',     'runtime'],     ['title',      'originaltitle'],
               ['releasedate', 'releasedate'], ['tagline',    'tagline'],
               ['description', 'overview'],    ['homepage',   'homepage'],
               ['userrating',  'userrating'],  ['popularity', 'popularity']]

    count = 0
    for res in results:
        m = VideoMetadata()
        for i, j in mapping:
            if getattr(res, j):
                setattr(m, i, getattr(res, j))
        m.inetref = str(res.id)
        m.imdb = str(res.imdb).lstrip("t")
        if res.title:
            m.title = res.title
        # TODO:
        # should releasedate and year be pulled from the country-specific data,
        # or should it be left to the default information to cut down on
        # traffic from searches?
        if res.releasedate:
            m.year = res.releasedate.year
        if res.backdrop:
            b = res.backdrop
            m.images.append({'type':'fanart', 'url':b.geturl(),
                             'thumb':b.geturl(b.sizes()[0])})
        if res.poster:
            p = res.poster
            m.images.append({'type':'coverart', 'url':p.geturl(),
                             'thumb':p.geturl(p.sizes()[0])})
        tree.append(m.toXML())
        count += 1
        if count >= 60:
            # page limiter, don't want to overload the server
            break

    sys.stdout.write(etree.tostring(tree, encoding='UTF-8', pretty_print=True,
                                    xml_declaration=True))
    sys.exit(0)
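# Illustration of the dash workaround in buildList() above (the behaviour is
# inferred from the TEMPORARY FIX comment, not from search API documentation):
# a hyphenated query would otherwise have everything after the dash treated as
# an exclusion term, so the grabber searches for the space-separated form:
#
#     >>> 'spider-man'.replace('-', ' ')
#     'spider man'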
def buildCollection(tvinetref, opts):
    # option -C inetref
    from lxml import etree
    from MythTV import VideoMetadata, datetime
    from MythTV.utility import levenshtein
    from MythTV.tvmaze import tvmaze_api as tvmaze
    from MythTV.tvmaze import locales

    # set the session
    if opts.session:
        tvmaze.set_session(opts.session)

    if opts.debug:
        print("Function 'buildCollection' called with argument '%s'" % tvinetref)

    show_info = tvmaze.get_show(tvinetref)
    if opts.debug:
        for k, v in show_info.__dict__.items():
            print(k, " : ", v)

    tree = etree.XML(u'<metadata></metadata>')
    m = VideoMetadata()
    m.title = check_item(m, ("title", show_info.name), ignore=False)
    m.description = check_item(m, ("description", show_info.summary))
    if show_info.genres is not None and len(show_info.genres) > 0:
        for g in show_info.genres:
            try:
                if g is not None and len(g) > 0:
                    m.categories.append(g)
            except:
                pass
    m.inetref = check_item(m, ("inetref", str(show_info.id)), ignore=False)
    m.collectionref = check_item(m, ("collectionref", str(show_info.id)), ignore=False)
    m.imdb = check_item(m, ("imdb", str(show_info.external_ids['imdb'])))
    m.language = check_item(
        m, ("language", str(locales.Language.getstored(show_info.language))))
    m.userrating = check_item(m, ("userrating", show_info.rating['average']))
    try:
        m.popularity = check_item(m, ("popularity", float(show_info.weight)),
                                  ignore=False)
    except (TypeError, ValueError):
        pass
    if show_info.premiere_date:
        m.releasedate = check_item(m, ("releasedate", show_info.premiere_date))
        m.year = check_item(m, ("year", show_info.premiere_date.year))
    try:
        sinfo = show_info.network['name']
        if sinfo is not None and len(sinfo) > 0:
            m.studios.append(sinfo)
    except:
        pass

    posterList, fanartList, bannerList = get_show_art_lists(show_info.id)

    # Generate image lines for every piece of artwork
    for posterEntry in posterList:
        if (posterEntry[0] is not None) and (posterEntry[1] is not None):
            m.images.append({'type': 'coverart',
                             'url': posterEntry[0],
                             'thumb': posterEntry[1]})
        elif posterEntry[0] is not None:
            m.images.append({'type': 'coverart', 'url': posterEntry[0]})
    for fanartEntry in fanartList:
        if (fanartEntry[0] is not None) and (fanartEntry[1] is not None):
            m.images.append({'type': 'fanart',
                             'url': fanartEntry[0],
                             'thumb': fanartEntry[1]})
        elif fanartEntry[0] is not None:
            m.images.append({'type': 'fanart', 'url': fanartEntry[0]})
    for bannerEntry in bannerList:
        if (bannerEntry[0] is not None) and (bannerEntry[1] is not None):
            m.images.append({'type': 'banner',
                             'url': bannerEntry[0],
                             'thumb': bannerEntry[1]})
        elif bannerEntry[0] is not None:
            m.images.append({'type': 'banner', 'url': bannerEntry[0]})

    tree.append(m.toXML())
    print_etree(etree.tostring(tree, encoding='UTF-8', pretty_print=True,
                               xml_declaration=True))
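# 'check_item', 'get_show_art_lists' and 'print_etree' are helpers defined
# elsewhere in this grabber and are not shown in this excerpt.  As a minimal
# sketch only (an assumption about its behaviour, not the grabber's actual
# code), 'print_etree' is taken here to do nothing more than write the bytes
# returned by etree.tostring() to stdout:
#
#     import sys
#
#     def print_etree(etostr):
#         # etree.tostring() returns bytes when an explicit encoding is given
#         sys.stdout.write(etostr.decode('utf-8'))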
def _format_xml(self, ser_x, sea_x=None, epi_x=None):
    m = VideoMetadata()
    m.inetref = check_item(m, ("inetref", str(ser_x.id)), ignore=False)

    # try to get title and description for the preferred language list:
    # note: there could be a title but no description:
    #   $ ttvdb4.py -l ja -C 360893 --debug

    # get series name and overview:
    ser_title, desc = self._get_info_from_translations(ser_x.fetched_translations)
    if not ser_title:
        ser_title = ser_x.name
    m.title = check_item(m, ("title", ser_title), ignore=False)

    # get subtitle and overview:
    if epi_x:
        sub_title, sub_desc = self._get_info_from_translations(epi_x.fetched_translations)
        if not sub_title:
            sub_title = epi_x.name
        m.subtitle = check_item(m, ("subtitle", sub_title), ignore=False)
        if sub_desc:
            desc = sub_desc
        m.season = check_item(m, ("season", epi_x.seasonNumber), ignore=False)
        m.episode = check_item(m, ("episode", epi_x.number), ignore=False)
        m.runtime = check_item(m, ("runtime", epi_x.runtime), ignore=True)

    desc = strip_tags(desc).replace("\r\n", "").replace("\n", "")
    m.description = check_item(m, ("description", desc))

    try:
        if ser_x.originalLanguage:
            lang = ser_x.originalLanguage
        else:
            lang = self.language
        if ser_x.originalCountry:
            country = ser_x.originalCountry
        else:
            country = ser_x.country
        m.language = check_item(m, ("language", Language.getstored(lang).ISO639_1))
        if country:
            m.countries.append(country.upper())    # could be 'None'
        m.year = check_item(m, ('year', int(ser_x.firstAired.split('-')[0])))
        m.userrating = check_item(m, ('userrating', ser_x.score))
        for h in [x.id for x in ser_x.remoteIds if x.type == 4]:   # type 4 is 'Official Website'
            m.homepage = check_item(m, ('homepage', h))
            # MythTV supports only one entry
            break
    except:
        # considered as nice to have
        pass

    # add categories:
    try:
        for genre in ser_x.genres:
            if genre.name:
                m.categories.append(genre.name)
    except:
        pass

    # add optional fields:
    if epi_x:
        try:
            # add studios
            for c in epi_x.companies:
                m.studios.append(c.name)
        except:
            pass
        try:
            # add IMDB reference
            for r in epi_x.remoteIds:
                if r.sourceName == 'IMDB':
                    m.imdb = check_item(m, ('imdb', r.id))
                    break
        except:
            raise
        if self.country:
            try:
                # add certificates:
                area = Country.getstored(self.country).alpha3
                for c in epi_x.contentRatings:
                    if c.country.lower() == area.lower():
                        m.certifications[self.country] = c.name
                        break
            except:
                pass

    if self.ShowPeople:
        # add characters: see class 'People'
        # characters of type 'Actor' are sorted in ascending order
        actors = [c for c in ser_x.characters if c.type == People.Actor]
        actors_sorted = sort_list_by_key(actors, "sort", 99, reverse=False)
        # prefer actors that are sorted, i.e.: 'sort' > 0
        actors_s_1 = [x for x in actors_sorted if x.sort > 0]
        # append the rest, i.e.: actors with sort == 0
        actors_s_1.extend([x for x in actors_sorted if x.sort == 0])
        m.people.extend(self._get_crew_for_xml(actors_s_1, 'Actor', list_character=True))
        # on episodes, characters of type 'Guest Star' are sorted in ascending order
        if epi_x:
            guests = [c for c in epi_x.characters if c.type == People.Guest_Star]
            guests_sorted = sort_list_by_key(guests, "sort", 99, reverse=False)
            m.people.extend(self._get_crew_for_xml(guests_sorted, 'Guest Star'))
            directors = [c for c in epi_x.characters if c.type == People.Showrunner]
            directors_sorted = sort_list_by_key(directors, "sort", 99, reverse=False)
            m.people.extend(self._get_crew_for_xml(directors_sorted, 'Director'))

    # now we have all extended records for series, season, episode; create xml for them
    series_banners = []; season_banners = []
    series_posters = []; season_posters = []
    series_fanarts = []; season_fanarts = []

    # add the artworks, season preferred
    #        art_name     what             art_type(s)  from_r  from_a
    arts = [('coverart',  season_posters,  (7,),        sea_x,  sea_x.artwork if sea_x else []),
            ('coverart',  series_posters,  (2,),        ser_x,  ser_x.artworks),
            ('fanart',    season_fanarts,  (8, 9),      sea_x,  sea_x.artwork if sea_x else []),
            ('fanart',    series_fanarts,  (3, 4),      ser_x,  ser_x.artworks),
            ('banner',    season_banners,  (6,),        sea_x,  sea_x.artwork if sea_x else []),
            ('banner',    series_banners,  (1,),        ser_x,  ser_x.artworks),
            ]

    # avoid duplicates
    used_urls = []
    for (art_name, what, art_types, from_r, from_a) in arts:
        artlist = [art for art in from_a if art.type in art_types]
        what.extend(sort_list_by_lang(artlist, self.languagelist, other_key='score'))
        for entry in what:
            try:
                if entry.image not in used_urls:
                    used_urls.append(entry.image)
                    m.images.append({'type': art_name,
                                     'url': entry.image,
                                     'thumb': entry.thumbnail})
            except:
                pass

    if epi_x and epi_x.imageType in (11, 12):
        m.images.append({'type': 'screenshot', 'url': epi_x.image})

    self.tree.append(m.toXML())
def buildSingle(inetref, opts):
    import sys
    from MythTV.tmdb3 import Movie
    from MythTV import VideoMetadata
    from lxml import etree

    try:
        if len(inetref) == 7:
            movie = Movie.fromIMDB(inetref)
        else:
            movie = Movie(inetref)
    except:
        sys.exit(1)

    tree = etree.XML(u'<metadata></metadata>')
    mapping = [['runtime',     'runtime'],     ['title',      'originaltitle'],
               ['releasedate', 'releasedate'], ['tagline',    'tagline'],
               ['description', 'overview'],    ['homepage',   'homepage'],
               ['userrating',  'userrating'],  ['popularity', 'popularity'],
               ['budget',      'budget'],      ['revenue',    'revenue']]

    m = VideoMetadata()
    for i, j in mapping:
        if getattr(movie, j):
            setattr(m, i, getattr(movie, j))
    if movie.title:
        m.title = movie.title

    releases = list(movie.releases.items())
    if opts.country:
        try:
            # resort releases with selected country at top to ensure it
            # is selected by the metadata libraries
            index = list(zip(*releases))[0].index(opts.country)
            releases.insert(0, releases.pop(index))
        except ValueError:
            pass
        else:
            m.releasedate = releases[0][1].releasedate

    m.inetref = str(movie.id)
    m.imdb = str(movie.imdb).lstrip("t")
    if movie.collection:
        m.collectionref = str(movie.collection.id)
    if movie.releasedate:
        m.year = movie.releasedate.year
    for country, release in releases:
        if release.certification:
            m.certifications[country] = release.certification
    for genre in movie.genres:
        m.categories.append(genre.name)
    for studio in movie.studios:
        m.studios.append(studio.name)
    for country in movie.countries:
        m.countries.append(country.name)
    for cast in movie.cast:
        d = {'name':cast.name, 'character':cast.character,
             'department':'Actors', 'job':'Actor',
             'url':'http://www.themoviedb.org/people/{0}'.format(cast.id)}
        if cast.profile:
            d['thumb'] = cast.profile.geturl()
        m.people.append(d)
    for crew in movie.crew:
        d = {'name':crew.name, 'job':crew.job, 'department':crew.department,
             'url':'http://www.themoviedb.org/people/{0}'.format(crew.id)}
        if crew.profile:
            d['thumb'] = crew.profile.geturl()
        m.people.append(d)
    for backdrop in movie.backdrops:
        m.images.append({'type':'fanart', 'url':backdrop.geturl(),
                         'thumb':backdrop.geturl(backdrop.sizes()[0])})
    for poster in movie.posters:
        m.images.append({'type':'coverart', 'url':poster.geturl(),
                         'thumb':poster.geturl(poster.sizes()[0])})

    tree.append(m.toXML())
    sys.stdout.write(etree.tostring(tree, encoding='UTF-8', pretty_print=True,
                                    xml_declaration=True))
    sys.exit()
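# Hedged note (not part of the excerpt above): in the MythTV metadata-grabber
# convention these entry points are reached from the script's option parser
# (-M search, -D data, -C collection), so a dispatch roughly like the following
# is assumed at the bottom of the grabber.  The option attribute names below
# are illustrative only:
#
#     if opts.movielist:            # -M <query>
#         buildList(args[0], opts)
#     elif opts.moviedata:          # -D <inetref>
#         buildSingle(args[0], opts)
#     elif opts.collectiondata:     # -C <collectionref>
#         buildCollection(args[0], opts)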