def add_items(self):
    """Populate the Kodi directory with one entry per scraped movie link.

    Resolves metadata per link (preferring the cleaned full title when the
    scraped name mismatched, falling back to the base title), then appends
    the related-links and bookmark entries and closes the directory.
    """
    helper.start('MovieListing.add_items')
    helper.set_content('movies')
    action, is_folder = self._get_action_and_isfolder()
    for anchor in self.links:
        display_name = anchor.string.strip()
        href = anchor['href']
        # On a title mismatch, try the cleaned full title first and fall
        # back to the base title if that lookup produced no movie metadata.
        if self.mismatch:
            metadata = self.get_metadata(self.clean_name(args.full_title))
            if self.meta.is_metadata_empty(metadata, 'movie'):
                metadata = self.get_metadata(args.base_title)
        else:
            metadata = self.get_metadata(args.base_title)
        query = self._construct_query(href, action, metadata)
        metadata['title'] = display_name
        cm_items = self._get_contextmenu_items(href, display_name)
        helper.add_directory(query,
                             metadata,
                             img=args.icon,
                             fanart=args.fanart,
                             is_folder=is_folder,
                             contextmenu_items=cm_items)
    self._add_related_links()
    self._add_bookmark_link()
    helper.end_of_directory()
    helper.end('MovieListing.add_items')
    return
def add_items(self, title_prefix=None):
    """Add one directory entry per media container link, plus paging links.

    Parameters:
        title_prefix: optional string prepended to each displayed title
            (used to distinguish sub/dub listings).  When None, this call
            is treated as the top-level invocation: it sets the content
            type and finalizes the directory itself.
    """
    timestamper = t_s.TimeStamper('MediaContainerList.add_items')
    # Fix: compare against None by identity, not equality.
    end_dir = title_prefix is None
    title_prefix = title_prefix if title_prefix else ''
    if end_dir:
        helper.set_content('tvshows')

    # The last two links are 'Next'/'Last' pagination anchors; skip them here.
    iter_links = self.links[:-2] if self.has_next_page else self.links

    # Filter out the episode links for ongoing series (their hrefs carry
    # a '?id=' query); keep (name, url, slot-index) for the worker threads.
    mc_links = []
    idx = 0
    for link in iter_links:
        url = link['href']
        # Fix: raw string for the regex and identity test against None.
        if re.search(r'(\?id=)', url) is not None:
            continue
        mc_links.append((link.string.strip(), url, idx))
        idx += 1
    timestamper.stamp('Initial loop')

    # Pre-size the results list so each worker writes into its own slot.
    self.links_with_metadata = [None] * idx
    pool = threadpool.ThreadPool(
        4 if not helper.debug_metadata_threads() else 1)
    pool.map(self.worker, mc_links)
    pool.wait_completion()
    timestamper.stamp('Grabbing metadata with threads')

    for (name, url, metadata, media_type) in self.links_with_metadata:
        icon, fanart = self._get_art_from_metadata(metadata)
        # Reclassify OVAs/specials that were scraped as ordinary tv shows.
        if media_type == 'tvshow' and (
                ' (OVA)' in name or ' Specials' in name or
                re.search(r'( OVA)( \(((Sub)|(Dub))\))?$', name) is not None or
                re.search(r' (Special)$', name) is not None):
            media_type = 'special'
        query = self._construct_query(url, 'mediaList', metadata, name,
                                      media_type)
        contextmenu_items = self._get_contextmenu_items(
            url, name, metadata, media_type)
        metadata['title'] = title_prefix + name  # adjust title for sub and dub
        helper.add_directory(query, metadata, img=icon, fanart=fanart,
                             contextmenu_items=contextmenu_items,
                             total_items=len(mc_links))

    if self.has_next_page:
        query = self._construct_query(self.links[-2]['href'],
                                      'mediaContainerList')
        helper.add_directory(query, {'title': 'Next'})
        query = self._construct_query(self.links[-1]['href'],
                                      'mediaContainerList')
        helper.add_directory(query, {'title': 'Last'})

    if end_dir:
        helper.end_of_directory()
    timestamper.stamp_and_dump('Adding all items')
def add_directories(self, src):
    """Render a static list of (name, query) pairs as addon directory entries.

    Each query dict may optionally carry its own 'icon' and 'fanart' art paths.
    """
    helper.start('LocalList.add_directories')
    helper.set_content('addons')
    total = len(src)
    for label, entry_query in src:
        helper.add_directory(entry_query,
                             infolabels={'title': label},
                             img=entry_query.get('icon', ''),
                             fanart=entry_query.get('fanart', ''),
                             total_items=total)
    helper.end_of_directory()
    helper.end('LocalList.add_directories')
    return
def add_items(self):
    """Add numbered episode entries (and a trailing specials section).

    Splits the scraped links into regular episodes and specials/OVAs,
    computes an effective episode count (doubles count twice, halves are
    folded into their parent), numbers each entry, and attaches metadata
    looked up from the base title.
    """
    helper.start('EpisodeList.add_items')
    # Fix: idiomatic emptiness test, and pair helper.start with helper.end
    # on the early-out path (previously the end marker was skipped).
    if not self.links:
        helper.end('EpisodeList.add_items')
        return

    # We now have a list of episodes in links, and we need to figure out
    # which season those episodes belong to, as well as filter out stray
    # specials/OVAs. I have a numbered FSM for this. The caller should
    # invoke get_actual_media_type before this function to get the first state.

    # 2) Otherwise, we have a tv show. The most reliable way to figure out
    # what data to use is to use the first air date with the number of
    # episodes.
    self.season = None
    if self.first_air_date == '':
        # 3) If we don't have the air date, we will try our best to
        # determine which season this is based on the data we scraped
        self.season = self.__determine_season()
        if self.season is None:  # fix: identity comparison with None
            # I'm not sure what the next best step is here, but I have to
            # assume it's the first season to catch a lot of actual first
            # seasons...
            helper.log_debug('|COUNT|LEFTOVER| %s' % args.full_title)
    else:
        helper.log_debug('|COUNT|AIR| %s' % args.full_title)

    specials = []
    episodes = []
    double_eps, half_eps = 0, 0
    for link in self.links:
        name = link.string.strip()
        url = link['href']
        # Normalize unicode names to ASCII so the show-title substring
        # strip below works on the scraped text (Python 2 `unicode`).
        if isinstance(name, unicode):
            ascii_name = unicodedata.normalize('NFKD', name).encode(
                'ascii', 'ignore')
        else:
            ascii_name = name
        name_minus_show = ascii_name.replace(args.full_title, '')
        if self.__is_episode_special(name, name_minus_show):
            specials.append((name, url))
        else:
            if self.__is_double_episode(name):
                double_eps += 1
            elif self.__is_half_episode(name):
                half_eps += 1
            episodes.append((name, url))

    # Doubles occupy two numbering slots; half episodes occupy none.
    self.num_episodes = len(episodes) + double_eps - half_eps
    helper.log_debug('We have effectively %d episodes with %s double episodes and %d half episodes' % (self.num_episodes, double_eps, half_eps))

    all_metadata = self.get_metadata(args.base_title)
    helper.log_debug('We have %d metadata entries' % len(all_metadata))

    # `offset` keeps the displayed numbering aligned with the metadata
    # index: doubles push it forward, halves pull it back.
    offset = 0
    for idx, (name, url) in enumerate(episodes):
        if self.__is_half_episode(name):
            offset -= 1
        metadata = all_metadata[idx + offset] if idx + offset < len(all_metadata) else {'title': name}
        icon, fanart = self._get_art_from_metadata(metadata)
        query = self._construct_query(url, 'qualityPlayer', metadata)
        if self.__is_double_episode(name):
            metadata['title'] = '%d & %d - %s' % ((idx + offset + 1),
                                                  (idx + offset + 2),
                                                  metadata['title'])
            offset += 1
        else:
            metadata['title'] = '%d - %s' % ((idx + offset + 1),
                                             metadata['title'])
        contextmenu_items = self._get_contextmenu_items(url, name)
        helper.add_video_item(query, metadata, img=icon, fanart=fanart,
                              contextmenu_items=contextmenu_items)

    if len(specials) > 0:
        # Specials all share the season-0 artwork and carry no metadata.
        icon, fanart = self._get_art_for_season0()
        for (name, url) in specials:
            metadata = {'title': name}
            query = self._construct_query(url, 'qualityPlayer', metadata)
            helper.add_video_item(query, metadata, img=icon, fanart=fanart)

    self._add_related_links()
    self._add_bookmark_link()
    helper.set_content('episodes')
    helper.add_sort_methods(['title'])
    helper.end_of_directory()
    helper.end('EpisodeList.add_items')
    return