def get_data(self, url):
    result = None
    html = self.get_url(url)
    if self.should_skip():
        return result

    with BS4Parser(html) as soup:
        # extract the show id assigned to the `hs_showid` variable in an inline script tag
        re_showid = re.compile(r'(?i)hs_showid\s*=\s*(\d+)')
        try:
            hs_id = re_showid.findall(
                next(filter_iter(lambda s: re_showid.search(s),
                                 map_iter(lambda t: t.get_text(), soup.find_all('script')))))[0]
        except (BaseException, Exception):
            return result

    html = self.get_url(self.urls['get_data'] % hs_id)
    if self.should_skip():
        return result

    with BS4Parser(html) as soup:
        # locate the episode block named by the url fragment and take the
        # first anchor that links to a .torrent file or a magnet uri
        try:
            result = sorted(map_iter(
                lambda t: t.get('href'),
                soup.find(id=re.findall(r'.*#(\d+-\d+\w)$', url)[0]).find_all(
                    'a', href=re.compile('(?i)(torrent$|^magnet:)'))))[0]
        except (BaseException, Exception):
            pass

    return result
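
# A minimal standalone sketch of the scrape pattern used in get_data above:
# read an id assigned to a js variable inside an inline <script>, then filter
# anchors for torrent/magnet hrefs. BeautifulSoup stands in for BS4Parser and
# the sample markup is hypothetical; only the regexes come from the method.
import re

from bs4 import BeautifulSoup

sample_html = '''
<script>var hs_showid = 347;</script>
<div id="12-1080p"><a href="magnet:?xt=urn:btih:abc">magnet</a></div>
'''

soup = BeautifulSoup(sample_html, 'html.parser')
re_showid = re.compile(r'(?i)hs_showid\s*=\s*(\d+)')

# step 1: scan script tags for the show id
script_text = next(s.get_text() for s in soup.find_all('script')
                   if re_showid.search(s.get_text()))
show_id = re_showid.findall(script_text)[0]  # -> '347'

# step 2: collect matching anchors under the episode block
links = [a.get('href')
         for a in soup.find(id='12-1080p').find_all(
             'a', href=re.compile('(?i)(torrent$|^magnet:)'))]
print(show_id, sorted(links)[0])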
def _xem_get_ids(infosrc_name, xem_origin):
    """
    :param infosrc_name: name of the TV info source (e.g. 'TheTVDB')
    :type infosrc_name: AnyStr
    :param xem_origin: xem origin key for the info source
    :type xem_origin: AnyStr
    :return: list of show ids that have xem scene mappings for the origin
    :rtype: List
    """
    xem_ids = []

    url = 'http://thexem.de/map/havemap?origin=%s' % xem_origin

    task = 'Fetching show ids with%s xem scene mapping%s for origin'
    logger.log(u'%s %s' % (task % ('', 's'), infosrc_name))
    parsed_json = helpers.get_url(url, parse_json=True, timeout=90)
    if not isinstance(parsed_json, dict) or not parsed_json:
        logger.log(u'Failed %s %s, Unable to get URL: %s'
                   % (task.lower() % ('', 's'), infosrc_name, url), logger.ERROR)
    else:
        if 'success' == parsed_json.get('result', '') and 'data' in parsed_json:
            # coerce each id to int, keep only positive ids, and dedupe
            xem_ids = list(set(filter_iter(lambda prodid: 0 < prodid,
                                           map_iter(lambda pid: helpers.try_int(pid),
                                                    parsed_json['data']))))
            if 0 == len(xem_ids):
                logger.log(u'Failed %s %s, no data items parsed from URL: %s'
                           % (task.lower() % ('', 's'), infosrc_name, url), logger.WARNING)

    logger.log(u'Finished %s %s' % (task.lower() % (' %s' % len(xem_ids),
                                                    helpers.maybe_plural(xem_ids)),
                                    infosrc_name))
    return xem_ids
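
# Standalone sketch of the havemap parsing in _xem_get_ids, decoupled from
# sickbeard's helpers and logger: validate the json envelope, coerce ids to
# int, keep positives, and dedupe. The payload is a hypothetical sample of
# thexem.de's response shape; try_int is re-implemented for illustration.
def try_int(value, default=0):
    try:
        return int(value)
    except (TypeError, ValueError):
        return default

parsed_json = {'result': 'success', 'data': ['73739', '0', 73739, 'junk']}

xem_ids = []
if isinstance(parsed_json, dict) and 'success' == parsed_json.get('result', '') \
        and 'data' in parsed_json:
    xem_ids = list(set(pid for pid in (try_int(p) for p in parsed_json['data'])
                       if 0 < pid))
print(xem_ids)  # -> [73739]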
def _show_data(self, show_obj):
    # type: (sickbeard.tv.TVShow) -> Optional[Union[bool, etree.Element]]
    """
    Creates an elementTree XML structure for a Kodi-style tvshow.nfo and
    returns the resulting data object.

    show_obj: a TVShow instance to create the NFO for
    """
    show_ID = show_obj.prodid
    show_lang = show_obj.lang
    tvinfo_config = sickbeard.TVInfoAPI(show_obj.tvid).api_params.copy()

    tvinfo_config['actors'] = True

    if show_lang and not 'en' == show_lang:
        tvinfo_config['language'] = show_lang

    if 0 != show_obj.dvdorder:
        tvinfo_config['dvdorder'] = True

    t = sickbeard.TVInfoAPI(show_obj.tvid).setup(**tvinfo_config)

    tv_node = etree.Element('tvshow')

    try:
        show_info = t[int(show_ID)]
    except BaseTVinfoShownotfound as e:
        logger.log('Unable to find show with id %s on %s, skipping it'
                   % (show_ID, sickbeard.TVInfoAPI(show_obj.tvid).name), logger.ERROR)
        raise e
    except BaseTVinfoError as e:
        logger.log('%s is down, can\'t use its data to add this show'
                   % sickbeard.TVInfoAPI(show_obj.tvid).name, logger.ERROR)
        raise e

    if not self._valid_show(show_info, show_obj):
        return

    # check for title and id
    if None is getattr(show_info, 'seriesname', None) or None is getattr(show_info, 'id', None):
        logger.log('Incomplete info for show with id %s on %s, skipping it'
                   % (show_ID, sickbeard.TVInfoAPI(show_obj.tvid).name), logger.ERROR)
        return False

    title = etree.SubElement(tv_node, 'title')
    if None is not getattr(show_info, 'seriesname', None):
        title.text = '%s' % show_info['seriesname']

    # year = etree.SubElement(tv_node, 'year')
    premiered = etree.SubElement(tv_node, 'premiered')
    premiered_text = self.get_show_year(show_obj, show_info, year_only=False)
    if premiered_text:
        premiered.text = '%s' % premiered_text

    has_id = False
    tvdb_id = None
    for tvid, slug in map_iter(
            lambda _tvid: (_tvid, sickbeard.TVInfoAPI(_tvid).config.get('kodi_slug')),
            list(sickbeard.TVInfoAPI().all_sources)):
        mid = slug and show_obj.ids[tvid].get('id')
        if mid:
            has_id = True
            kwargs = dict(type=slug)
            if TVINFO_TVDB == tvid:
                kwargs.update(dict(default='true'))
                tvdb_id = str(mid)
            uniqueid = etree.SubElement(tv_node, 'uniqueid', **kwargs)
            uniqueid.text = '%s%s' % (('', 'tt')[TVINFO_IMDB == tvid], mid)
    if not has_id:
        logger.log('Incomplete info for show with id %s on %s, skipping it'
                   % (show_ID, sickbeard.TVInfoAPI(show_obj.tvid).name), logger.ERROR)
        return False

    ratings = etree.SubElement(tv_node, 'ratings')
    if None is not getattr(show_info, 'rating', None):
        # todo: name dynamic depending on source
        rating = etree.SubElement(ratings, 'rating', name='thetvdb', max='10')
        rating_value = etree.SubElement(rating, 'value')
        rating_value.text = '%s' % show_info['rating']
        if None is not getattr(show_info, 'siteratingcount', None):
            ratings_votes = etree.SubElement(rating, 'votes')
            ratings_votes.text = '%s' % show_info['siteratingcount']

    plot = etree.SubElement(tv_node, 'plot')
    if None is not getattr(show_info, 'overview', None):
        plot.text = '%s' % show_info['overview']

    episodeguide = etree.SubElement(tv_node, 'episodeguide')
    episodeguideurl = etree.SubElement(episodeguide, 'url', post='yes', cache='auth.json')
    if tvdb_id:
        episodeguideurl.text = sickbeard.TVInfoAPI(TVINFO_TVDB).config['epg_url'].replace('{MID}', tvdb_id)

    mpaa = etree.SubElement(tv_node, 'mpaa')
    if None is not getattr(show_info, 'contentrating', None):
        mpaa.text = '%s' % show_info['contentrating']

    genre = etree.SubElement(tv_node, 'genre')
    if None is not getattr(show_info, 'genre', None):
        if isinstance(show_info['genre'], string_types):
            genre.text = ' / '.join([x.strip() for x in show_info['genre'].split('|') if x.strip()])

    studio = etree.SubElement(tv_node, 'studio')
    if None is not getattr(show_info, 'network', None):
        studio.text = '%s' % show_info['network']

    self.add_actor_element(show_info, etree, tv_node)

    # Make it purdy
    sg_helpers.indent_xml(tv_node)

    # output valid xml
    # data = etree.ElementTree(tv_node)

    # output non valid xml that Kodi accepts
    data = decode_str(etree.tostring(tv_node))
    parts = data.split('episodeguide')
    if 3 == len(parts):
        data = 'episodeguide'.join([parts[0], parts[1].replace('&quot;', '"'), parts[2]])

    return data
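
# A stripped-down, standalone sketch of the nfo assembly in _show_data: build
# a <tvshow> tree with xml.etree.ElementTree and serialise it. Field values
# are hypothetical; the element names follow the Kodi tvshow.nfo layout that
# the method writes.
import xml.etree.ElementTree as etree

tv_node = etree.Element('tvshow')
etree.SubElement(tv_node, 'title').text = 'Example Show'
uniqueid = etree.SubElement(tv_node, 'uniqueid', type='tvdb', default='true')
uniqueid.text = '73739'
etree.SubElement(tv_node, 'plot').text = 'Illustrative overview text.'
# same pipe-separated genre normalisation as the method
etree.SubElement(tv_node, 'genre').text = ' / '.join(
    x.strip() for x in 'Drama|Mystery|'.split('|') if x.strip())

print(etree.tostring(tv_node).decode())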
def add_cache_entry(self,
                    name,  # type: AnyStr
                    url,  # type: AnyStr
                    parse_result=None,  # type: ParseResult
                    tvid_prodid=None  # type: Union[AnyStr, None]
                    ):
    # type: (...) -> Union[List[AnyStr, List[Any]], None]
    """
    :param name: release name
    :param url: release url
    :param parse_result: existing parse result, otherwise one is created from name
    :param tvid_prodid: tvid_prodid string used to look up the show
    :return: SQL statement and args to insert the cache row, or None
    """
    # check if we passed in a parsed result or should we try and create one
    if not parse_result:
        # create show_obj from tvid_prodid if available
        show_obj = None
        if tvid_prodid:
            try:
                show_obj = helpers.find_show_by_id(tvid_prodid, no_mapped_ids=False)
            except MultipleShowObjectsException:
                return

        try:
            np = NameParser(show_obj=show_obj, convert=True, indexer_lookup=False)
            parse_result = np.parse(name)
        except InvalidNameException:
            logger.log('Unable to parse the filename %s into a valid episode' % name, logger.DEBUG)
            return
        except InvalidShowException:
            return

        if not parse_result or not parse_result.series_name:
            return

    # if we made it this far then lets add the parsed result to cache for usage later on
    season_number = parse_result.season_number if parse_result.season_number else 1
    episode_numbers = parse_result.episode_numbers

    if season_number and episode_numbers:
        # store episodes as a separated string
        episode_text = '|%s|' % '|'.join(map_iter(str, episode_numbers))

        # get the current timestamp
        cur_timestamp = int(time.mktime(datetime.datetime.today().timetuple()))

        # get quality of release
        quality = parse_result.quality

        if PY2 and not isinstance(name, text_type):
            name = text_type(name, 'utf-8', 'replace')

        # get release group
        release_group = parse_result.release_group

        # get version
        version = parse_result.version

        logger.log('Add to cache: [%s]' % name, logger.DEBUG)

        return [
            'INSERT OR IGNORE INTO provider_cache'
            ' (provider, name, season, episodes, indexerid,'
            ' url, time, quality, release_group, version, indexer)'
            ' VALUES (?,?,?,?,?,?,?,?,?,?,?)',
            [self.providerID, name, season_number, episode_text, parse_result.show_obj.prodid,
             url, cur_timestamp, quality, release_group, version, parse_result.show_obj.tvid]]
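
# Standalone sketch of how the statement/args pair returned by
# add_cache_entry could be executed: a parameterised 'INSERT OR IGNORE'
# against sqlite3. The column names mirror provider_cache; the schema and
# sample values here are hypothetical stand-ins.
import sqlite3
import time

conn = sqlite3.connect(':memory:')
conn.execute(
    'CREATE TABLE provider_cache (provider TEXT, name TEXT, season NUMERIC,'
    ' episodes TEXT, indexerid NUMERIC, url TEXT, time NUMERIC,'
    ' quality NUMERIC, release_group TEXT, version NUMERIC, indexer NUMERIC)')

sql = ('INSERT OR IGNORE INTO provider_cache'
       ' (provider, name, season, episodes, indexerid,'
       ' url, time, quality, release_group, version, indexer)'
       ' VALUES (?,?,?,?,?,?,?,?,?,?,?)')
args = ['example_provider', 'Show.Name.S01E02.720p', 1, '|2|', 73739,
        'magnet:?xt=urn:btih:abc', int(time.time()), 4, 'GRP', -1, 1]
conn.execute(sql, args)
print(conn.execute('SELECT COUNT(*) FROM provider_cache').fetchone()[0])  # -> 1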
def search_backlog(
        self,
        which_shows=None,  # type: Optional[List[TVShow]]
        force_type=NORMAL_BACKLOG,  # type: int
        force=False  # type: bool
):
    """
    start backlog for given list of shows or start next scheduled backlog

    :param which_shows: optional list of shows to backlog search
    :param force_type: type of backlog
    :param force: force backlog
    :return: nothing
    :rtype: None
    """
    if self.amActive and not which_shows:
        logger.log(u'Backlog is still running, not starting it again', logger.DEBUG)
        return

    if which_shows:
        show_list = which_shows
        standard_backlog = False
    else:
        show_list = sickbeard.showList
        standard_backlog = True

    now = datetime.datetime.now()
    any_torrent_enabled = continued_backlog = False
    if not force and standard_backlog and (datetime.datetime.now() - datetime.datetime.fromtimestamp(
            self._get_last_runtime())) < datetime.timedelta(hours=23):
        any_torrent_enabled = any(map_iter(
            lambda x: x.is_active() and x.enable_backlog and x.providerType == GenericProvider.TORRENT,
            sickbeard.providers.sortedProviderList()))
        if not any_torrent_enabled:
            logger.log('Last scheduled backlog run was within the last day, skipping this run.', logger.DEBUG)
            return

    if not self.providers_active(any_torrent_enabled, standard_backlog):
        logger.log('No NZB/Torrent provider has active searching enabled in config/Media Providers,'
                   ' cannot start backlog.', logger.WARNING)
        return

    self._get_last_backlog()
    self.amActive = True
    self.amPaused = False

    cur_date = datetime.date.today().toordinal()
    from_date = datetime.date.fromordinal(1)
    limited_from_date = datetime.date.today() - datetime.timedelta(days=sickbeard.BACKLOG_DAYS)

    limited_backlog = False
    if standard_backlog and (any_torrent_enabled or sickbeard.BACKLOG_NOFULL):
        logger.log(u'Running limited backlog for episodes missed during the last %s day(s)'
                   % str(sickbeard.BACKLOG_DAYS))
        from_date = limited_from_date
        limited_backlog = True

    runparts = []
    if standard_backlog and not any_torrent_enabled and sickbeard.BACKLOG_NOFULL:
        logger.log(u'Skipping automated full backlog search because it is disabled in search settings')

    my_db = db.DBConnection('cache.db')
    if standard_backlog and not any_torrent_enabled and not sickbeard.BACKLOG_NOFULL:
        sql_result = my_db.select('SELECT * FROM backlogparts WHERE part in (SELECT MIN(part) FROM backlogparts)')
        if sql_result:
            sl = []
            part_nr = int(sql_result[0]['part'])
            for s in sql_result:
                show_obj = find_show_by_id({int(s['indexer']): int(s['indexerid'])})
                if show_obj:
                    sl.append(show_obj)
                    runparts.append(show_obj.tvid_prodid)
            show_list = sl
            continued_backlog = True
            my_db.action('DELETE FROM backlogparts WHERE part = ?', [part_nr])

    forced = standard_backlog and force_type != NORMAL_BACKLOG

    wanted_list = []
    for cur_show_obj in show_list:
        if not cur_show_obj.paused:
            w = wanted_episodes(cur_show_obj, from_date, make_dict=True,
                                unaired=(sickbeard.SEARCH_UNAIRED and not sickbeard.UNAIRED_RECENT_SEARCH_ONLY))
            if w:
                wanted_list.append(w)

    parts = []
    if standard_backlog and not any_torrent_enabled and not continued_backlog and not sickbeard.BACKLOG_NOFULL:
        # split the full backlog into parts so that it can be continued over the next scheduled runs
        fullbacklogparts = sum([len(w) for w in wanted_list if w]) // sickbeard.BACKLOG_FREQUENCY
        h_part = []
        counter = 0
        for w in wanted_list:  # type: Dict
            f = False
            for season, segment in iteritems(w):  # type: int, List[TVEpisode]
                counter += 1
                if not f:
                    h_part.append(segment[0].show_obj.tvid_prodid)
                    f = True
            if counter > fullbacklogparts:
                counter = 0
                parts.append(h_part)
                h_part = []

        if h_part:
            parts.append(h_part)

    if not runparts and parts:
        runparts = parts[0]
        wanted_list = filter_list(
            lambda wi: wi and next(itervalues(wi))[0].show_obj.tvid_prodid in runparts,
            wanted_list)

    limited_wanted_list = []
    if standard_backlog and not any_torrent_enabled and runparts:
        for cur_show_obj in sickbeard.showList:
            if not cur_show_obj.paused and cur_show_obj.tvid_prodid not in runparts:
                w = wanted_episodes(cur_show_obj, limited_from_date, make_dict=True,
                                    unaired=(sickbeard.SEARCH_UNAIRED and not sickbeard.UNAIRED_RECENT_SEARCH_ONLY))
                if w:
                    limited_wanted_list.append(w)

    self.add_backlog_item(wanted_list, standard_backlog, limited_backlog, forced, any_torrent_enabled)
    if standard_backlog and not any_torrent_enabled and limited_wanted_list:
        self.add_backlog_item(limited_wanted_list, standard_backlog, True, forced, any_torrent_enabled)

    if standard_backlog and not sickbeard.BACKLOG_NOFULL and not any_torrent_enabled and not continued_backlog:
        # noinspection SqlConstantCondition
        cl = ([], [['DELETE FROM backlogparts WHERE 1=1']])[any(parts)]
        for i, l in enumerate(parts):
            if 0 == i:
                continue
            cl += map_list(
                lambda m: ['INSERT INTO backlogparts (part, indexer, indexerid) VALUES (?,?,?)',
                           [i + 1] + TVidProdid(m).list],
                l)

        if 0 < len(cl):
            my_db.mass_action(cl)

    # don't consider this an actual backlog search if we only did recent eps
    # or if we only did certain shows
    if from_date == datetime.date.fromordinal(1) and standard_backlog:
        self._set_last_backlog(cur_date)
        self._get_last_backlog()

    if standard_backlog and not any_torrent_enabled:
        self._set_last_runtime(now)

    self.amActive = False
    self._reset_progress_indicator()
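
# Illustrative sketch of the partitioning step in search_backlog: split a
# list of per-show wanted dicts into roughly equal parts, one part per
# scheduled run. split_into_parts and the (show_id, seasons) shape are
# hypothetical simplifications; only the counter/threshold mechanics mirror
# the method above.
def split_into_parts(wanted_list, frequency):
    # wanted_list: [(show_id, {season: [episode, ...]}), ...]
    threshold = sum(len(seasons) for _sid, seasons in wanted_list) // max(1, frequency)
    parts, h_part, counter = [], [], 0
    for show_id, seasons in wanted_list:
        added = False
        for _season in seasons:
            counter += 1
            if not added:
                h_part.append(show_id)
                added = True
        # close a part once enough season segments have accumulated
        if counter > threshold:
            counter = 0
            parts.append(h_part)
            h_part = []
    if h_part:
        parts.append(h_part)
    return parts

wanted = [('1:101', {1: ['e1'], 2: ['e2']}),
          ('1:102', {1: ['e1']}),
          ('1:103', {3: ['e3'], 4: ['e4']})]
print(split_into_parts(wanted, 2))  # -> [['1:101', '1:102'], ['1:103']]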