def _load_network_conversions():
    # type: (...) -> None
    conversions_in = []

    # network conversions are stored on github pages
    url = 'https://raw.githubusercontent.com/prinz23/sg_network_conversions/master/conversions.txt'

    url_data = helpers.get_url(url)
    if url_data in (None, ''):
        update_last_retry()
        # when no url_data, trouble connecting to github
        logger.debug(u'Updating network conversions failed, this can happen from time to time. URL: %s' % url)
        return

    reset_last_retry()

    try:
        for line in url_data.splitlines():
            (tvdb_network, tvrage_network, tvrage_country) = line.strip().rsplit(u'::', 2)
            if not (tvdb_network and tvrage_network and tvrage_country):
                continue
            conversions_in.append(
                dict(tvdb_network=tvdb_network, tvrage_network=tvrage_network, tvrage_country=tvrage_country))
    except (IOError, OSError):
        pass

    my_db = db.DBConnection('cache.db')

    sql_result = my_db.select('SELECT * FROM network_conversions')
    conversions_db = helpers.build_dict(sql_result, 'tvdb_network')

    # list of sql commands to update the network_conversions table
    cl = []
    for cur_network in conversions_in:
        cl.append([
            'INSERT OR REPLACE INTO network_conversions (tvdb_network, tvrage_network, tvrage_country)'
            ' VALUES (?,?,?)',
            [cur_network['tvdb_network'], cur_network['tvrage_network'], cur_network['tvrage_country']]])
        try:
            del conversions_db[cur_network['tvdb_network']]
        except (BaseException, Exception):
            pass

    # remove deleted records
    if 0 < len(conversions_db):
        network_name = list_keys(conversions_db)
        cl.append(['DELETE FROM network_conversions WHERE tvdb_network'
                   ' IN (%s)' % ','.join(['?'] * len(network_name)), network_name])

    # change all network conversion info at once (much faster)
    if 0 < len(cl):
        my_db.mass_action(cl)
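# A standalone sketch of the parse step above: each conversions.txt line is expected to
# hold 'tvdb_network::tvrage_network::tvrage_country', split from the right with
# rsplit('::', 2) so a '::' inside the first field cannot bleed across columns. The
# sample rows here are hypothetical, not from the live github file; this sketch also
# catches ValueError for short rows, whereas the loader above only guards IOError/OSError
# because the published file is well formed.
sample_data = u'Sci-Fi::Syfy::US\nBBC One::BBC 1::GB\nmalformed line'
conversions = []
for cur_line in sample_data.splitlines():
    try:
        tvdb_network, tvrage_network, tvrage_country = cur_line.strip().rsplit(u'::', 2)
    except ValueError:
        continue  # too few separators: skip the row
    if tvdb_network and tvrage_network and tvrage_country:
        conversions.append(dict(
            tvdb_network=tvdb_network, tvrage_network=tvrage_network, tvrage_country=tvrage_country))
assert 2 == len(conversions) and 'US' == conversions[0]['tvrage_country']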
def _search_provider(self, search_params, **kwargs):

    results = []
    if not self._authorised():
        return results

    items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

    rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'get': 'magnet'})])
    urls = []
    for mode in search_params:
        for search_string in search_params[mode]:
            if 'Cache' == mode:
                search_url = self.urls['browse']
            else:
                search_string = unidecode(search_string)

                show_name = filter_list(
                    lambda x: x.lower() == re.sub(r'\s.*', '', search_string.lower()),
                    list_values(self.shows))
                if not show_name:
                    continue
                search_url = self.urls['search'] % list_keys(self.shows)[
                    list_values(self.shows).index(show_name[0])]

            if search_url in urls:
                continue
            urls += [search_url]

            html = self.get_url(search_url)
            if self.should_skip():
                return results

            cnt = len(items[mode])
            try:
                if not html or self._has_no_results(html):
                    raise generic.HaltParseException

                with BS4Parser(html) as soup:
                    tbl_rows = soup.select('ul.user-timeline > li')
                    if not len(tbl_rows):
                        raise generic.HaltParseException

                    for tr in tbl_rows:
                        try:
                            anchor = tr.find('a', href=rc['get'])
                            title = self.regulate_title(anchor)
                            download_url = self._link(anchor['href'])
                        except (AttributeError, TypeError, ValueError):
                            continue

                        if title and download_url:
                            items[mode].append((title, download_url, None, None))

            except generic.HaltParseException:
                pass
            except (BaseException, Exception):
                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

            self._log_search(mode, len(items[mode]) - cnt, search_url)

        results = self._sort_seeding(mode, results + items[mode])

    return results
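# A sketch of the pre-compiled attribute-filter technique used in _search_provider,
# assuming BeautifulSoup (bs4) is installed: find() accepts a compiled regex for an
# attribute value, so one case-insensitive pattern dict can be shared across all rows.
# The html snippet and magnet link below are made up for illustration.
import re
from bs4 import BeautifulSoup

rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'magnet'}.items())
html = '<ul class="user-timeline"><li><a href="MAGNET:?xt=urn:btih:0000">Example.S01E01</a></li></ul>'
soup = BeautifulSoup(html, 'html.parser')
for li in soup.select('ul.user-timeline > li'):
    anchor = li.find('a', href=rc['get'])  # regex search on the href value, case-insensitive
    if anchor:
        print(anchor.get_text(), anchor['href'])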
def _update_collection(self, ep_obj):
    """
    Sends a request to trakt indicating that the given episode is part of our collection.

    :param ep_obj: The TVEpisode object to add to trakt
    """
    if sickbeard.TRAKT_ACCOUNTS:

        # URL parameters
        data = dict(shows=[dict(title=ep_obj.show_obj.name, year=ep_obj.show_obj.startyear, ids={})])

        from sickbeard.indexers.indexer_config import TVINFO_TVDB, TVINFO_TVRAGE, TVINFO_IMDB, TVINFO_TMDB, \
            TVINFO_TRAKT
        supported_indexer = {TVINFO_TRAKT: 'trakt', TVINFO_TVDB: 'tvdb', TVINFO_TVRAGE: 'tvrage',
                             TVINFO_IMDB: 'imdb', TVINFO_TMDB: 'tmdb'}
        indexer_priorities = [TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVRAGE, TVINFO_IMDB, TVINFO_TMDB]

        tvid = prodid = None
        if ep_obj.show_obj.tvid in supported_indexer:
            tvid, prodid = supported_indexer[ep_obj.show_obj.tvid], ep_obj.show_obj.prodid
        else:
            for i in indexer_priorities:
                if 0 < ep_obj.show_obj.ids.get(i, {'id': 0}).get('id', 0):
                    tvid, prodid = supported_indexer[i], ep_obj.show_obj.ids[i]['id']
                    break

        if None is tvid or None is prodid:
            self._log_warning('Missing trakt supported id, could not add to collection')
            return

        data['shows'][0]['ids'][tvid] = prodid

        # Add Season and Episode + Related Episodes
        data['shows'][0]['seasons'] = [{'number': ep_obj.season, 'episodes': []}]

        for cur_ep_obj in [ep_obj] + ep_obj.related_ep_obj:
            data['shows'][0]['seasons'][0]['episodes'].append({'number': cur_ep_obj.episode})

        for tid, locations in iteritems(sickbeard.TRAKT_UPDATE_COLLECTION):
            if tid not in list_keys(sickbeard.TRAKT_ACCOUNTS):
                continue
            for loc in locations:
                if not ep_obj.location.startswith('%s%s' % (loc.rstrip(os.path.sep), os.path.sep)):
                    continue

                warn, msg = False, ''
                try:
                    resp = TraktAPI().trakt_request('sync/collection', data, send_oauth=tid)
                    if 'added' in resp and 'episodes' in resp['added'] \
                            and 0 < sickbeard.helpers.try_int(resp['added']['episodes']):
                        msg = 'Added episode to'
                    elif 'updated' in resp and 'episodes' in resp['updated'] \
                            and 0 < sickbeard.helpers.try_int(resp['updated']['episodes']):
                        msg = 'Updated episode in'
                    elif 'existing' in resp and 'episodes' in resp['existing'] \
                            and 0 < sickbeard.helpers.try_int(resp['existing']['episodes']):
                        msg = 'Episode is already in'
                    elif 'not_found' in resp and 'episodes' in resp['not_found'] \
                            and 0 < sickbeard.helpers.try_int(resp['not_found']['episodes']):
                        msg = 'Episode not found on Trakt, not adding to'
                    else:
                        warn, msg = True, 'Could not add episode to'
                except (exceptions.TraktAuthException, exceptions.TraktException):
                    warn, msg = True, 'Error adding episode to'
                msg = 'Trakt: %s your %s collection' % (msg, sickbeard.TRAKT_ACCOUNTS[tid].name)
                if not warn:
                    self._log(msg)
                else:
                    self._log_warning(msg)
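# Illustrative payload that _update_collection builds before posting to the Trakt
# 'sync/collection' endpoint; the show title, year, id, and episode numbers below
# are hypothetical. Related episodes land inside the same single-season entry.
data = dict(shows=[dict(title='Example Show', year=2010, ids={})])
data['shows'][0]['ids']['tvdb'] = 123456  # one supported id, keyed by indexer name
data['shows'][0]['seasons'] = [{'number': 2, 'episodes': []}]
for cur_episode in (3, 4):  # i.e. ep_obj plus each of its related_ep_obj
    data['shows'][0]['seasons'][0]['episodes'].append({'number': cur_episode})
# -> {'shows': [{'title': 'Example Show', 'year': 2010, 'ids': {'tvdb': 123456},
#                'seasons': [{'number': 2, 'episodes': [{'number': 3}, {'number': 4}]}]}]}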
def is_enabled_library(cls):
    if sickbeard.TRAKT_ACCOUNTS:
        for tid, locations in iteritems(sickbeard.TRAKT_UPDATE_COLLECTION):
            if tid in list_keys(sickbeard.TRAKT_ACCOUNTS):
                return True
    return False
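# Hypothetical shape of the settings that both trakt methods above iterate over:
# TRAKT_UPDATE_COLLECTION maps a Trakt account id to the parent folders whose
# downloads should sync, and an id only counts while it also exists in TRAKT_ACCOUNTS.
TRAKT_UPDATE_COLLECTION = {1: ['/mnt/tv', '/mnt/anime'], 7: ['/mnt/stale']}
TRAKT_ACCOUNTS = {1: 'TraktAccount(...)'}  # stand-in value; real entries are account objects
assert any(tid in TRAKT_ACCOUNTS for tid in TRAKT_UPDATE_COLLECTION)  # account 1 keeps it enabled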
def _parse_string(self, name):
    # type: (AnyStr) -> Optional[ParseResult]
    """

    :param name: name to parse
    :type name: AnyStr
    :return:
    :rtype: ParseResult or None
    """
    if not name:
        return

    matches = []
    initial_best_result = None
    for reg_ex in self.compiled_regexes:
        for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[reg_ex]:
            new_name = helpers.remove_non_release_groups(name, 'anime' in cur_regex_name)
            match = cur_regex.match(new_name)
            if not match:
                continue

            if 'garbage_name' == cur_regex_name:
                return

            result = ParseResult(new_name)
            result.which_regex = [cur_regex_name]
            result.score = 0 - cur_regex_num

            named_groups = list_keys(match.groupdict())

            if 'series_name' in named_groups:
                result.series_name = match.group('series_name')
                if result.series_name:
                    result.series_name = self.clean_series_name(result.series_name)

                    name_parts = re.match(r'(?i)(.*)[ -]((?:part|pt)[ -]?\w+)$', result.series_name)
                    try:
                        result.series_name = name_parts.group(1)
                        result.extra_info = name_parts.group(2)
                    except (AttributeError, IndexError):
                        pass

                    result.score += 1

            if 'anime' in cur_regex_name and not (self.show_obj and self.show_obj.is_anime):
                p_show_obj = helpers.get_show(result.series_name, True)
                if p_show_obj and self.show_obj and not (p_show_obj.tvid == self.show_obj.tvid
                                                         and p_show_obj.prodid == self.show_obj.prodid):
                    p_show_obj = None
                if not p_show_obj and self.show_obj:
                    p_show_obj = self.show_obj
                if p_show_obj and not p_show_obj.is_anime:
                    continue

            if 'series_num' in named_groups and match.group('series_num'):
                result.score += 1

            if 'season_num' in named_groups:
                tmp_season = int(match.group('season_num'))
                if 'bare' == cur_regex_name and tmp_season in (19, 20):
                    continue
                result.season_number = tmp_season
                result.score += 1

            def _process_epnum(captures, capture_names, grp_name, extra_grp_name, ep_numbers, parse_result):
                ep_num = self._convert_number(captures.group(grp_name))
                extra_grp_name = 'extra_%s' % extra_grp_name
                ep_numbers = '%sepisode_numbers' % ep_numbers

                if extra_grp_name in capture_names and captures.group(extra_grp_name):
                    try:
                        if hasattr(self.show_obj, 'get_episode'):
                            _ep_obj = self.show_obj.get_episode(parse_result.season_number, ep_num)
                        else:
                            tmp_show_obj = helpers.get_show(parse_result.series_name, True)
                            if tmp_show_obj and hasattr(tmp_show_obj, 'get_episode'):
                                _ep_obj = tmp_show_obj.get_episode(parse_result.season_number, ep_num)
                            else:
                                _ep_obj = None
                    except (BaseException, Exception):
                        _ep_obj = None

                    en = _ep_obj and _ep_obj.name and re.match(r'^\W*(\d+)', _ep_obj.name) or None
                    es = en and en.group(1) or None

                    extra_ep_num = self._convert_number(captures.group(extra_grp_name))
                    parse_result.__dict__[ep_numbers] = list_range(ep_num, extra_ep_num + 1) if not (
                        _ep_obj and es and es != captures.group(extra_grp_name)) and (
                        0 < extra_ep_num - ep_num < 10) else [ep_num]
                    parse_result.score += 1
                else:
                    parse_result.__dict__[ep_numbers] = [ep_num]
                parse_result.score += 1
                return parse_result

            if 'ep_num' in named_groups:
                result = _process_epnum(match, named_groups, 'ep_num', 'ep_num', '', result)

            if 'ep_ab_num' in named_groups:
                result = _process_epnum(match, named_groups, 'ep_ab_num', 'ab_ep_num', 'ab_', result)

            if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                year = int(match.group('air_year'))
                try:
                    month = int(match.group('air_month'))
                except ValueError:
                    try:
                        month = time.strptime(match.group('air_month')[0:3], '%b').tm_mon
                    except ValueError as e:
                        raise InvalidNameException(ex(e))
                day = int(match.group('air_day'))
                # make an attempt to detect YYYY-DD-MM formats
                if 12 < month:
                    tmp_month = month
                    month = day
                    day = tmp_month

                try:
                    result.air_date = datetime.date(
                        year + ((1900, 2000)[0 < year < 28], 0)[1900 < year], month, day)
                except ValueError as e:
                    raise InvalidNameException(ex(e))

            if 'extra_info' in named_groups:
                tmp_extra_info = match.group('extra_info')

                # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season
                if tmp_extra_info and 'season_only' == cur_regex_name and re.search(
                        r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I):
                    continue
                if tmp_extra_info:
                    if result.extra_info:
                        tmp_extra_info = '%s %s' % (result.extra_info, tmp_extra_info)
                    result.extra_info = tmp_extra_info
                result.score += 1

            if 'release_group' in named_groups:
                result.release_group = match.group('release_group')
                result.score += 1

            if 'version' in named_groups:
                # assigns version to anime file if detected using anime regex. Non-anime regex receives -1
                version = match.group('version')
                if version:
                    result.version = helpers.try_int(version)
                else:
                    result.version = 1
            else:
                result.version = -1

            if None is result.season_number and result.episode_numbers and not result.air_date and \
                    cur_regex_name in ['no_season', 'no_season_general', 'no_season_multi_ep'] and \
                    re.search(r'(?i)\bpart.?\d{1,2}\b', result.original_name):
                result.season_number = 1

            matches.append(result)

        if len(matches):
            # pick best match with highest score based on placement
            best_result = max(sorted(matches, reverse=True, key=lambda x: x.which_regex),
                              key=lambda x: x.score)

            show_obj = None
            if not self.naming_pattern:
                # try and create a show object for this result
                show_obj = helpers.get_show(best_result.series_name, self.try_scene_exceptions)

            # confirm passed in show object tvid_prodid matches result show object tvid_prodid
            if show_obj and not self.testing:
                if self.show_obj and show_obj.tvid_prodid != self.show_obj.tvid_prodid:
                    show_obj = None
            elif not show_obj and self.show_obj:
                show_obj = self.show_obj

            best_result.show_obj = show_obj
            if not best_result.series_name and getattr(show_obj, 'name', None):
                best_result.series_name = show_obj.name

            if show_obj and show_obj.is_anime and 1 < len(self.compiled_regexes[1]) and 1 != reg_ex:
                continue

            # if this is a naming pattern test then return best result
            if not show_obj or self.naming_pattern:
                if not show_obj and not self.naming_pattern and not self.testing:
                    # ensure anime regex test but use initial best if show still not found
                    if 0 == reg_ex:
                        initial_best_result = best_result
                        matches = []  # clear non-anime match scores
                        continue
                    return initial_best_result
                return best_result

            # get quality
            new_name = helpers.remove_non_release_groups(name, show_obj.is_anime)
            best_result.quality = common.Quality.nameQuality(new_name, show_obj.is_anime)

            new_episode_numbers = []
            new_season_numbers = []
            new_absolute_numbers = []

            # if we have an air-by-date show then get the real season/episode numbers
            if best_result.is_air_by_date:
                season_number, episode_numbers = None, []

                airdate = best_result.air_date.toordinal()
                my_db = db.DBConnection()
                sql_result = my_db.select(
                    'SELECT season, episode, name'
                    ' FROM tv_episodes'
                    ' WHERE indexer = ? AND showid = ?'
                    ' AND airdate = ?',
                    [show_obj.tvid, show_obj.prodid, airdate])

                if sql_result:
                    season_number = int(sql_result[0]['season'])
                    episode_numbers = [int(sql_result[0]['episode'])]

                    if 1 < len(sql_result):
                        # multi-eps broadcast on this day
                        nums = {'1': 'one', '2': 'two', '3': 'three', '4': 'four', '5': 'five',
                                '6': 'six', '7': 'seven', '8': 'eight', '9': 'nine', '10': 'ten'}
                        patt = '(?i)(?:e(?:p(?:isode)?)?|part|pt)[. _-]?(%s)'
                        try:
                            src_num = str(re.findall(patt % r'\w+', best_result.extra_info)[0])
                            alt_num = nums.get(src_num) or list(iterkeys(nums))[
                                list(itervalues(nums)).index(src_num)]
                            re_partnum = re.compile(patt % ('%s|%s' % (src_num, alt_num)))
                            for ep_details in sql_result:
                                if re_partnum.search(ep_details['name']):
                                    season_number = int(ep_details['season'])
                                    episode_numbers = [int(ep_details['episode'])]
                                    break
                        except (BaseException, Exception):
                            pass

                if self.indexer_lookup and not season_number or not len(episode_numbers):
                    try:
                        tvinfo_config = sickbeard.TVInfoAPI(show_obj.tvid).api_params.copy()

                        if show_obj.lang:
                            tvinfo_config['language'] = show_obj.lang

                        t = sickbeard.TVInfoAPI(show_obj.tvid).setup(**tvinfo_config)

                        ep_obj = t[show_obj.prodid].aired_on(best_result.air_date)[0]

                        season_number = int(ep_obj['seasonnumber'])
                        episode_numbers = [int(ep_obj['episodenumber'])]
                    except BaseTVinfoEpisodenotfound as e:
                        logger.log(u'Unable to find episode with date ' + str(best_result.air_date)
                                   + ' for show ' + show_obj.name + ', skipping', logger.WARNING)
                        episode_numbers = []
                    except BaseTVinfoError as e:
                        logger.log(u'Unable to contact ' + sickbeard.TVInfoAPI(show_obj.tvid).name
                                   + ': ' + ex(e), logger.WARNING)
                        episode_numbers = []

                for epNo in episode_numbers:
                    s = season_number
                    e = epNo

                    if self.convert and show_obj.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(
                            show_obj.tvid, show_obj.prodid, season_number, epNo)
                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            elif show_obj.is_anime and len(best_result.ab_episode_numbers) and not self.testing:
                scene_season = scene_exceptions.get_scene_exception_by_name(best_result.series_name)[2]
                for epAbsNo in best_result.ab_episode_numbers:
                    a = epAbsNo

                    if self.convert and show_obj.is_scene:
                        a = scene_numbering.get_indexer_absolute_numbering(
                            show_obj.tvid, show_obj.prodid, epAbsNo, True, scene_season)

                    (s, e) = helpers.get_all_episodes_from_absolute_number(show_obj, [a])

                    new_absolute_numbers.append(a)
                    new_episode_numbers.extend(e)
                    new_season_numbers.append(s)

            elif best_result.season_number and len(best_result.episode_numbers) and not self.testing:
                for epNo in best_result.episode_numbers:
                    s = best_result.season_number
                    e = epNo

                    if self.convert and show_obj.is_scene:
                        (s, e) = scene_numbering.get_indexer_numbering(
                            show_obj.tvid, show_obj.prodid, best_result.season_number, epNo)
                    if show_obj.is_anime:
                        a = helpers.get_absolute_number_from_season_and_episode(show_obj, s, e)
                        if a:
                            new_absolute_numbers.append(a)

                    new_episode_numbers.append(e)
                    new_season_numbers.append(s)

            # need to do a quick sanity check here. It's possible that we now have episodes
            # from more than one season (by tvdb numbering), and this is just too much, so flag it.
            new_season_numbers = list(set(new_season_numbers))  # remove duplicates
            if 1 < len(new_season_numbers):
                raise InvalidNameException('Scene numbering results episodes from '
                                           'seasons %s, (i.e. more than one) and '
                                           'SickGear does not support this. '
                                           'Sorry.' % (str(new_season_numbers)))

            # I guess it's possible that we'd have duplicate episodes too, so lets
            # eliminate them
            new_episode_numbers = list(set(new_episode_numbers))
            new_episode_numbers.sort()

            # maybe even duplicate absolute numbers so why not do them as well
            new_absolute_numbers = list(set(new_absolute_numbers))
            new_absolute_numbers.sort()

            if len(new_absolute_numbers):
                best_result.ab_episode_numbers = new_absolute_numbers

            if len(new_season_numbers) and len(new_episode_numbers):
                best_result.episode_numbers = new_episode_numbers
                best_result.season_number = new_season_numbers[0]

            if self.convert and show_obj.is_scene:
                logger.log(u'Converted parsed result %s into %s'
                           % (best_result.original_name,
                              decode_str(str(best_result), errors='xmlcharrefreplace')),
                           logger.DEBUG)

            helpers.cpu_sleep()

            return best_result
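# Worked example of the century fix-up that _parse_string applies to air dates:
#     year + ((1900, 2000)[0 < year < 28], 0)[1900 < year]
# four-digit years pass through unchanged, two-digit years 1-27 become 20xx, and any
# other short year becomes 19xx. A hedged re-statement of that expression for clarity:
def _century_fix(year):
    return year + ((1900, 2000)[0 < year < 28], 0)[1900 < year]

assert 2015 == _century_fix(2015)  # already absolute: outer index picks +0
assert 2007 == _century_fix(7)     # 0 < 7 < 28: inner index picks +2000
assert 1984 == _century_fix(84)    # two-digit but outside 1..27: +1900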