def submit_jobs(args):
    calname = utils.get_caller_name()
    future[calname] = []
    while True:
        try:
            arg = next(args)
            submit_job_calname(calname, *arg)
        except StopIteration:
            break
    return wait_calname(calname)
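# Hypothetical usage sketch for submit_jobs(): it drains an iterator of
# (func, *args) tuples, queues one job per tuple under the calling function's
# name, and returns the finished futures. `fetch_page` is an assumed worker
# function used only for illustration.
def fetch_all(urls):
    jobs = ((fetch_page, url) for url in urls)
    futures = submit_jobs(jobs)
    return [f.result() for f in futures]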
def episode_query(self, simple_info, auto_query=True, single_query=False, caller_name=None):
    if caller_name is None:
        caller_name = get_caller_name()

    results = []
    for scraper in self._torrent_scrapers:
        result = scraper.episode_query(simple_info, auto_query, single_query, caller_name)
        results.extend(result)

    return results
def movie_query(self, title, year, caller_name=None, single_query=False):
    if caller_name is None:
        caller_name = get_caller_name()

    results = []
    for scraper in self._torrent_scrapers:
        result = scraper.movie_query(title, year, caller_name)
        results.extend(result)

    return results
def movie_query(self, title, year, single_query=False, caller_name=None):
    title = strip_accents(title)
    if self.caller_name is None:
        if caller_name is None:
            caller_name = get_caller_name()
        self.caller_name = caller_name

    self.title = source_utils.clean_title(title)
    self.year = year

    full_query = '%s %s' % (title, year)
    use_cache_only = self._get_cache(full_query)
    if use_cache_only:
        return self._get_movie_results()

    skip_set_cache = False
    try:
        self._url = self._find_url()
        if self._url is None:
            self._set_cache(full_query)
            return self._get_movie_results()

        movie = lambda query: self._query_thread(query, [self.filter_movie_title])
        queries = [movie(self.title + ' ' + self.year)]
        try:
            alternative_title = replace_text_with_int(self.title)
            if self.title != alternative_title:
                queries.append(movie(alternative_title + ' ' + self.year))
        except:
            pass

        wait_threads(queries)

        if len(self._temp_results) == 0 and not single_query and not self._request.self.has_timeout_exc:
            self._set_cache(full_query)
            skip_set_cache = True
            wait_threads([movie(self.title)])

        if not skip_set_cache:
            self._set_cache(full_query)

        return self._get_movie_results()
    except:
        if not skip_set_cache:
            self._set_cache(full_query)
        return self._get_movie_results()
def get_scraper(soup_filter, title_filter, info, search_request, request=None, use_thread_for_info=False, custom_filter=None, caller_name=None):
    if caller_name is None:
        caller_name = get_caller_name()

    if caller_name not in trackers and caller_name not in hosters:
        return NoResultsScraper()

    if request is None:
        request = Request()

    if caller_name in trackers:
        scraper_urls = trackers[caller_name]
    elif caller_name in hosters:
        scraper_urls = hosters[caller_name]

    urls = list(map(lambda t: UrlParts(base=t['base'], search=t['search']), scraper_urls))

    if DEV_MODE_ALL:
        scrapers = []
        for url in urls:
            scraper = TorrentScraper(None, request, search_request, soup_filter, title_filter, info, use_thread_for_info, custom_filter, url=url)
            scrapers.append(scraper)
        return MultiUrlScraper(scrapers)

    return TorrentScraper(urls, request, search_request, soup_filter, title_filter, info, use_thread_for_info, custom_filter, caller_name=caller_name)
def get_scraper(soup_filter, title_filter, info, request=None, search_request=None, use_thread_for_info=False, custom_filter=None, caller_name=None):
    if caller_name is None:
        caller_name = get_caller_name()

    if caller_name not in trackers:
        return NoResultsScraper()

    if request is None:
        request = Request()

    def search(url, query):
        if '=%s' in url.search:
            query = quote_plus(query)
        else:
            query = query.decode('utf-8')
        return request.get(url.base + url.search % query)

    if search_request is None:
        search_request = search

    tracker_urls = trackers[caller_name]
    urls = list(map(lambda t: UrlParts(base=t['base'], search=t['search']), tracker_urls))

    if DEV_MODE_ALL:
        scrapers = []
        for url in urls:
            scraper = TorrentScraper(url, search_request, soup_filter, title_filter, info, use_thread_for_info, custom_filter)
            scrapers.append(scraper)
        return MultiUrlScraper(scrapers)

    url = request.find_url(urls)
    if url is None:
        return NoResultsScraper()

    return TorrentScraper(url, search_request, soup_filter, title_filter, info, use_thread_for_info, custom_filter)
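# Hypothetical provider sketch for the factory above (the variant that builds
# its own `search` request). The callback signatures and the shapes they rely
# on, a parsed page exposing find_all(), rows linking the release title in an
# <a> tag, and an info(el, url, torrent) hook, are illustrative assumptions,
# not the documented contract of any existing provider.
def example_tracker():
    def soup_filter(response):
        # assumed: the Request wrapper exposes the parsed page on the response
        return response.soup.find_all('tr', class_='torrent-row')

    def title_filter(el):
        # assumed: each result row carries the release title in its first <a>
        return el.find('a').text

    def info(el, url, torrent):
        # assumed: real providers would fill in size/seeds here
        return torrent

    return get_scraper(soup_filter, title_filter, info)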
def split_submit_job(dict1, dict2, func, *args):
    calname = utils.get_caller_name()

    # shard dict1 round-robin across one sub-dict per CPU
    curCPU = 0
    subDict = [utils.DictCaseInsensitive() for x in range(pool.ncpus)]
    for key in dict1:
        subDict[curCPU][key] = dict1[key]
        curCPU += 1
        if curCPU == pool.ncpus:
            curCPU = 0

    # submit one job per shard under the caller's name
    future[calname] = []
    for i in range(pool.ncpus):
        submit_job_calname(calname, func, subDict[i], *args)

    # merge each worker's result dict back into dict2
    fs = wait_calname(calname)
    for f in fs:
        subDict = f.result()
        for key in subDict:
            dict2[key] = subDict[key]
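# Hypothetical usage sketch for split_submit_job(): `source` is sharded across
# pool.ncpus jobs, each job runs func(shard, *args), and the dicts the jobs
# return are merged into `merged`. The worker function below is an assumption
# for illustration only.
def _uppercase_values(shard):
    out = utils.DictCaseInsensitive()
    for key in shard:
        out[key] = shard[key].upper()
    return out

def normalize(source):
    merged = utils.DictCaseInsensitive()
    split_submit_job(source, merged, _uppercase_values)
    return merged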
def movie_query(self, title, year, caller_name=None):
    if caller_name is None:
        caller_name = get_caller_name()

    self.title = title
    self.year = year

    movie = lambda query: self._query_thread(query, [self.filterMovieTitle])

    try:
        wait_threads([movie(title + ' ' + year)])
    except ConnectTimeoutError:
        return []
    except ReadTimeout:
        return []

    if len(self._torrent_list) == 0:
        wait_threads([movie(title)])

    self._movie_notice(caller_name)
    return self._torrent_list
def episode_query(self, simple_info, auto_query=True, single_query=False, caller_name=None, exact_pack=False):
    simple_info['show_title'] = strip_accents(simple_info['show_title'])
    if self.caller_name is None:
        if caller_name is None:
            caller_name = get_caller_name()
        self.caller_name = caller_name

    simple_info['show_aliases'] = list(set(simple_info['show_aliases']))
    if '.' in simple_info['show_title']:
        no_dot_show_title = simple_info['show_title'].replace('.', '')
        simple_info['show_aliases'].append(no_dot_show_title)

    for alias in simple_info['show_aliases']:
        if '.' in alias:
            simple_info['show_aliases'].append(alias.replace('.', ''))

    self.simple_info = simple_info
    self.year = simple_info['year']
    self.country = simple_info['country']
    self.show_title = source_utils.clean_title(simple_info['show_title'])
    if self.year in self.show_title:
        self.show_title_fallback = re.sub(r'\s+', ' ', self.show_title.replace(self.year, ''))
    else:
        self.show_title_fallback = None

    self.episode_title = source_utils.clean_title(simple_info['episode_title'])
    self.season_x = simple_info['season_number']
    self.episode_x = simple_info['episode_number']
    self.season_xx = self.season_x.zfill(2)
    self.episode_xx = self.episode_x.zfill(2)

    # full_query = '%s %s %s %s %s' % (self.show_title, self.year, self.season_xx, self.episode_xx, self.episode_title)
    # use_cache_only = self._get_cache(full_query)
    # if use_cache_only:
    #     return self._get_episode_results()

    try:
        self._url = self._find_url()
        if self._url is None:
            # self._set_cache(full_query)
            return self._get_episode_results()

        if auto_query is False:
            wait_threads([self._episode('')])
            # self._set_cache(full_query)
            return self._get_episode_results()

        def query_results():
            if DEV_MODE:
                if self.caller_name != 'eztv':
                    wait_threads([self._season(self.show_title + ' S%s' % self.season_xx)])
                else:
                    wait_threads([self._episode(self.show_title + ' S%sE%s' % (self.season_xx, self.episode_xx))])
                return

            # specials
            if self.season_x == '0':
                wait_threads([self._episode_special(self.show_title + ' %s' % self.episode_title)])
                # self._set_cache(full_query)
                return

            queries = [self._episode(self.show_title + ' S%sE%s' % (self.season_xx, self.episode_xx))]

            if single_query:
                # self._set_cache(full_query)
                wait_threads(queries)
                return

            if exact_pack:
                queries = queries + [self._season_and_pack(self.show_title + '.S%s.' % self.season_xx)]
            else:
                queries = queries + [
                    self._season(self.show_title + ' Season ' + self.season_x),
                    self._season(self.show_title + ' S%s' % self.season_xx),
                    self._pack(self.show_title + ' Seasons'),
                    self._season_and_pack(self.show_title + ' Complete')
                ]

            if simple_info.get('isanime', False) and simple_info.get('absolute_number', None) is not None:
                queries.insert(0, self._episode(self.show_title + ' %s' % simple_info['absolute_number']))

            if self._use_thread_for_info:
                wait_threads([queries[0]])
            else:
                wait_threads(queries)

        query_results()

        if len(self._temp_results) == 0 and self.show_title_fallback is not None:
            self.show_title = self.show_title_fallback
            self.simple_info['show_title'] = self.show_title_fallback
            query_results()

        # self._set_cache(full_query)
        return self._get_episode_results()
    except:
        # self._set_cache(full_query)
        return self._get_episode_results()
def submit_job(func, *args):
    calname = utils.get_caller_name()
    submit_job_from(calname, func, *args)
def wait():
    calname = utils.get_caller_name()
    return wait_calname(calname)
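# Hypothetical usage sketch for submit_job()/wait(): both resolve the calling
# function's name, so every job submitted from `compute_squares` is awaited by
# the matching wait() call in the same function. `_square` is an assumed
# worker used only for illustration.
def _square(x):
    return x * x

def compute_squares(values):
    for value in values:
        submit_job(_square, value)
    futures = wait()
    return [f.result() for f in futures]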
def episode_query(self, simple_info, auto_query=True, single_query=False, caller_name=None):
    if caller_name is None:
        caller_name = get_caller_name()

    if '.' in simple_info['show_title']:
        no_dot_show_title = simple_info['show_title'].replace('.', '')
        simple_info['show_aliases'].append(source_utils.cleanTitle(no_dot_show_title))
        simple_info['show_aliases'] = list(set(simple_info['show_aliases']))
        simple_info['show_title'] = no_dot_show_title

    self.simple_info = simple_info
    self.year = simple_info['year']
    self.country = simple_info['country']
    self.show_title = source_utils.cleanTitle(simple_info['show_title'])
    self.episode_title = source_utils.cleanTitle(simple_info['episode_title'])
    self.season_x = simple_info['season_number']
    self.episode_x = simple_info['episode_number']
    self.season_xx = self.season_x.zfill(2)
    self.episode_xx = self.episode_x.zfill(2)

    if auto_query is False:
        wait_threads([self._episode('')])
        self._episode_notice(caller_name)
        return self._torrent_list

    # specials
    if self.season_x == '0':
        wait_threads([self._episode_special(self.show_title + ' %s' % self.episode_title)])
        self._episode_notice(caller_name)
        return self._torrent_list

    try:
        wait_threads([self._episode(self.show_title + ' S%sE%s' % (self.season_xx, self.episode_xx))])
    except ConnectTimeoutError:
        return []
    except ReadTimeout:
        return []

    if single_query or DEV_MODE:
        self._episode_notice(caller_name)
        return self._torrent_list

    queries = [
        self._season(self.show_title + ' Season ' + self.season_x),
        self._season(self.show_title + ' S%s' % self.season_xx),
        self._pack(self.show_title + ' Seasons'),
        self._season_and_pack(self.show_title + ' Complete')
    ]

    if self._use_thread_for_info:
        wait_threads([queries[0]])
    else:
        wait_threads(queries)

    self._episode_notice(caller_name)
    return self._torrent_list