def findPropers(self, search_date=None):
    """Search the provider for PROPER/REPACK releases.

    :param search_date: optional datetime; only results dated after this
        are kept (all dated results when None).
    :return: list of classes.Proper objects
    """
    results = []
    # One provider query per release tag, limited to the last 4 days.
    for term in ('%.proper.%', '%.repack.%'):
        for item in self._doSearch({'release': term}, age=4 * 24 * 60 * 60):
            if not item['Time']:
                continue
            try:
                result_date = datetime.fromtimestamp(float(item['Time']))
            except (TypeError, ValueError):
                # float() raises ValueError on malformed strings and
                # TypeError on None/odd types; the old code only caught
                # TypeError, so a bad string crashed the whole search.
                result_date = None
            if result_date and (not search_date or result_date > search_date):
                title, url = self._get_title_and_url(item)
                results.append(
                    classes.Proper(title, url, result_date, self.show))
    return results
def findPropers(self, date=None):
    """Search the provider RSS results for PROPER/REPACK releases.

    :param date: optional datetime; only results dated after this are kept.
    :return: list of classes.Proper objects
    """
    results = []
    for curString in (".PROPER.", ".REPACK."):
        for curResult in self._doSearch(curString):
            # RFC 2822-style date, e.g. "Mon, 5 Jan 2015 12:30:00 +0000";
            # the numeric timezone offset is matched but discarded.
            # Raw string avoids invalid-escape warnings for \w, \d.
            match = re.search(
                r'(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}',
                curResult.findtext('pubDate'))
            if not match:
                continue
            resultDate = datetime.datetime.strptime(
                match.group(1), "%a, %d %b %Y %H:%M:%S")
            # `is None` instead of `== None` (PEP 8 identity comparison).
            if date is None or resultDate > date:
                results.append(
                    classes.Proper(curResult.findtext('title'),
                                   curResult.findtext('link'), resultDate))
    return results
def findPropers(self, date=None):
    """Search for anime proper releases, tagged as version bumps (v2-v5).

    :param date: optional datetime; only results dated after this are kept.
    :return: list of classes.Proper objects
    """
    results = []
    for item in self._doSearch("v2|v3|v4|v5"):
        (title, url) = self._get_title_and_url(item)
        # dict.has_key() was removed in Python 3; `in` works in both 2 and 3.
        if 'published_parsed' in item and item['published_parsed']:
            # presumably a feedparser struct_time -- first 6 fields feed
            # straight into the datetime constructor.
            result_date = item.published_parsed
            if result_date:
                result_date = datetime.datetime(*result_date[0:6])
        else:
            logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
            continue
        if not date or result_date > date:
            search_result = classes.Proper(title, url, result_date, self.show)
            results.append(search_result)
    return results
def findPropers(self, search_date=None):
    """Search the provider for PROPER/REPACK releases.

    :param search_date: optional datetime; only results dated after this
        are kept (all dated results when None).
    :return: list of classes.Proper objects
    """
    results = []
    for term in (' proper ', ' repack '):
        for item in self._doSearch(
                self._make_post_data_JSON(search_term=term)):
            if not item['utadded']:
                continue
            try:
                # 'utadded' appears to be a unix timestamp in seconds --
                # TODO confirm against the tracker API.
                result_date = datetime.datetime.fromtimestamp(
                    int(item['utadded']))
            except (TypeError, ValueError, OverflowError, OSError):
                # Narrowed from a bare `except:` (which also swallowed
                # KeyboardInterrupt/SystemExit) to the errors that int()
                # and fromtimestamp() actually raise.
                result_date = None
            if result_date and (not search_date or result_date > search_date):
                title, url = self._get_title_and_url(item)
                results.append(
                    classes.Proper(title, url, result_date, self.show))
    return results
def find_propers(self, **kwargs):
    """Fetch PROPER/REPACK releases from the provider.

    Every result carrying a usable 'usenetage' timestamp is returned;
    no date filtering is applied here.
    """
    results = []
    for search_term in ('.PROPER.', '.REPACK.'):
        found = self._search_provider(search_term, search_mode='Propers', retention=4)
        for entry in found:
            if 'usenetage' not in entry:
                continue
            title, url = self._title_and_url(entry)
            try:
                posted = datetime.fromtimestamp(int(entry['usenetage']))
            except (StandardError, Exception):
                posted = None
            if posted:
                results.append(classes.Proper(title, url, posted, self.show))
    return results
def findPropers(self, search_date=None):
    """Search the provider for propers of recently downloaded/snatched episodes.

    :param search_date: earliest airdate to consider; defaults to today.
        The old signature used `search_date=datetime.datetime.today()`,
        which is evaluated ONCE at definition time, so a long-running
        process searched from a stale date -- now computed per call.
    :return: list of classes.Proper objects
    """
    if search_date is None:
        search_date = datetime.datetime.today()
    results = []
    myDB = db.DBConnection()
    # Values interpolated into the SQL are ints derived internally
    # (ordinal date, Quality status codes), not user input.
    sqlResults = myDB.select(
        'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
        ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
        ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
        ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
        ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))')
    if not sqlResults:
        return []
    for sqlshow in sqlResults:
        self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
        if self.show:
            curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
            searchString = self._get_episode_search_strings(
                curEp, add_string='PROPER|REPACK')
            for item in self._doSearch(searchString[0]):
                title, url = self._get_title_and_url(item)
                # Keep only results whose title really carries the tag.
                if re.search('(PROPER|REPACK)', title, re.I):
                    results.append(
                        classes.Proper(title, url, datetime.datetime.today(), self.show))
    return results
def find_propers(self, search_date=None):
    """
    Searches providers for PROPER or REPACK releases

    Returns a list of objects of type classes.Proper

    :param search_date: earliest airdate to consider; defaults to today.
        The old default `datetime.datetime.today()` was evaluated once at
        definition time (a stale-default bug in long-running processes);
        it is now computed per call.
    """
    if search_date is None:
        search_date = datetime.datetime.today()
    results = []
    myDB = db.DBConnection()
    # Interpolated values are internally-derived ints (ordinal date,
    # Quality status codes), not user input.
    sqlResults = myDB.select(
        'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
        ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
        ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
        ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' +
        ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))')
    if not sqlResults:
        return results
    for sqlshow in sqlResults:
        self.show = Show.find(sickbeard.showList, int(sqlshow["showid"]))
        if self.show:
            curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
            searchStrings = self._get_episode_search_strings(
                curEp, add_string='PROPER|REPACK')
            for searchString in searchStrings:
                for item in self.search(searchString):
                    title, url = self._get_title_and_url(item)
                    if re.match(r'.*(REPACK|PROPER).*', title, re.I):
                        results.append(
                            classes.Proper(title, url, datetime.datetime.today(), self.show))
    return results
def findPropers(self, date=None):
    """Search the provider feed for PROPER/REPACK releases.

    :param date: optional datetime; only results dated after this are kept.
    :return: list of classes.Proper objects
    """
    results = []
    for curResult in self._doSearch("(PROPER,REPACK)"):
        (title, url) = self._get_title_and_url(curResult)
        pubDate_node = curResult.find('pubDate')
        pubDate = helpers.get_xml_text(pubDate_node)
        # RFC 2822-style date; the timezone offset is matched but then
        # dropped (tzinfo stripped below). Raw string avoids invalid-escape
        # warnings for \w, \d.
        dateStr = re.search(
            r'(\w{3}, \d{1,2} \w{3} \d{4} \d\d:\d\d:\d\d) [\+\-]\d{4}',
            pubDate)
        if not dateStr:
            logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
            continue
        # `continue` above makes the old `else:` indent level redundant.
        resultDate = parseDate(dateStr.group(1)).replace(tzinfo=None)
        # `is None` instead of `== None` (PEP 8 identity comparison).
        if date is None or resultDate > date:
            results.append(classes.Proper(title, url, resultDate))
    return results
def findPropers(self, date=None):
    """Search the provider feed for PROPER/REPACK releases.

    :param date: optional datetime; only results dated after this are kept.
    :return: list of classes.Proper objects
    """
    results = []
    for curResult in self._doSearch("(PROPER,REPACK)"):
        title = curResult.findtext('title')
        # NOTE(review): this replace is a no-op ('&' -> '&'); it looks like
        # it was meant to unescape '&amp;' -- confirm against the feed
        # before changing it.
        url = curResult.findtext('link').replace('&', '&')
        descriptionStr = curResult.findtext('description')
        match = re.search(
            r'<b>Added:</b> (\d{4}-\d\d-\d\d \d\d:\d\d:\d\d)',
            descriptionStr)
        if not match:
            # The old code called .group(1) before this check, so a missing
            # date crashed with AttributeError instead of being skipped.
            logger.log(u"Unable to figure out the date for entry " + title + ", skipping it")
            continue
        resultDate = datetime.datetime.strptime(
            match.group(1), "%Y-%m-%d %H:%M:%S")
        if date is None or resultDate > date:
            results.append(classes.Proper(title, url, resultDate))
    return results
def find_propers(self, search_date=None):
    """Search the provider for PROPER/REPACK releases.

    :param search_date: optional datetime; only results dated after this
        are kept (all dated results when None).
    :return: list of classes.Proper objects
    """
    results = []
    for term in (' proper ', ' repack '):
        for item in self._do_search(
                self._build_search_strings(search_term=term)):
            if not item['utadded']:
                continue
            try:
                # 'utadded' appears to be a unix timestamp in seconds --
                # TODO confirm against the tracker API.
                result_date = datetime.datetime.fromtimestamp(
                    int(item['utadded']))
            except (TypeError, ValueError, OverflowError, OSError):
                # Narrowed from a bare `except:` to the errors int() and
                # fromtimestamp() actually raise.
                result_date = None
            if result_date and (not search_date or result_date > search_date):
                title, url = self._get_title_and_url(item)
                # Only keep results whose title really contains the tag.
                if not re.search('(?i)(?:%s)' % term.strip(), title):
                    continue
                results.append(
                    classes.Proper(title, url, result_date, self.show))
    return results
def find_propers(self, search_date=None, **kwargs):
    """Return cached propers newer than `search_date` as Proper objects."""
    propers = []
    for entry in self.cache.listPropers(search_date):
        when = datetime.datetime.fromtimestamp(entry['time'])
        propers.append(
            classes.Proper(entry['name'], entry['url'], when, self.show))
    return propers
def find_propers(self, search_date=None, shows=None, anime=None, **kwargs):
    """Return classes.Proper results for PROPER/REPACK/REAL (and anime v2+) releases.

    :param search_date: only keep results dated after this (None = no filter)
    :param shows: truthy to search the standard proper/repack/real terms
    :param anime: truthy to also search anime version-bump terms (v2..v9)
    :return: list of classes.Proper, seeded from the provider cache
    """
    # Seed the result list from anything already in the provider cache.
    cache_results = self.cache.listPropers(search_date)
    results = [
        classes.Proper(x['name'], x['url'],
                       datetime.datetime.fromtimestamp(x['time']),
                       self.show) for x in cache_results
    ]
    # On auth failure return cache-only results instead of hitting the site.
    check = self._check_auth()
    if isinstance(check, bool) and not check:
        return results
    index = 0
    # alt_search = ('nzbs_org' == self.get_id())
    # do_search_alt = False
    search_terms = []
    regex = []
    if shows:
        search_terms += ['.proper.', '.repack.', '.real.']
        regex += ['proper|repack', Quality.real_check]
        proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
    if anime:
        terms = 'v2|v3|v4|v5|v6|v7|v8|v9'
        search_terms += [terms]
        regex += [terms]
        proper_check = re.compile(r'(?i)(%s)' % '|'.join(regex))
    urls = []
    # NOTE(review): proper_check is only bound when shows or anime is truthy;
    # with neither set, search_terms is empty and the loop body never runs,
    # so no NameError occurs in practice.
    while index < len(search_terms):
        if self.should_skip(log_warning=False):
            break
        search_params = {
            'q': search_terms[index],
            'maxage': sickbeard.BACKLOG_DAYS + 2
        }
        # if alt_search:
        #
        #     if do_search_alt:
        #         search_params['t'] = 'search'
        #         index += 1
        #
        #     do_search_alt = not do_search_alt
        #
        # else:
        #     index += 1
        index += 1
        items, n_space = self._search_provider(
            {'Propers': [search_params]})
        for item in items:
            (title, url) = self._title_and_url(item)
            # Skip titles without the expected tag, and de-duplicate URLs.
            if not proper_check.search(title) or url in urls:
                continue
            urls.append(url)
            result_date = self._parse_pub_date(item)
            if not result_date:
                logger.log(
                    u'Unable to figure out the date for entry %s, skipping it'
                    % title)
                continue
            result_size, result_uid = self._parse_size_uid(item, ns=n_space)
            if not search_date or search_date < result_date:
                show_obj = self.get_show(item, name_space=n_space)
                search_result = classes.Proper(title, url, result_date,
                                               self.show,
                                               parsed_show=show_obj,
                                               size=result_size,
                                               puid=result_uid)
                results.append(search_result)
        # Brief pause between term searches to be polite to the provider.
        time.sleep(0.5)
    return results
def findPropers(self, search_date=None):
    """Search the provider for PROPER/REPACK releases newer than search_date.

    Results are seeded from the provider cache, then each search term is
    queried directly.  Returns a list of classes.Proper.
    """
    search_terms = ['.proper.', '.repack.']
    cache_results = self.cache.listPropers(search_date)
    results = [
        classes.Proper(x['name'], x['url'],
                       datetime.datetime.fromtimestamp(x['time']),
                       self.show) for x in cache_results
    ]
    index = 0
    # nzbs.org gets an alternate-search state machine: it looks like terms
    # are re-queried in the site's generic 'search' mode depending on
    # whether the previous pass found items -- verify before changing.
    alt_search = ('nzbs_org' == self.getID())
    term_items_found = False
    do_search_alt = False
    while index < len(search_terms):
        search_params = {'q': search_terms[index]}
        if alt_search:
            if do_search_alt:
                index += 1
            if term_items_found:
                do_search_alt = True
                term_items_found = False
            else:
                if do_search_alt:
                    search_params['t'] = "search"
                # (True, False)[flag] is equivalent to `not flag`.
                do_search_alt = (True, False)[do_search_alt]
        else:
            index += 1
        for item in self._doSearch(search_params, age=4):
            (title, url) = self._get_title_and_url(item)
            # Try each feed date field in turn until one exists; the final
            # AttributeError means no usable date at all.
            try:
                result_date = datetime.datetime(
                    *item['published_parsed'][0:6])
            except AttributeError:
                try:
                    result_date = datetime.datetime(
                        *item['updated_parsed'][0:6])
                except AttributeError:
                    try:
                        result_date = datetime.datetime(
                            *item['created_parsed'][0:6])
                    except AttributeError:
                        try:
                            result_date = datetime.datetime(
                                *item['date'][0:6])
                        except AttributeError:
                            logger.log(
                                u"Unable to figure out the date for entry " + title + ", skipping it")
                            continue
        if not search_date or result_date > search_date:
                search_result = classes.Proper(title, url, result_date,
                                               self.show)
                results.append(search_result)
                term_items_found = True
                do_search_alt = False
        # Brief pause between queries to avoid hammering the provider.
        time.sleep(0.2)
    return results
def findPropers(self, date=None):
    """Build Proper objects from the provider cache entries newer than `date`."""
    propers = []
    for entry in self.cache.listPropers(date):
        stamp = datetime.datetime.fromtimestamp(entry['time'])
        propers.append(classes.Proper(entry['name'], entry['url'], stamp))
    return propers
def find_propers(self, search_date=None, shows=None, anime=None, **kwargs):
    """Search the provider for PROPER/REPACK (shows) and v1-v5 (anime) releases.

    Results are seeded from the provider cache; only results newer than
    `search_date` (when given) are appended from the live search.
    """
    # Cache entries first.
    propers = [
        classes.Proper(entry['name'], entry['url'],
                       datetime.datetime.fromtimestamp(entry['time']),
                       self.show)
        for entry in self.cache.listPropers(search_date)
    ]
    use_alt = ('nzbs_org' == self.get_id())
    alt_pass = False
    terms = []
    patterns = []
    if shows:
        terms += ['.proper.', '.repack.']
        patterns += ['proper|repack']
        title_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(patterns))
    if anime:
        versions = 'v1|v2|v3|v4|v5'
        terms += [versions]
        patterns += [versions]
        title_check = re.compile(r'(?i)(%s)' % '|'.join(patterns))
    seen_urls = []
    pos = 0
    while pos < len(terms):
        params = {'q': terms[pos], 'maxage': sickbeard.BACKLOG_DAYS + 2}
        if not use_alt:
            pos += 1
        else:
            # nzbs.org: run each term twice, the second time in 't=search'
            # mode, before advancing to the next term.
            if alt_pass:
                params['t'] = 'search'
                pos += 1
            alt_pass = not alt_pass
        items, n_space = self._search_provider({'Propers': [params]})
        for item in items:
            title, url = self._title_and_url(item)
            if url in seen_urls or not title_check.search(title):
                continue
            seen_urls.append(url)
            when = self._parse_pub_date(item)
            if not when:
                logger.log(
                    u'Unable to figure out the date for entry %s, skipping it'
                    % title)
                continue
            if not search_date or search_date < when:
                parsed = self.get_show(item, name_space=n_space)
                propers.append(
                    classes.Proper(title, url, when, self.show,
                                   parsed_show=parsed))
        time.sleep(0.5)
    return propers
def _find_propers(self, search_date=None):
    """Search the provider for PROPER/REPACK releases newer than `search_date`.

    Results are seeded from the provider cache, then each term is queried
    live.  Returns a list of classes.Proper.
    """
    cache_results = self.cache.listPropers(search_date)
    results = [
        classes.Proper(x['name'], x['url'],
                       datetime.datetime.fromtimestamp(x['time']),
                       self.show) for x in cache_results
    ]
    index = 0
    alt_search = ('nzbs_org' == self.get_id())
    term_items_found = False
    do_search_alt = False
    search_terms = ['.proper.', '.repack.']
    # Fixed precedence bug: the old pattern r'\b(proper)|(repack)\b'
    # parsed as (\bproper) OR (repack\b) because `|` binds loosest, so
    # e.g. 'properly' matched.  Group the alternation so both words need
    # word boundaries on both sides.
    proper_check = re.compile(r'(?i)\b(proper|repack)\b')
    while index < len(search_terms):
        search_params = {'q': search_terms[index]}
        if alt_search:
            # nzbs.org alt-search state machine: appears to retry terms in
            # the generic 'search' mode depending on whether the previous
            # pass found items -- left untouched.
            if do_search_alt:
                index += 1
            if term_items_found:
                do_search_alt = True
                term_items_found = False
            else:
                if do_search_alt:
                    search_params['t'] = 'search'
                # Equivalent to the old (True, False)[do_search_alt].
                do_search_alt = not do_search_alt
        else:
            index += 1
        for item in self._do_search(search_params, age=4):
            (title, url) = self._get_title_and_url(item)
            if not proper_check.search(title):
                continue
            # presumably a feedparser struct_time -- first 6 fields feed
            # the datetime constructor.
            if 'published_parsed' in item and item['published_parsed']:
                result_date = item.published_parsed
                if result_date:
                    result_date = datetime.datetime(*result_date[0:6])
            else:
                logger.log(
                    u'Unable to figure out the date for entry %s, skipping it',
                    title)
                continue
            if not search_date or search_date < result_date:
                results.append(
                    classes.Proper(title, url, result_date, self.show))
                term_items_found = True
                do_search_alt = False
        # Brief pause between queries to be polite to the provider.
        time.sleep(0.2)
    return results
def find_propers(self, search_date=None, shows=None, anime=None, **kwargs):
    """Search the provider for PROPER/REPACK (shows) and v1-v5 (anime) releases.

    :param search_date: only keep results dated after this (None = all)
    :param shows: truthy to include the standard proper/repack terms
    :param anime: truthy to include anime version-bump terms
    :return: list of classes.Proper, seeded from the provider cache
    """
    cache_results = self.cache.listPropers(search_date)
    results = [
        classes.Proper(x['name'], x['url'],
                       datetime.datetime.fromtimestamp(x['time']),
                       self.show) for x in cache_results
    ]
    index = 0
    # nzbs.org gets an alternate-search state machine below.
    alt_search = ('nzbs_org' == self.get_id())
    term_items_found = False
    do_search_alt = False
    search_terms = []
    regex = []
    if shows:
        search_terms += ['.proper.', '.repack.']
        regex += ['proper|repack']
        proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
    if anime:
        terms = 'v1|v2|v3|v4|v5'
        search_terms += [terms]
        regex += [terms]
        proper_check = re.compile(r'(?i)(%s)' % '|'.join(regex))
    # NOTE(review): proper_check is unbound if neither shows nor anime is
    # set, but then search_terms is empty and the loop never runs.
    while index < len(search_terms):
        search_params = {'q': search_terms[index], 'maxage': 4}
        if alt_search:
            # It looks like terms are re-queried in the site's generic
            # 'search' mode depending on whether the previous pass found
            # items -- verify before changing.
            if do_search_alt:
                index += 1
            if term_items_found:
                do_search_alt = True
                term_items_found = False
            else:
                if do_search_alt:
                    search_params['t'] = 'search'
                # (True, False)[flag] is equivalent to `not flag`.
                do_search_alt = (True, False)[do_search_alt]
        else:
            index += 1
        for item in self._search_provider({'Propers': [search_params]}):
            (title, url) = self._title_and_url(item)
            if not proper_check.search(title):
                continue
            # presumably a feedparser struct_time -- first 6 fields feed
            # the datetime constructor.
            if 'published_parsed' in item and item['published_parsed']:
                result_date = item.published_parsed
                if result_date:
                    result_date = datetime.datetime(*result_date[0:6])
            else:
                logger.log(
                    u'Unable to figure out the date for entry %s, skipping it',
                    title)
                continue
            if not search_date or search_date < result_date:
                search_result = classes.Proper(title, url, result_date,
                                               self.show)
                results.append(search_result)
                term_items_found = True
                do_search_alt = False
        # Brief pause between queries to avoid hammering the provider.
        time.sleep(0.2)
    return results