def _download_propers(self, proper_list):
    """
    Download proper (snatch it).

    Skips a candidate when the download history already contains either a
    release with at least as many proper tags, or the very same release name.

    :param proper_list: iterable of candidate proper search results.
    """
    for candidate in proper_list:
        # Only compare against history entries from the last 30 days.
        history_limit = datetime.datetime.today() - datetime.timedelta(
            days=30)
        main_db_con = db.DBConnection()
        # NOTE: episode numbers are interpolated into the IN (...) clause;
        # all other values are bound as parameters.
        history_results = main_db_con.select(
            'SELECT resource, proper_tags FROM history '
            'WHERE showid = ? '
            'AND season = ? '
            'AND episode IN ({episodes}) '
            'AND quality = ? '
            'AND date >= ? '
            'AND action IN (?, ?, ?, ?)'.format(episodes=','.join(
                text_type(ep) for ep in candidate.actual_episodes), ), [
                    candidate.indexerid, candidate.actual_season,
                    candidate.quality,
                    history_limit.strftime(History.date_format),
                    DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST
                ])

        proper_tags_len = len(candidate.proper_tags)
        # Canonical names with and without the file extension stripped.
        proper_name = self._canonical_name(candidate.name,
                                           clear_extension=True)
        proper_name_ext = self._canonical_name(candidate.name)

        for result in history_results:
            proper_tags = result['proper_tags']
            # An existing download with the same or more proper tags wins.
            if proper_tags and len(
                    proper_tags.split('|')) >= proper_tags_len:
                log.debug(
                    'Current release has the same or more proper tags,'
                    ' skipping new proper {result!r}',
                    {'result': candidate.name},
                )
                break

            # make sure that none of the existing history downloads are the same proper we're
            # trying to download; if the result exists in history already we need to skip it
            if proper_name == self._canonical_name(
                result['resource'], clear_extension=True
            ) or proper_name_ext == self._canonical_name(
                    result['resource']):
                log.debug(
                    'This proper {result!r} is already in history, skipping it',
                    {'result': candidate.name},
                )
                break
        else:
            # for/else: only runs when no history entry caused a break.
            candidate.create_episode_object()

            # snatch it
            snatch_episode(candidate)
def _download_propers(self, proper_list):
    """
    Download proper (snatch it).

    :param proper_list:
    """
    for candidate in proper_list:
        # Restrict the history lookup to the last 30 days.
        cutoff = datetime.datetime.today() - datetime.timedelta(days=30)
        episode_ids = ','.join(
            text_type(ep) for ep in candidate.actual_episodes)

        connection = db.DBConnection()
        rows = connection.select(
            'SELECT resource, proper_tags FROM history '
            'WHERE showid = ? '
            'AND season = ? '
            'AND episode IN ({episodes}) '
            'AND quality = ? '
            'AND date >= ? '
            'AND action IN (?, ?, ?, ?)'.format(episodes=episode_ids),
            [candidate.indexerid, candidate.actual_season,
             candidate.quality, cutoff.strftime(History.date_format),
             DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST])

        tag_count = len(candidate.proper_tags)
        # Canonical forms of the candidate name, with and without extension.
        name_no_ext = self._canonical_name(candidate.name,
                                           clear_extension=True)
        name_with_ext = self._canonical_name(candidate.name)

        blocked = False
        for row in rows:
            existing_tags = row['proper_tags']
            # A historical download with as many (or more) proper tags
            # makes this candidate redundant.
            if existing_tags and len(existing_tags.split('|')) >= tag_count:
                log.debug(
                    'Current release has the same or more proper tags,'
                    ' skipping new proper {result!r}',
                    {'result': candidate.name},
                )
                blocked = True
                break

            # Skip the candidate when its canonical name (with or without
            # extension) already appears in the download history.
            same_no_ext = name_no_ext == self._canonical_name(
                row['resource'], clear_extension=True)
            same_with_ext = name_with_ext == self._canonical_name(
                row['resource'])
            if same_no_ext or same_with_ext:
                log.debug(
                    'This proper {result!r} is already in history, skipping it',
                    {'result': candidate.name},
                )
                blocked = True
                break

        if not blocked:
            candidate.create_episode_object()
            # snatch it
            snatch_episode(candidate)
def _download_propers(self, proper_list):
    """
    Download proper (snatch it).

    :param proper_list: iterable of candidate proper search results.
    """
    for cur_proper in proper_list:
        # Only compare against history entries from the last 30 days.
        history_limit = datetime.datetime.today() - datetime.timedelta(
            days=30)

        main_db_con = db.DBConnection()
        # The LIKE '%02'/'%04'/'%09'/'%12' patterns match the action codes
        # for downloaded/snatched states encoded in the composite value.
        history_results = main_db_con.select(
            b'SELECT resource FROM history '
            b'WHERE showid = ? '
            b'AND season = ? '
            b'AND episode = ? '
            b'AND quality = ? '
            b'AND date >= ? '
            b"AND (action LIKE '%02' OR action LIKE '%04' OR action LIKE '%09' OR action LIKE '%12')",
            [
                cur_proper.indexerid, cur_proper.actual_season,
                cur_proper.actual_episode, cur_proper.quality,
                history_limit.strftime(History.date_format)
            ])

        # make sure that none of the existing history downloads are the same proper we're trying to download
        # if the result exists in history already we need to skip it
        # First pass: compare canonical names with the extension stripped.
        clean_proper_name = self._canonical_name(cur_proper.name,
                                                 clear_extension=True)
        if any(clean_proper_name == self._canonical_name(
                cur_result[b'resource'], clear_extension=True)
               for cur_result in history_results):
            log.debug(
                'This proper {result!r} is already in history, skipping it',
                {'result': cur_proper.name})
            continue
        else:
            # make sure that none of the existing history downloads are the same proper we're trying to download
            # Second pass: compare canonical names with the extension kept.
            clean_proper_name = self._canonical_name(cur_proper.name)
            if any(clean_proper_name == self._canonical_name(
                    cur_result[b'resource'])
                   for cur_result in history_results):
                log.debug(
                    'This proper {result!r} is already in history, skipping it',
                    {'result': cur_proper.name})
                continue

            cur_proper.create_episode_object()

            # snatch it
            snatch_episode(cur_proper)
            # Throttle according to the configured CPU preset.
            time.sleep(cpu_presets[app.CPU_PRESET])
def run(self):
    """Run manual snatch job."""
    generic_queue.QueueItem.run(self)
    self.started = True

    result = self.search_result
    try:
        log.info('Beginning to snatch release: {name}',
                 {'name': result.name})

        # Push an update to any open Web UIs through the WebSocket
        msg = ws.Message('QueueItemUpdate', self.to_json)
        msg.push()

        if not result:
            log.info('Unable to snatch release: {name}',
                     {'name': result.name})
        else:
            # Peer counts of -1/None mean the provider did not report them.
            peers_known = (result.seeders not in (-1, None)
                           and result.leechers not in (-1, None))
            if peers_known:
                log.info(
                    'Downloading {name} with {seeders} seeders and {leechers} leechers'
                    ' and size {size} from {provider}, through a {search_type} search',
                    {'name': result.name,
                     'seeders': result.seeders,
                     'leechers': result.leechers,
                     'size': pretty_file_size(result.size),
                     'provider': result.provider.name,
                     'search_type': result.search_type}
                )
            else:
                log.info(
                    'Downloading {name} with size: {size} from {provider}, through a {search_type} search',
                    {'name': result.name,
                     'size': pretty_file_size(result.size),
                     'provider': result.provider.name,
                     'search_type': result.search_type}
                )
            self.success = snatch_episode(result)

        # give the CPU a break
        time.sleep(common.cpu_presets[app.CPU_PRESET])
    except Exception:
        self.success = False
        log.exception('Snatch failed! For result: {name}',
                      {'name': result.name})
        ui.notifications.message(
            'Error while snatching selected result',
            'Unable to snatch the result for <i>{name}</i>'.format(
                name=result.name))

    if self.success is None:
        self.success = False

    # Push an update to any open Web UIs through the WebSocket
    msg = ws.Message('QueueItemUpdate', self.to_json)
    msg.push()

    self.finish()
def run(self):
    """Run backlog search thread."""
    generic_queue.QueueItem.run(self)
    self.started = True

    if not self.show.paused:
        try:
            log.info('Beginning backlog search for: {name}',
                     {'name': self.show.name})
            search_result = search_providers(self.show, self.segment)

            if not search_result:
                log.info(
                    'No needed episodes found during backlog search for: {name}',
                    {'name': self.show.name})
            else:
                for result in search_result:
                    # just use the first result for now
                    # Peer counts of -1/None mean the provider did not
                    # report them.
                    peers_known = (result.seeders not in (-1, None)
                                   and result.leechers not in (-1, None))
                    if peers_known:
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers '
                            'and size {size} from {provider}',
                            {'name': result.name,
                             'seeders': result.seeders,
                             'leechers': result.leechers,
                             'size': pretty_file_size(result.size),
                             'provider': result.provider.name})
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}',
                            {'name': result.name,
                             'size': pretty_file_size(result.size),
                             'provider': result.provider.name})
                    self.success = snatch_episode(result)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])
        # TODO: Remove the catch all exception.
        except Exception:
            self.success = False
            log.debug(traceback.format_exc())

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run manual snatch job."""
    generic_queue.QueueItem.run(self)
    self.started = True

    result = self.search_result
    try:
        log.info('Beginning to snatch release: {name}',
                 {'name': result.name})

        if not result:
            log.info('Unable to snatch release: {name}',
                     {'name': result.name})
        else:
            # Peer counts of -1/None mean the provider did not report them.
            peers_known = (result.seeders not in (-1, None)
                           and result.leechers not in (-1, None))
            if peers_known:
                log.info(
                    'Downloading {name} with {seeders} seeders and {leechers} leechers'
                    ' and size {size} from {provider}, through a {search_type} search',
                    {'name': result.name,
                     'seeders': result.seeders,
                     'leechers': result.leechers,
                     'size': pretty_file_size(result.size),
                     'provider': result.provider.name,
                     'search_type': result.search_type}
                )
            else:
                log.info(
                    'Downloading {name} with size: {size} from {provider}, through a {search_type} search',
                    {'name': result.name,
                     'size': pretty_file_size(result.size),
                     'provider': result.provider.name,
                     'search_type': result.search_type}
                )
            self.success = snatch_episode(result)

        # give the CPU a break
        time.sleep(common.cpu_presets[app.CPU_PRESET])
    except Exception:
        self.success = False
        log.exception('Snatch failed! For result: {name}',
                      {'name': result.name})
        ui.notifications.message(
            'Error while snatching selected result',
            'Unable to snatch the result for <i>{name}</i>'.format(
                name=result.name))

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run daily search thread."""
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        log.info('Beginning daily search for new episodes')
        found_results = search_for_needed_episodes(force=self.force)

        if not found_results:
            log.info('No needed episodes found')
        else:
            for result in found_results:
                # just use the first result for now
                # Peer counts of -1/None mean the provider did not report
                # them.
                peers_known = (result.seeders not in (-1, None)
                               and result.leechers not in (-1, None))
                if peers_known:
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers'
                        ' and size {size} from {provider}',
                        {'name': result.name,
                         'seeders': result.seeders,
                         'leechers': result.leechers,
                         'size': pretty_file_size(result.size),
                         'provider': result.provider.name})
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}',
                        {'name': result.name,
                         'size': pretty_file_size(result.size),
                         'provider': result.provider.name})
                self.success = snatch_episode(result)

                # give the CPU a break
                time.sleep(common.cpu_presets[app.CPU_PRESET])
    except Exception as error:
        self.success = False
        log.exception('DailySearchQueueItem Exception, error: {error}',
                      {'error': error})

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run failed thread."""
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        # First pass: mark every episode in the segment as failed and
        # revert it so the failed release is not kept as "downloaded".
        for ep_obj in self.segment:

            log.info('Marking episode as bad: {ep}',
                     {'ep': ep_obj.pretty_name()})

            failed_history.mark_failed(ep_obj)

            (release, provider) = failed_history.find_release(ep_obj)
            if release:
                # Record the failed release in both failed and regular
                # history tables.
                failed_history.log_failed(release)
                history.log_failed(ep_obj, release, provider)

            failed_history.revert_episode(ep_obj)
            log.info('Beginning failed download search for: {ep}',
                     {'ep': ep_obj.pretty_name()})

        # If it is wanted, self.down_cur_quality doesnt matter
        # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to!
        search_result = search_providers(self.show, self.segment, True)

        if search_result:
            for result in search_result:
                # just use the first result for now
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers '
                        'and size {size} from {provider}', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                self.success = snatch_episode(result)

                # give the CPU a break
                time.sleep(common.cpu_presets[app.CPU_PRESET])
        else:
            log.info('No needed episodes found during failed search for: {name}',
                     {'name': self.show.name})

    # TODO: Replace the catch all exception with a more specific one.
    except Exception:
        self.success = False
        log.info(traceback.format_exc())

    # ## Keep a list with the 100 last executed searches
    fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run manual snatch job."""
    generic_queue.QueueItem.run(self)
    self.started = True

    # Rebuild a provider search result from the cached DB row so it can be
    # snatched like a fresh search result.
    result = providers.get_provider_class(self.provider).get_result(self.segment)
    result.series = self.show
    result.url = self.cached_result['url']
    result.quality = int(self.cached_result['quality'])
    result.name = self.cached_result['name']
    result.size = int(self.cached_result['size'])
    result.seeders = int(self.cached_result['seeders'])
    result.leechers = int(self.cached_result['leechers'])
    result.release_group = self.cached_result['release_group']
    result.version = int(self.cached_result['version'])
    # NOTE(review): falls back to '' (str) rather than [] when there are no
    # proper tags; both are falsy and empty to iterate, but the type differs
    # from the split() result — confirm before changing.
    result.proper_tags = self.cached_result['proper_tags'].split('|') \
        if self.cached_result['proper_tags'] else ''
    result.manually_searched = True

    try:
        log.info('Beginning to manual snatch release: {name}',
                 {'name': result.name})

        if result:
            # Peer counts of -1/None mean the provider did not report them.
            if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                log.info(
                    'Downloading {name} with {seeders} seeders and {leechers} leechers'
                    ' and size {size} from {provider}', {
                        'name': result.name,
                        'seeders': result.seeders,
                        'leechers': result.leechers,
                        'size': pretty_file_size(result.size),
                        'provider': result.provider.name,
                    }
                )
            else:
                log.info(
                    'Downloading {name} with size: {size} from {provider}', {
                        'name': result.name,
                        'size': pretty_file_size(result.size),
                        'provider': result.provider.name,
                    }
                )
            self.success = snatch_episode(result)
        else:
            log.info('Unable to snatch release: {name}',
                     {'name': result.name})

        # give the CPU a break
        time.sleep(common.cpu_presets[app.CPU_PRESET])
    except Exception:
        self.success = False
        log.exception('Manual snatch failed! For result: {name}',
                      {'name': result.name})
        ui.notifications.message('Error while snatching selected result',
                                 'Unable to snatch the result for <i>{name}</i>'.format(name=result.name))

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run forced search thread."""
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        log.info(
            'Beginning {search_type} {season_pack}search for: {ep}', {
                'search_type': ('forced', 'manual')[bool(self.manual_search)],
                'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                'ep': self.segment[0].pretty_name()
            }
        )

        search_result = search_providers(self.show, self.segment, True,
                                         self.down_cur_quality,
                                         self.manual_search,
                                         self.manual_search_type)

        # Forced search: snatch results immediately.
        if not self.manual_search and search_result:
            for result in search_result:
                # Just use the first result for now
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers '
                        'and size {size} from {provider}', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                self.success = snatch_episode(result)

                # Give the CPU a break
                time.sleep(common.cpu_presets[app.CPU_PRESET])

        # Manual search: store results for the manual select page instead
        # of snatching.
        elif self.manual_search and search_result:
            self.results = search_result
            self.success = True

            if self.manual_search_type == 'season':
                ui.notifications.message('We have found season packs for {show_name}'
                                         .format(show_name=self.show.name),
                                         'These should become visible in the manual select page.')
            else:
                ui.notifications.message('We have found results for {ep}'
                                         .format(ep=self.segment[0].pretty_name()),
                                         'These should become visible in the manual select page.')
        else:
            ui.notifications.message('No results were found')
            log.info(
                'Unable to find {search_type} {season_pack}results for: {ep}', {
                    'search_type': ('forced', 'manual')[bool(self.manual_search)],
                    'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                    'ep': self.segment[0].pretty_name()
                }
            )

    # TODO: Remove catch all exception.
    except Exception:
        self.success = False
        log.debug(traceback.format_exc())

    # Keep a list with the 100 last executed searches
    fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    self.finish()