def run(self):
    """Run manual search thread.

    Searches all providers for the segment and, on success, stores the
    results for the manual select page and notifies the UI.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        season_pack = 'season pack ' if self.manual_search_type == 'season' else ''
        log.info(
            'Beginning {search_type} {season_pack}search for: {ep}', {
                'search_type': 'manual',
                'season_pack': season_pack,
                'ep': self.segment[0].pretty_name(),
            }
        )

        # Push an update to any open Web UIs through the WebSocket
        ws.Message('QueueItemUpdate', self.to_json).push()

        found_results = search_providers(
            self.show, self.segment,
            forced_search=True,
            down_cur_quality=True,
            manual_search=True,
            manual_search_type=self.manual_search_type,
        )

        if not found_results:
            ui.notifications.message('No results were found')
            log.info(
                'Unable to find {search_type} {season_pack}results for: {ep}', {
                    'search_type': 'manual',
                    'season_pack': season_pack,
                    'ep': self.segment[0].pretty_name(),
                }
            )
        else:
            self.results = found_results
            self.success = True
            if self.manual_search_type == 'season':
                headline = 'We have found season packs for {show_name}'.format(show_name=self.show.name)
            else:
                headline = 'We have found results for {ep}'.format(ep=self.segment[0].pretty_name())
            ui.notifications.message(headline, 'These should become visible in the manual select page.')
    # TODO: Remove catch all exception.
    except Exception:
        self.success = False
        log.debug(traceback.format_exc())

    # Keep a list with the 100 last executed searches
    fifo(SEARCH_HISTORY, self, SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    # Push an update to any open Web UIs through the WebSocket
    ws.Message('QueueItemUpdate', self.to_json).push()

    self.finish()
def run(self):
    """Run backlog search thread.

    Searches all providers for the segment and snatches each needed
    result directly via ``snatch_episode``.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    # A paused show is skipped entirely.
    if not self.show.paused:
        try:
            log.info('Beginning backlog search for: {name}', {'name': self.show.name})
            found_results = search_providers(self.show, self.segment)

            if not found_results:
                log.info('No needed episodes found during backlog search for: {name}',
                         {'name': self.show.name})
            else:
                for candidate in found_results:
                    # just use the first result for now
                    has_peer_counts = (candidate.seeders not in (-1, None)
                                       and candidate.leechers not in (-1, None))
                    if has_peer_counts:
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers '
                            'and size {size} from {provider}', {
                                'name': candidate.name,
                                'seeders': candidate.seeders,
                                'leechers': candidate.leechers,
                                'size': pretty_file_size(candidate.size),
                                'provider': candidate.provider.name,
                            }
                        )
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}', {
                                'name': candidate.name,
                                'size': pretty_file_size(candidate.size),
                                'provider': candidate.provider.name,
                            }
                        )

                    self.success = snatch_episode(candidate)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])
        # TODO: Remove the catch all exception.
        except Exception:
            self.success = False
            log.debug(traceback.format_exc())

    if self.success is None:
        self.success = False

    self.finish()
def do_test():
    """Test to perform.

    Builds a show with WANTED episodes from the current test data,
    runs a provider search and asserts the chosen result matches the
    expected one.
    """
    global search_items  # pylint: disable=global-statement
    search_items = cur_data["i"]

    show = Series(1, tvdb_id)
    show.name = show_name
    show.quality = cur_data["q"]
    show.save_to_db()
    app.showList.append(show)

    episode = None
    for ep_number in cur_data["e"]:
        episode = Episode(show, cur_data["s"], ep_number)
        episode.status = common.WANTED
        episode.save_to_db()

    best_result = search_providers(show, episode.episode, force_search)
    if not best_result:
        # Expected value must be falsy too when nothing was found.
        assert cur_data["b"] == best_result
    else:
        # BUGFIX: previously `best_result.name` was dereferenced
        # unconditionally, raising AttributeError on None instead of a
        # clean assertion failure. Only compare the name when a result
        # was actually returned.
        # first is expected, second is chosen one
        assert cur_data["b"] == best_result.name  # pylint: disable=no-member
def run(self):
    """Run failed thread.

    Marks each episode in the segment as failed, reverts it, then
    searches providers for a replacement and snatches the first result
    through the snatch queue.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        for ep_obj in self.segment:
            log.info('Marking episode as bad: {ep}', {'ep': ep_obj.pretty_name()})

            failed_history.mark_failed(ep_obj)

            (release, provider) = failed_history.find_release(ep_obj)
            if release:
                failed_history.log_failed(release)
                history.log_failed(ep_obj, release, provider)

            failed_history.revert_episode(ep_obj)
            log.info('Beginning failed download search for: {ep}', {'ep': ep_obj.pretty_name()})

        # If it is wanted, self.down_cur_quality doesn't matter;
        # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to!
        search_result = search_providers(self.show, self.segment, True)

        if search_result:
            for result in search_result:
                # just use the first result for now
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers '
                        'and size {size} from {provider}', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )

                # Set the search_type for the result.
                result.search_type = SearchType.FAILED_SEARCH

                # Create the queue item
                snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                # Add the queue item to the queue
                app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                # BUGFIX: the old `while snatch_queue_item.success is False`
                # guard exited immediately while success was still None, and
                # its inner `if ... and snatch_queue_item.success` branch was
                # unreachable (success cannot be truthy under an `is False`
                # guard), so self.success was never set on a successful
                # snatch. Wait until the snatch item reports an outcome,
                # then propagate it. NOTE(review): assumes SnatchQueueItem
                # sets `success` to a non-None value when it finishes —
                # confirm against SnatchQueueItem.run.
                self.success = False
                while snatch_queue_item.success is None:
                    time.sleep(1)
                self.success = bool(snatch_queue_item.success)

                # give the CPU a break
                time.sleep(common.cpu_presets[app.CPU_PRESET])
        else:
            log.info('No needed episodes found during failed search for: {name}',
                     {'name': self.show.name})
    # TODO: Replace the catch all exception with a more specific one.
    except Exception:
        self.success = False
        log.info(traceback.format_exc())

    # Keep a list with the 100 last executed searches
    fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run backlog search thread.

    Searches all providers for the segment and snatches each needed
    result through the snatch queue.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    if not self.show.paused:
        try:
            log.info('Beginning backlog search for: {name}', {'name': self.show.name})
            search_result = search_providers(self.show, self.segment)

            if search_result:
                for result in search_result:
                    # just use the first result for now
                    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers '
                            'and size {size} from {provider}', {
                                'name': result.name,
                                'seeders': result.seeders,
                                'leechers': result.leechers,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}', {
                                'name': result.name,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )

                    # Set the search_type for the result.
                    result.search_type = SearchType.BACKLOG_SEARCH

                    # Create the queue item
                    snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                    # Add the queue item to the queue
                    app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                    # BUGFIX: the old `while snatch_queue_item.success is False`
                    # guard exited immediately while success was still None,
                    # and its inner `if ... and snatch_queue_item.success`
                    # branch was unreachable, so self.success was never set on
                    # a successful snatch. Wait until the snatch item reports
                    # an outcome, then propagate it. NOTE(review): assumes
                    # SnatchQueueItem sets `success` to a non-None value when
                    # it finishes — confirm against SnatchQueueItem.run.
                    self.success = False
                    while snatch_queue_item.success is None:
                        time.sleep(1)
                    self.success = bool(snatch_queue_item.success)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])
            else:
                log.info('No needed episodes found during backlog search for: {name}',
                         {'name': self.show.name})
        # TODO: Remove the catch all exception.
        except Exception:
            self.success = False
            log.debug(traceback.format_exc())

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run forced search thread.

    Either snatches results automatically (forced search) or collects
    them for the manual select page (manual search).
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        log.info(
            'Beginning {search_type} {season_pack}search for: {ep}', {
                'search_type': ('forced', 'manual')[bool(self.manual_search)],
                'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                'ep': self.segment[0].pretty_name()
            }
        )

        search_result = search_providers(self.show, self.segment, True, self.down_cur_quality,
                                         self.manual_search, self.manual_search_type)

        if not self.manual_search and search_result:
            for result in search_result:
                # Just use the first result for now
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers '
                        'and size {size} from {provider}', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )

                # Set the search_type for the result.
                result.search_type = SearchType.FORCED_SEARCH

                # Create the queue item
                snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                # Add the queue item to the queue
                app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                # BUGFIX: the old `while snatch_queue_item.success is False`
                # guard exited immediately while success was still None, and
                # its inner `if ... and snatch_queue_item.success` branch was
                # unreachable, so self.success was never set on a successful
                # snatch. Wait until the snatch item reports an outcome, then
                # propagate it. NOTE(review): assumes SnatchQueueItem sets
                # `success` to a non-None value when it finishes — confirm
                # against SnatchQueueItem.run.
                self.success = False
                while snatch_queue_item.success is None:
                    time.sleep(1)
                self.success = bool(snatch_queue_item.success)

                # Give the CPU a break
                time.sleep(common.cpu_presets[app.CPU_PRESET])
        elif self.manual_search and search_result:
            self.results = search_result
            self.success = True
            if self.manual_search_type == 'season':
                ui.notifications.message('We have found season packs for {show_name}'
                                         .format(show_name=self.show.name),
                                         'These should become visible in the manual select page.')
            else:
                ui.notifications.message('We have found results for {ep}'
                                         .format(ep=self.segment[0].pretty_name()),
                                         'These should become visible in the manual select page.')
        else:
            ui.notifications.message('No results were found')
            log.info(
                'Unable to find {search_type} {season_pack}results for: {ep}', {
                    'search_type': ('forced', 'manual')[bool(self.manual_search)],
                    'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                    'ep': self.segment[0].pretty_name()
                }
            )
    # TODO: Remove catch all exception.
    except Exception:
        self.success = False
        log.debug(traceback.format_exc())

    # Keep a list with the 100 last executed searches
    fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run failed thread.

    Marks each episode in the segment as failed, reverts it, then
    searches providers for a replacement and snatches the first result
    through the snatch queue.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        for ep_obj in self.segment:
            log.info('Marking episode as bad: {ep}', {'ep': ep_obj.pretty_name()})

            failed_history.mark_failed(ep_obj)

            (release, provider) = failed_history.find_release(ep_obj)
            if release:
                failed_history.log_failed(release)
                history.log_failed(ep_obj, release, provider)

            failed_history.revert_episode(ep_obj)
            log.info('Beginning failed download search for: {ep}', {'ep': ep_obj.pretty_name()})

        # If it is wanted, self.down_cur_quality doesn't matter;
        # if it isn't wanted, we need to make sure to not overwrite the existing ep that we reverted to!
        search_result = search_providers(self.show, self.segment, True)

        if search_result:
            for result in search_result:
                # just use the first result for now
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers '
                        'and size {size} from {provider}', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )

                # Set the search_type for the result.
                result.search_type = SearchType.FAILED_SEARCH

                # Create the queue item
                snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                # Add the queue item to the queue
                app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                # BUGFIX: the old `while snatch_queue_item.success is False`
                # guard exited immediately while success was still None, and
                # its inner `if ... and snatch_queue_item.success` branch was
                # unreachable, so self.success was never set on a successful
                # snatch. Wait until the snatch item reports an outcome, then
                # propagate it. NOTE(review): assumes SnatchQueueItem sets
                # `success` to a non-None value when it finishes — confirm
                # against SnatchQueueItem.run.
                self.success = False
                while snatch_queue_item.success is None:
                    time.sleep(1)
                self.success = bool(snatch_queue_item.success)

                # give the CPU a break
                time.sleep(common.cpu_presets[app.CPU_PRESET])
        else:
            log.info('No needed episodes found during failed search for: {name}',
                     {'name': self.show.name})
    # TODO: Replace the catch all exception with a more specific one.
    except Exception:
        self.success = False
        log.info(traceback.format_exc())

    # Keep a list with the 100 last executed searches
    fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run backlog search thread.

    Searches all providers for the segment and snatches each needed
    result through the snatch queue.
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    if not self.show.paused:
        try:
            log.info('Beginning backlog search for: {name}', {'name': self.show.name})
            search_result = search_providers(self.show, self.segment)

            if search_result:
                for result in search_result:
                    # just use the first result for now
                    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers '
                            'and size {size} from {provider}', {
                                'name': result.name,
                                'seeders': result.seeders,
                                'leechers': result.leechers,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}', {
                                'name': result.name,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )

                    # Set the search_type for the result.
                    result.search_type = SearchType.BACKLOG_SEARCH

                    # Create the queue item
                    snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                    # Add the queue item to the queue
                    app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                    # BUGFIX: the old `while snatch_queue_item.success is False`
                    # guard exited immediately while success was still None,
                    # and its inner `if ... and snatch_queue_item.success`
                    # branch was unreachable, so self.success was never set on
                    # a successful snatch. Wait until the snatch item reports
                    # an outcome, then propagate it. NOTE(review): assumes
                    # SnatchQueueItem sets `success` to a non-None value when
                    # it finishes — confirm against SnatchQueueItem.run.
                    self.success = False
                    while snatch_queue_item.success is None:
                        time.sleep(1)
                    self.success = bool(snatch_queue_item.success)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])
            else:
                log.info('No needed episodes found during backlog search for: {name}',
                         {'name': self.show.name})
        # TODO: Remove the catch all exception.
        except Exception:
            self.success = False
            log.debug(traceback.format_exc())

    if self.success is None:
        self.success = False

    self.finish()
def run(self):
    """Run forced search thread.

    Either snatches results automatically (forced search) or collects
    them for the manual select page (manual search).
    """
    generic_queue.QueueItem.run(self)
    self.started = True

    try:
        log.info(
            'Beginning {search_type} {season_pack}search for: {ep}', {
                'search_type': ('forced', 'manual')[bool(self.manual_search)],
                'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                'ep': self.segment[0].pretty_name()
            }
        )

        search_result = search_providers(self.show, self.segment, True, self.down_cur_quality,
                                         self.manual_search, self.manual_search_type)

        if not self.manual_search and search_result:
            for result in search_result:
                # Just use the first result for now
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers '
                        'and size {size} from {provider}', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )

                # Set the search_type for the result.
                result.search_type = SearchType.FORCED_SEARCH

                # Create the queue item
                snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                # Add the queue item to the queue
                app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                # BUGFIX: the old `while snatch_queue_item.success is False`
                # guard exited immediately while success was still None, and
                # its inner `if ... and snatch_queue_item.success` branch was
                # unreachable, so self.success was never set on a successful
                # snatch. Wait until the snatch item reports an outcome, then
                # propagate it. NOTE(review): assumes SnatchQueueItem sets
                # `success` to a non-None value when it finishes — confirm
                # against SnatchQueueItem.run.
                self.success = False
                while snatch_queue_item.success is None:
                    time.sleep(1)
                self.success = bool(snatch_queue_item.success)

                # Give the CPU a break
                time.sleep(common.cpu_presets[app.CPU_PRESET])
        elif self.manual_search and search_result:
            self.results = search_result
            self.success = True
            if self.manual_search_type == 'season':
                ui.notifications.message('We have found season packs for {show_name}'
                                         .format(show_name=self.show.name),
                                         'These should become visible in the manual select page.')
            else:
                ui.notifications.message('We have found results for {ep}'
                                         .format(ep=self.segment[0].pretty_name()),
                                         'These should become visible in the manual select page.')
        else:
            ui.notifications.message('No results were found')
            log.info(
                'Unable to find {search_type} {season_pack}results for: {ep}', {
                    'search_type': ('forced', 'manual')[bool(self.manual_search)],
                    'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                    'ep': self.segment[0].pretty_name()
                }
            )
    # TODO: Remove catch all exception.
    except Exception:
        self.success = False
        log.debug(traceback.format_exc())

    # Keep a list with the 100 last executed searches
    fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

    if self.success is None:
        self.success = False

    self.finish()