Example #1
    def run(self):
        """Run daily search thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        try:
            log.info('Beginning daily search for new episodes')
            found_results = search_for_needed_episodes(self.scheduler_start_time, force=self.force)

            if not found_results:
                log.info('No needed episodes found')
            else:
                for result in found_results:
                    # just use the first result for now
                    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers'
                            ' and size {size} from {provider}', {
                                'name': result.name,
                                'seeders': result.seeders,
                                'leechers': result.leechers,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}', {
                                'name': result.name,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )

                    # Set the search_type for the result.
                    result.search_type = SearchType.DAILY_SEARCH

                    # Create the queue item
                    snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                    # Add the queue item to the queue
                    app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                    self.success = False
                    while snatch_queue_item.success is False:
                        if snatch_queue_item.started and snatch_queue_item.success:
                            self.success = True
                        time.sleep(1)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])

        except Exception as error:
            self.success = False
            log.exception('DailySearchQueueItem Exception, error: {error}', {'error': error})

        if self.success is None:
            self.success = False

        self.finish()
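The polling loop above sets self.success = False and then waits for snatch_queue_item.success to stop being False; as written, the inner if can only fire if success flips to True between the while test and the if test, so self.success tends to stay False even after a successful snatch. A minimal, hypothetical sketch of a more explicit wait (wait_for_item is not part of the project; it only assumes the queue item exposes a success attribute that starts out False):

import time


def wait_for_item(item, poll_interval=1, timeout=None):
    """Block until `item.success` is no longer False, then return it.

    Returns the final value of `item.success` (True or None), or False if
    the optional timeout (in seconds) expires first.
    """
    waited = 0
    while item.success is False:
        if timeout is not None and waited >= timeout:
            return False
        time.sleep(poll_interval)
        waited += poll_interval
    return item.success

A caller could then write self.success = bool(wait_for_item(snatch_queue_item)), assuming a None result should still count as a failure.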
Example #2
    def run(self):
        """Run manual snatch job."""
        generic_queue.QueueItem.run(self)
        self.started = True

        result = self.search_result

        try:
            log.info('Beginning to snatch release: {name}',
                     {'name': result.name})

            # Push an update to any open Web UIs through the WebSocket
            msg = ws.Message('QueueItemUpdate', self.to_json)
            msg.push()

            if result:
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers'
                        ' and size {size} from {provider}, through a {search_type} search', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                            'search_type': result.search_type
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}, through a {search_type} search', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                            'search_type': result.search_type
                        }
                    )
                self.success = snatch_episode(result)
            else:
                log.info('Unable to snatch release: {name}',
                         {'name': result.name})

            # give the CPU a break
            time.sleep(common.cpu_presets[app.CPU_PRESET])

        except Exception:
            self.success = False
            log.exception('Snatch failed! For result: {name}', {'name': result.name})
            ui.notifications.message('Error while snatching selected result',
                                     'Unable to snatch the result for <i>{name}</i>'.format(name=result.name))

        if self.success is None:
            self.success = False

        # Push an update to any open Web UIs through the WebSocket
        msg = ws.Message('QueueItemUpdate', self.to_json)
        msg.push()

        self.finish()
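Throughout these examples, log.info and log.exception take a brace-style format string plus a dict of values, which the stdlib logging module does not support directly; the project evidently routes calls through an adapter. A rough, hypothetical Python 3 equivalent (BraceStyleAdapter and BraceMessage are illustrative names, not the project's classes):

import logging


class BraceMessage(object):
    """Defer brace-style formatting until the record is actually emitted."""

    def __init__(self, fmt, mapping):
        self.fmt = fmt
        self.mapping = mapping or {}

    def __str__(self):
        return self.fmt.format_map(self.mapping)


class BraceStyleAdapter(logging.LoggerAdapter):
    """Accept calls like log.info('... {name}', {'name': value})."""

    def log(self, level, msg, *args, **kwargs):
        if self.isEnabledFor(level):
            mapping = args[0] if args else {}
            self.logger.log(level, BraceMessage(msg, mapping), **kwargs)


log = BraceStyleAdapter(logging.getLogger(__name__), {})
log.info('Downloading {name} from {provider}', {'name': 'Some.Release.720p', 'provider': 'example'})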
Example #3
    def run(self):
        """Run backlog search thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        if not self.show.paused:
            try:
                log.info('Beginning backlog search for: {name}',
                         {'name': self.show.name})
                search_result = search_providers(self.show, self.segment)

                if search_result:
                    for result in search_result:
                        # just use the first result for now
                        if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                            log.info(
                                'Downloading {name} with {seeders} seeders and {leechers} leechers '
                                'and size {size} from {provider}', {
                                    'name': result.name,
                                    'seeders': result.seeders,
                                    'leechers': result.leechers,
                                    'size': pretty_file_size(result.size),
                                    'provider': result.provider.name,
                                })
                        else:
                            log.info(
                                'Downloading {name} with size: {size} from {provider}',
                                {
                                    'name': result.name,
                                    'size': pretty_file_size(result.size),
                                    'provider': result.provider.name,
                                })
                        self.success = snatch_episode(result)

                        # give the CPU a break
                        time.sleep(common.cpu_presets[app.CPU_PRESET])
                else:
                    log.info(
                        'No needed episodes found during backlog search for: {name}',
                        {'name': self.show.name})

            # TODO: Remove the catch all exception.
            except Exception:
                self.success = False
                log.debug(traceback.format_exc())

        if self.success is None:
            self.success = False

        self.finish()
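Every example repeats the same seeders/leechers check before logging the download line. A small, self-contained helper along these lines could factor that out; describe_result and pretty_size are hypothetical stand-ins (pretty_size only mimics what the project's pretty_file_size appears to do), and the result object is assumed to expose name, size, provider.name, seeders and leechers:

def pretty_size(num_bytes):
    """Tiny size formatter used only for this sketch."""
    size = float(num_bytes)
    for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
        if size < 1024.0:
            return '{0:.2f} {1}'.format(size, unit)
        size /= 1024.0
    return '{0:.2f} PB'.format(size)


def describe_result(result):
    """Build the 'Downloading ...' message used throughout these examples."""
    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
        return ('Downloading {0} with {1} seeders and {2} leechers and size {3} '
                'from {4}').format(result.name, result.seeders, result.leechers,
                                   pretty_size(result.size), result.provider.name)
    return 'Downloading {0} with size: {1} from {2}'.format(
        result.name, pretty_size(result.size), result.provider.name)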
Example #4
    def run(self):
        """Run manual snatch job."""
        generic_queue.QueueItem.run(self)
        self.started = True

        result = self.search_result

        try:
            log.info('Beginning to snatch release: {name}',
                     {'name': result.name})

            if result:
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers'
                        ' and size {size} from {provider}, through a {search_type} search', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                            'search_type': result.search_type
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}, through a {search_type} search', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                            'search_type': result.search_type
                        }
                    )
                self.success = snatch_episode(result)
            else:
                log.info('Unable to snatch release: {name}',
                         {'name': result.name})

            # give the CPU a break
            time.sleep(common.cpu_presets[app.CPU_PRESET])

        except Exception:
            self.success = False
            log.exception('Snatch failed! For result: {name}', {'name': result.name})
            ui.notifications.message('Error while snatching selected result',
                                     'Unable to snatch the result for <i>{name}</i>'.format(name=result.name))

        if self.success is None:
            self.success = False

        self.finish()
Example #5
    def run(self):
        """Run daily search thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        try:
            log.info('Beginning daily search for new episodes')
            found_results = search_for_needed_episodes(force=self.force)

            if not found_results:
                log.info('No needed episodes found')
            else:
                for result in found_results:
                    # just use the first result for now
                    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers'
                            ' and size {size} from {provider}', {
                                'name': result.name,
                                'seeders': result.seeders,
                                'leechers': result.leechers,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            })
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}',
                            {
                                'name': result.name,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            })
                    self.success = snatch_episode(result)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])

        except Exception as error:
            self.success = False
            log.exception('DailySearchQueueItem Exception, error: {error}',
                          {'error': error})

        if self.success is None:
            self.success = False

        self.finish()
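Each search loop ends with time.sleep(common.cpu_presets[app.CPU_PRESET]) so the worker thread yields between results. The sketch below only illustrates the idea; the preset names echo the pattern above, but the numeric values and the cpu_break helper are made up for this example:

import time

# Illustrative values only; the real mapping lives in the project's `common` module.
cpu_presets = {
    'HIGH': 0.05,
    'NORMAL': 0.02,
    'LOW': 0.01,
}

CPU_PRESET = 'NORMAL'


def cpu_break():
    """Give the CPU a break between loop iterations, as the examples above do."""
    time.sleep(cpu_presets.get(CPU_PRESET, cpu_presets['NORMAL']))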
Example #6
    def run(self):
        """Run failed thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        try:
            for ep_obj in self.segment:

                log.info('Marking episode as bad: {ep}',
                         {'ep': ep_obj.pretty_name()})

                failed_history.mark_failed(ep_obj)

                (release, provider) = failed_history.find_release(ep_obj)
                if release:
                    failed_history.log_failed(release)
                    history.log_failed(ep_obj, release, provider)

                failed_history.revert_episode(ep_obj)
                log.info('Beginning failed download search for: {ep}',
                         {'ep': ep_obj.pretty_name()})

            # If it is wanted, self.down_cur_quality doesn't matter.
            # If it isn't wanted, we need to make sure not to overwrite the existing episode that we reverted to!
            search_result = search_providers(self.show, self.segment, True)

            if search_result:
                for result in search_result:
                    # just use the first result for now
                    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers '
                            'and size {size} from {provider}', {
                                'name': result.name,
                                'seeders': result.seeders,
                                'leechers': result.leechers,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            })
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}',
                            {
                                'name': result.name,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            })

                    # Set the search_type for the result.
                    result.search_type = SearchType.FAILED_SEARCH

                    # Create the queue item
                    snatch_queue_item = SnatchQueueItem(
                        result.series, result.episodes, result)

                    # Add the queue item to the queue
                    app.manual_snatch_scheduler.action.add_item(
                        snatch_queue_item)

                    self.success = False
                    while snatch_queue_item.success is False:
                        if snatch_queue_item.started and snatch_queue_item.success:
                            self.success = True
                        time.sleep(1)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])
            else:
                log.info(
                    'No needed episodes found during failed search for: {name}',
                    {'name': self.show.name})

        # TODO: Replace the catch all exception with a more specific one.
        except Exception:
            self.success = False
            log.info(traceback.format_exc())

        # Keep a list of the last 100 executed searches
        fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        self.finish()
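The failed and forced search items record themselves with fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE) so only the last 100 searches are kept. A minimal sketch of such a helper, assuming the history is a plain list (the project's real implementation may differ):

FORCED_SEARCH_HISTORY = []
FORCED_SEARCH_HISTORY_SIZE = 100


def fifo(history, item, max_size=FORCED_SEARCH_HISTORY_SIZE):
    """Append `item`, dropping the oldest entry once `max_size` is reached."""
    if len(history) >= max_size:
        history.pop(0)
    history.append(item)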
Example #7
    def run(self):
        """Run backlog search thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        if not self.show.paused:
            try:
                log.info('Beginning backlog search for: {name}',
                         {'name': self.show.name})
                search_result = search_providers(self.show, self.segment)

                if search_result:
                    for result in search_result:
                        # just use the first result for now
                        if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                            log.info(
                                'Downloading {name} with {seeders} seeders and {leechers} leechers '
                                'and size {size} from {provider}', {
                                    'name': result.name,
                                    'seeders': result.seeders,
                                    'leechers': result.leechers,
                                    'size': pretty_file_size(result.size),
                                    'provider': result.provider.name,
                                })
                        else:
                            log.info(
                                'Downloading {name} with size: {size} from {provider}',
                                {
                                    'name': result.name,
                                    'size': pretty_file_size(result.size),
                                    'provider': result.provider.name,
                                })

                        # Set the search_type for the result.
                        result.search_type = SearchType.BACKLOG_SEARCH

                        # Create the queue item
                        snatch_queue_item = SnatchQueueItem(
                            result.series, result.episodes, result)

                        # Add the queue item to the queue
                        app.manual_snatch_scheduler.action.add_item(
                            snatch_queue_item)

                        self.success = False
                        while snatch_queue_item.success is False:
                            if snatch_queue_item.started and snatch_queue_item.success:
                                self.success = True
                            time.sleep(1)

                        # give the CPU a break
                        time.sleep(common.cpu_presets[app.CPU_PRESET])
                else:
                    log.info(
                        'No needed episodes found during backlog search for: {name}',
                        {'name': self.show.name})

            # TODO: Remove the catch all exception.
            except Exception:
                self.success = False
                log.debug(traceback.format_exc())

        if self.success is None:
            self.success = False

        self.finish()
Example #8
    def run(self):
        """Run forced search thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        try:
            log.info(
                'Beginning {search_type} {season_pack}search for: {ep}', {
                    'search_type': ('forced', 'manual')[bool(self.manual_search)],
                    'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                    'ep': self.segment[0].pretty_name()
                }
            )

            search_result = search_providers(self.show, self.segment, True,
                                             self.down_cur_quality,
                                             self.manual_search,
                                             self.manual_search_type)

            if not self.manual_search and search_result:
                for result in search_result:
                    # Just use the first result for now
                    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers '
                            'and size {size} from {provider}', {
                                'name': result.name,
                                'seeders': result.seeders,
                                'leechers': result.leechers,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            })
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}',
                            {
                                'name': result.name,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            })

                    # Set the search_type for the result.
                    result.search_type = SearchType.FORCED_SEARCH

                    # Create the queue item
                    snatch_queue_item = SnatchQueueItem(
                        result.series, result.episodes, result)

                    # Add the queue item to the queue
                    app.manual_snatch_scheduler.action.add_item(
                        snatch_queue_item)

                    self.success = False
                    while snatch_queue_item.success is False:
                        if snatch_queue_item.started and snatch_queue_item.success:
                            self.success = True
                        time.sleep(1)

                    # Give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])

            elif self.manual_search and search_result:
                self.results = search_result
                self.success = True

                if self.manual_search_type == 'season':
                    ui.notifications.message(
                        'We have found season packs for {show_name}'.format(
                            show_name=self.show.name),
                        'These should become visible in the manual select page.'
                    )
                else:
                    ui.notifications.message(
                        'We have found results for {ep}'.format(
                            ep=self.segment[0].pretty_name()),
                        'These should become visible in the manual select page.'
                    )

            else:
                ui.notifications.message('No results were found')
                log.info(
                    'Unable to find {search_type} {season_pack}results for: {ep}', {
                        'search_type': ('forced', 'manual')[bool(self.manual_search)],
                        'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                        'ep': self.segment[0].pretty_name()
                    }
                )

        # TODO: Remove catch all exception.
        except Exception:
            self.success = False
            log.debug(traceback.format_exc())

        # Keep a list of the last 100 executed searches
        fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        self.finish()
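The ('forced', 'manual')[bool(self.manual_search)] expressions above pick a label by indexing a tuple with a boolean. Conditional expressions produce the same strings and read more directly; search_label below is a hypothetical helper written only to show the equivalence:

def search_label(manual_search, manual_search_type=None):
    """Return the (search_type, season_pack) labels built by the tuple-indexing above."""
    search_type = 'manual' if manual_search else 'forced'
    season_pack = 'season pack ' if manual_search_type == 'season' else ''
    return search_type, season_pack


# e.g. 'Beginning manual season pack search for: ...' vs 'Beginning forced search for: ...'
print('Beginning {0} {1}search for: ...'.format(*search_label(True, 'season')))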
Example #9
    def run(self):
        """Run manual snatch job."""
        generic_queue.QueueItem.run(self)
        self.started = True

        result = providers.get_provider_class(self.provider).get_result(self.segment)
        result.series = self.show
        result.url = self.cached_result['url']
        result.quality = int(self.cached_result['quality'])
        result.name = self.cached_result['name']
        result.size = int(self.cached_result['size'])
        result.seeders = int(self.cached_result['seeders'])
        result.leechers = int(self.cached_result['leechers'])
        result.release_group = self.cached_result['release_group']
        result.version = int(self.cached_result['version'])
        result.proper_tags = self.cached_result['proper_tags'].split('|') \
            if self.cached_result['proper_tags'] else ''
        result.manually_searched = True

        try:
            log.info('Beginning to manually snatch release: {name}',
                     {'name': result.name})

            if result:
                if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                    log.info(
                        'Downloading {name} with {seeders} seeders and {leechers} leechers'
                        ' and size {size} from {provider}', {
                            'name': result.name,
                            'seeders': result.seeders,
                            'leechers': result.leechers,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                else:
                    log.info(
                        'Downloading {name} with size: {size} from {provider}', {
                            'name': result.name,
                            'size': pretty_file_size(result.size),
                            'provider': result.provider.name,
                        }
                    )
                self.success = snatch_episode(result)
            else:
                log.info('Unable to snatch release: {name}',
                         {'name': result.name})

            # give the CPU a break
            time.sleep(common.cpu_presets[app.CPU_PRESET])

        except Exception:
            self.success = False
            log.exception('Manual snatch failed! For result: {name}', {'name': result.name})
            ui.notifications.message('Error while snatching selected result',
                                     'Unable to snatch the result for <i>{name}</i>'.format(name=result.name))

        if self.success is None:
            self.success = False

        self.finish()
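The example above rebuilds a provider result field by field from a cached row, converting each value by hand. A hedged sketch of the same idea with defensive defaults; result_from_cache is hypothetical, the key names are taken from the example, and the result argument is only assumed to accept attribute assignment:

def result_from_cache(result, cached):
    """Copy cached provider fields onto `result`, coercing types with fallbacks."""
    result.url = cached.get('url', '')
    result.name = cached.get('name', '')
    result.quality = int(cached.get('quality') or 0)
    result.size = int(cached.get('size') or -1)
    result.seeders = int(cached.get('seeders') or -1)
    result.leechers = int(cached.get('leechers') or -1)
    result.release_group = cached.get('release_group') or ''
    result.version = int(cached.get('version') or -1)
    proper_tags = cached.get('proper_tags') or ''
    result.proper_tags = proper_tags.split('|') if proper_tags else []
    result.manually_searched = True
    return result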
Example #10
    def run(self):
        """Run failed thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        try:
            for ep_obj in self.segment:

                log.info('Marking episode as bad: {ep}',
                         {'ep': ep_obj.pretty_name()})

                failed_history.mark_failed(ep_obj)

                (release, provider) = failed_history.find_release(ep_obj)
                if release:
                    failed_history.log_failed(release)
                    history.log_failed(ep_obj, release, provider)

                failed_history.revert_episode(ep_obj)
                log.info('Beginning failed download search for: {ep}',
                         {'ep': ep_obj.pretty_name()})

            # If it is wanted, self.down_cur_quality doesn't matter.
            # If it isn't wanted, we need to make sure not to overwrite the existing episode that we reverted to!
            search_result = search_providers(self.show, self.segment, True)

            if search_result:
                for result in search_result:
                    # just use the first result for now
                    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers '
                            'and size {size} from {provider}', {
                                'name': result.name,
                                'seeders': result.seeders,
                                'leechers': result.leechers,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}', {
                                'name': result.name,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )

                    # Set the search_type for the result.
                    result.search_type = SearchType.FAILED_SEARCH

                    # Create the queue item
                    snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                    # Add the queue item to the queue
                    app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                    self.success = False
                    while snatch_queue_item.success is False:
                        if snatch_queue_item.started and snatch_queue_item.success:
                            self.success = True
                        time.sleep(1)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])
            else:
                log.info('No needed episodes found during failed search for: {name}',
                         {'name': self.show.name})

        # TODO: Replace the catch all exception with a more specific one.
        except Exception:
            self.success = False
            log.info(traceback.format_exc())

        # Keep a list of the last 100 executed searches
        fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        self.finish()
Example #11
    def run(self):
        """Run backlog search thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        if not self.show.paused:
            try:
                log.info('Beginning backlog search for: {name}',
                         {'name': self.show.name})
                search_result = search_providers(self.show, self.segment)

                if search_result:
                    for result in search_result:
                        # just use the first result for now
                        if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                            log.info(
                                'Downloading {name} with {seeders} seeders and {leechers} leechers '
                                'and size {size} from {provider}', {
                                    'name': result.name,
                                    'seeders': result.seeders,
                                    'leechers': result.leechers,
                                    'size': pretty_file_size(result.size),
                                    'provider': result.provider.name,
                                }
                            )
                        else:
                            log.info(
                                'Downloading {name} with size: {size} from {provider}', {
                                    'name': result.name,
                                    'size': pretty_file_size(result.size),
                                    'provider': result.provider.name,
                                }
                            )

                        # Set the search_type for the result.
                        result.search_type = SearchType.BACKLOG_SEARCH

                        # Create the queue item
                        snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                        # Add the queue item to the queue
                        app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                        self.success = False
                        while snatch_queue_item.success is False:
                            if snatch_queue_item.started and snatch_queue_item.success:
                                self.success = True
                            time.sleep(1)

                        # give the CPU a break
                        time.sleep(common.cpu_presets[app.CPU_PRESET])
                else:
                    log.info('No needed episodes found during backlog search for: {name}',
                             {'name': self.show.name})

            # TODO: Remove the catch all exception.
            except Exception:
                self.success = False
                log.debug(traceback.format_exc())

        if self.success is None:
            self.success = False

        self.finish()
Example #12
    def run(self):
        """Run forced search thread."""
        generic_queue.QueueItem.run(self)
        self.started = True

        try:
            log.info(
                'Beginning {search_type} {season_pack}search for: {ep}', {
                    'search_type': ('forced', 'manual')[bool(self.manual_search)],
                    'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                    'ep': self.segment[0].pretty_name()
                }
            )

            search_result = search_providers(self.show, self.segment, True, self.down_cur_quality,
                                             self.manual_search, self.manual_search_type)

            if not self.manual_search and search_result:
                for result in search_result:
                    # Just use the first result for now
                    if result.seeders not in (-1, None) and result.leechers not in (-1, None):
                        log.info(
                            'Downloading {name} with {seeders} seeders and {leechers} leechers '
                            'and size {size} from {provider}', {
                                'name': result.name,
                                'seeders': result.seeders,
                                'leechers': result.leechers,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )
                    else:
                        log.info(
                            'Downloading {name} with size: {size} from {provider}', {
                                'name': result.name,
                                'size': pretty_file_size(result.size),
                                'provider': result.provider.name,
                            }
                        )

                    # Set the search_type for the result.
                    result.search_type = SearchType.FORCED_SEARCH

                    # Create the queue item
                    snatch_queue_item = SnatchQueueItem(result.series, result.episodes, result)

                    # Add the queue item to the queue
                    app.manual_snatch_scheduler.action.add_item(snatch_queue_item)

                    self.success = False
                    while snatch_queue_item.success is False:
                        if snatch_queue_item.started and snatch_queue_item.success:
                            self.success = True
                        time.sleep(1)

                    # Give the CPU a break
                    time.sleep(common.cpu_presets[app.CPU_PRESET])

            elif self.manual_search and search_result:
                self.results = search_result
                self.success = True

                if self.manual_search_type == 'season':
                    ui.notifications.message('We have found season packs for {show_name}'
                                             .format(show_name=self.show.name),
                                             'These should become visible in the manual select page.')
                else:
                    ui.notifications.message('We have found results for {ep}'
                                             .format(ep=self.segment[0].pretty_name()),
                                             'These should become visible in the manual select page.')

            else:
                ui.notifications.message('No results were found')
                log.info(
                    'Unable to find {search_type} {season_pack}results for: {ep}', {
                        'search_type': ('forced', 'manual')[bool(self.manual_search)],
                        'season_pack': ('', 'season pack ')[bool(self.manual_search_type == 'season')],
                        'ep': self.segment[0].pretty_name()
                    }
                )

        # TODO: Remove catch all exception.
        except Exception:
            self.success = False
            log.debug(traceback.format_exc())

        # Keep a list of the last 100 executed searches
        fifo(FORCED_SEARCH_HISTORY, self, FORCED_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        self.finish()
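All of the run() methods above follow the same lifecycle: call the base class run(), set started, do the work inside a try block, coerce a still-None success to False, and call finish(). The skeleton below is a hypothetical stand-in for generic_queue.QueueItem that only illustrates that contract; it is not the project's implementation:

class QueueItemSketch(object):
    """Bare-bones outline of the started/success/finish contract shown above."""

    def __init__(self, name):
        self.name = name
        self.started = False
        self.success = None  # None means the work has not been attempted yet

    def run(self):
        self.started = True
        try:
            self.success = self.do_work()
        except Exception:
            # The examples above also fall back to False on any error.
            self.success = False
        if self.success is None:
            self.success = False
        self.finish()

    def do_work(self):
        """Override with the actual search or snatch logic."""
        raise NotImplementedError

    def finish(self):
        """Hook for cleanup or notifying the queue; a no-op in this sketch."""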