def get_url_with_papi_access(self, url, type, cookies=None, timeout=None, saveToDb=True):
    """
    Execute a GET request against the indexer and record the access as an IndexerApiAccess entry.

    :param url: URL to request
    :param type: type of API access (e.g. "search", "nzb") stored with the entry
    :param cookies: optional cookies passed through to the underlying request
    :param timeout: optional request timeout
    :param saveToDb: if True the IndexerApiAccess entry and the indexer status are persisted
    :return: tuple of (response or None on failure, IndexerApiAccess entry, indexer status or None)
    """
    papiaccess = IndexerApiAccess(indexer=self.indexer, type=type, url=url, time=arrow.utcnow().datetime)
    try:
        # request.authorization is only available inside a Flask request context
        papiaccess.username = request.authorization.username if request.authorization is not None else None
    except RuntimeError:
        # Is thrown when we're searching which is run in a thread. When downloading NFOs or whatever this will work
        pass
    indexerStatus = None
    try:
        time_before = arrow.utcnow()
        response = self.get(url, cookies=cookies, timeout=timeout)
        response.raise_for_status()
        time_after = arrow.utcnow()
        # total_seconds() covers the whole duration in one go; the previous
        # "seconds * 1000 + microseconds / 1000" arithmetic silently dropped
        # any full-day component of the timedelta
        papiaccess.response_time = int((time_after - time_before).total_seconds() * 1000)
        papiaccess.response_successful = True
        indexerStatus = self.handle_indexer_success(saveIndexerStatus=saveToDb)
    except RequestException as e:
        self.error("Error while connecting to URL %s: %s" % (url, str(e)))
        papiaccess.error = "Connection failed: %s" % removeSensitiveData(str(e))
        response = None
        indexerStatus = self.handle_indexer_failure("Connection failed: %s" % removeSensitiveData(str(e)), saveIndexerStatus=saveToDb)
    finally:
        if saveToDb:
            papiaccess.save()
    return response, papiaccess, indexerStatus
# Example #2
 def get_url_with_papi_access(self, url, type, cookies=None, timeout=None, saveToDb=True):
     """
     Fetch *url* from the indexer while logging the access as an IndexerApiAccess entry.

     :param url: URL to request
     :param type: type of API access (e.g. "search", "nzb") stored with the entry
     :param cookies: optional cookies forwarded to the request
     :param timeout: optional request timeout
     :param saveToDb: if True the access entry and indexer status are persisted
     :return: tuple of (response or None on failure, IndexerApiAccess entry, indexer status or None)
     """
     papiaccess = IndexerApiAccess(indexer=self.indexer, type=type, url=url, time=arrow.utcnow().datetime)
     try:
         # Only available inside a Flask request context
         auth = request.authorization
         papiaccess.username = auth.username if auth is not None else None
     except RuntimeError:
         # Raised outside a request context, i.e. when searching from a worker thread.
         # When downloading NFOs or similar this will work.
         pass
     indexerStatus = None
     started = arrow.utcnow()
     try:
         response = self.get(url, cookies=cookies, timeout=timeout)
         response.raise_for_status()
         elapsed = started - started  # placeholder removed below
     except RequestException as e:
         message = "Connection failed: %s" % removeSensitiveData(str(e))
         self.logger.error("Error while connecting to URL %s: %s" % (url, str(e)))
         papiaccess.error = message
         response = None
         indexerStatus = self.handle_indexer_failure(message, saveIndexerStatus=saveToDb)
     else:
         elapsed = arrow.utcnow() - started
         papiaccess.response_time = elapsed.seconds * 1000 + (elapsed.microseconds / 1000)
         papiaccess.response_successful = True
         indexerStatus = self.handle_indexer_success(saveIndexerStatus=saveToDb)
     finally:
         if saveToDb:
             papiaccess.save()
     return response, papiaccess, indexerStatus
    def get_url_with_papi_access(self, url, type, cookies=None, timeout=None):
        """
        Request *url* from the indexer and persist an IndexerApiAccess entry describing the attempt.

        :param url: URL to request
        :param type: type of API access stored with the entry
        :param cookies: optional cookies forwarded to the request
        :param timeout: optional request timeout
        :return: tuple of (response or None on failure, IndexerApiAccess entry)
        """
        papiaccess = IndexerApiAccess(indexer=self.indexer, type=type, url=url, time=arrow.utcnow().datetime)

        response = None
        try:
            started = arrow.utcnow()
            response = self.get(url, cookies=cookies, timeout=timeout)
            response.raise_for_status()
            elapsed = arrow.utcnow() - started
            papiaccess.response_time = elapsed.seconds * 1000 + (elapsed.microseconds / 1000)
            papiaccess.response_successful = True
            self.handle_indexer_success()
        except RequestException as e:
            self.logger.error("Error while connecting to URL %s: %s" % (url, str(e)))
            papiaccess.error = "Connection failed: %s" % str(e)
            response = None
            self.handle_indexer_failure("Connection failed: %s" % str(e))
        finally:
            # Always record the access, whether it succeeded or not
            papiaccess.save()
        return response, papiaccess
# Example #4
def download_nzb_and_log(indexer_name, provider_guid, title, searchid):
    """
    Gets the NZB link from the indexer using the guid, downloads it and logs the download

    :param indexer_name: name of the indexer
    :param provider_guid: guid to build link
    :param title: the title to build the link
    :param searchid: the id of the IndexerSearch entry so we can link the download to a search
    :return: IndexerNzbDownloadResult on success, None if the download failed, or an error
             string when no enabled indexer matches indexer_name
    """
    for p in indexers.enabled_indexers:
        if p.name == indexer_name:

            link = p.get_nzb_link(provider_guid, title)
            indexer = Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
            psearch = IndexerSearch.get((IndexerSearch.indexer == indexer) & (IndexerSearch.search == searchid))
            papiaccess = IndexerApiAccess(indexer=p.indexer, type="nzb", url=link, indexer_search=psearch)
            papiaccess.save()

            internallink, guid = get_nzb_link_and_guid(indexer_name, provider_guid, searchid, title)
            pnzbdl = IndexerNzbDownload(indexer=indexer, indexer_search=searchid, api_access=papiaccess, mode="serve", title=title, guid=internallink)
            pnzbdl.save()
            try:
                r = p.get(link, timeout=10)
                r.raise_for_status()

                papiaccess.response_successful = True
                # r.elapsed is a timedelta; ".microseconds / 1000" only measured the
                # sub-second fraction and dropped whole seconds. Use total_seconds()
                # so downloads taking >= 1s are recorded correctly (in milliseconds).
                papiaccess.response_time = int(r.elapsed.total_seconds() * 1000)

                return IndexerNzbDownloadResult(content=r.content, headers=r.headers)
            except RequestException as e:
                logger.error("Error while connecting to URL %s: %s" % (link, str(e)))
                papiaccess.error = str(e)
                return None
            finally:
                # Persist success/error state; runs before the returns above take effect
                papiaccess.save()
    else:
        # for/else: only reached when no enabled indexer matched indexer_name
        return "Unable to find NZB link"
    def execute_queries(self, queries, searchRequest):
        """
        Run all given search queries against the indexer and collect the parsed results.

        :param queries: list of search URLs to execute; may grow during parsing (e.g. when a
                        next result page should also be loaded)
        :param searchRequest: the search request used when parsing the returned pages
        :return: QueriesExecutionResult with the parsed entries, the DB entries and the indexer status
        """
        if len(queries) == 0:
            return QueriesExecutionResult(didsearch=False, results=[], indexerSearchEntry=None, indexerApiAccessEntry=None, indexerStatus=None, total=0, loaded_results=0, total_known=True, has_more=False)
        results = []
        executed_queries = set()
        psearch = IndexerSearch(indexer=self.indexer)
        papiaccess = IndexerApiAccess()
        indexerStatus = None
        total_results = 0
        total_known = False
        has_more = False
        while len(queries) > 0:
            query = queries.pop()
            if query in executed_queries:
                # To make sure that in case an offset is reported wrong or we have a bug we don't get stuck in an endless loop
                continue

            try:
                request, papiaccess, indexerStatus = self.get_url_with_papi_access(query, "search", saveToDb=False)
                papiaccess.indexer_search = psearch

                executed_queries.add(query)

                if request is not None:
                    self.check_auth(request.text)
                    self.debug("Successfully loaded URL %s" % request.url)
                    try:
                        parsed_results = self.process_query_result(request.content, searchRequest)
                        results.extend(parsed_results.entries)  # Retrieve the processed results
                        queries.extend(parsed_results.queries)  # Add queries that were added as a result of the parsing, e.g. when the next result page should also be loaded
                        total_results += parsed_results.total
                        total_known = parsed_results.total_known
                        has_more = parsed_results.has_more

                        papiaccess.response_successful = True
                        # Keep the returned status up to date, like the failure branches below
                        indexerStatus = self.handle_indexer_success(False)
                    except IndexerResultParsingException as e:
                        self.error("Error while processing search results from indexer %s" % e)
                    except Exception:
                        self.exception("Error while processing search results from indexer %s" % self)
                        raise IndexerResultParsingException("Error while parsing the results from indexer", self)
            except IndexerAuthException as e:
                self.error("Unable to authorize with %s: %s" % (e.search_module, e.message))
                papiaccess.error = "Authorization error :%s" % e.message
                indexerStatus = self.handle_indexer_failure(reason="Authentication failed", disable_permanently=True)
                papiaccess.response_successful = False
            except IndexerAccessException as e:
                self.error("Unable to access %s: %s" % (e.search_module, e.message))
                papiaccess.error = "Access error: %s" % e.message
                indexerStatus = self.handle_indexer_failure(reason="Access failed")
                papiaccess.response_successful = False
            except IndexerResultParsingException as e:
                # Was "papiaccess.exception", which no other handler uses; store on .error like the rest
                papiaccess.error = "Access error: %s" % e.message
                indexerStatus = self.handle_indexer_failure(reason="Parsing results failed")
                papiaccess.response_successful = False
            except Exception as e:
                self.exception("An error occurred while searching: %s", e)
                if papiaccess is not None:
                    papiaccess.error = "Unknown error :%s" % e
                    papiaccess.response_successful = False
            finally:
                if papiaccess is not None:
                    psearch.successful = papiaccess.response_successful
                else:
                    self.error("Unable to save API response to database")
                psearch.resultsCount = total_results
        return QueriesExecutionResult(didsearch=True, results=results, indexerSearchEntry=psearch, indexerApiAccessEntry=papiaccess, indexerStatus=indexerStatus, total=total_results, loaded_results=len(results), total_known=total_known, has_more=has_more)
# Example #6
    def execute_queries(self, queries, searchRequest):
        """
        Run all given search queries against the indexer and collect the parsed results.

        :param queries: list of search URLs to execute; may grow during parsing (e.g. when
                        the next result page should also be loaded)
        :param searchRequest: the search request used when parsing the returned pages
        :return: QueriesExecutionResult with the parsed entries, DB entries, indexer status
                 and the per-reason rejection counts
        """
        if len(queries) == 0:
            return QueriesExecutionResult(didsearch=False,
                                          results=[],
                                          indexerSearchEntry=None,
                                          indexerApiAccessEntry=None,
                                          indexerStatus=None,
                                          total=0,
                                          loaded_results=0,
                                          total_known=True,
                                          has_more=False,
                                          rejected=self.getRejectedCountDict())
        results = []
        executed_queries = set()
        psearch = IndexerSearch(indexer=self.indexer)
        papiaccess = IndexerApiAccess()
        indexerStatus = None
        total_results = 0
        total_known = False
        has_more = False
        rejected = self.getRejectedCountDict()
        while len(queries) > 0:
            query = queries.pop()
            if query in executed_queries:
                # To make sure that in case an offset is reported wrong or we have a bug we don't get stuck in an endless loop
                continue

            try:
                request, papiaccess, indexerStatus = self.get_url_with_papi_access(
                    query, "search", saveToDb=False)
                papiaccess.indexer_search = psearch

                executed_queries.add(query)

                if request is not None:
                    if request.text == "":
                        raise IndexerResultParsingException(
                            "Indexer returned an empty page", self)
                    self.check_auth(request.text)
                    self.debug("Successfully loaded URL %s" % request.url)
                    try:

                        parsed_results = self.process_query_result(
                            request.content, searchRequest)
                        results.extend(parsed_results.entries
                                       )  # Retrieve the processed results
                        queries.extend(
                            parsed_results.queries
                        )  # Add queries that were added as a result of the parsing, e.g. when the next result page should also be loaded
                        total_results += parsed_results.total
                        total_known = parsed_results.total_known
                        has_more = parsed_results.has_more
                        rejected = parsed_results.rejected

                        papiaccess.response_successful = True
                        indexerStatus = self.handle_indexer_success(False)
                    except Exception:
                        self.exception(
                            "Error while processing search results from indexer %s"
                            % self)
                        raise IndexerResultParsingException(
                            "Error while parsing the results from indexer",
                            self)
            except IndexerAuthException as e:
                papiaccess.error = "Authorization error :%s" % e.message
                self.error(papiaccess.error)
                indexerStatus = self.handle_indexer_failure(
                    reason="Authentication failed", disable_permanently=True)
                papiaccess.response_successful = False
            except IndexerAccessException as e:
                papiaccess.error = "Access error: %s" % e.message
                self.error(papiaccess.error)
                indexerStatus = self.handle_indexer_failure(
                    reason="Access failed")
                papiaccess.response_successful = False
            except IndexerResultParsingException as e:
                papiaccess.error = "Access error: %s" % e.message
                self.error(papiaccess.error)
                indexerStatus = self.handle_indexer_failure(
                    reason="Parsing results failed")
                papiaccess.response_successful = False
            except Exception as e:
                # Fixed duplicated word in the log message ("error error")
                self.exception("An error occurred while searching: %s",
                               e)
                if papiaccess is not None:
                    papiaccess.error = "Unknown error :%s" % e
                    papiaccess.response_successful = False
            finally:
                if papiaccess is not None:
                    psearch.successful = papiaccess.response_successful
                else:
                    self.error("Unable to save API response to database")
                psearch.resultsCount = total_results
        return QueriesExecutionResult(didsearch=True,
                                      results=results,
                                      indexerSearchEntry=psearch,
                                      indexerApiAccessEntry=papiaccess,
                                      indexerStatus=indexerStatus,
                                      total=total_results,
                                      loaded_results=len(results),
                                      total_known=total_known,
                                      has_more=has_more,
                                      rejected=rejected)