Example #1
def search_and_handle_db(dbsearch, indexers_and_search_requests):
    results_by_indexer = start_search_futures(indexers_and_search_requests)
    dbsearch.username = request.authorization.username if request.authorization is not None else None
    saveSearch(dbsearch)
    with databaseLock:
        with db.atomic():
            for indexer, result in results_by_indexer.items():
                if result.didsearch:
                    indexersearchentry = result.indexerSearchEntry
                    indexersearchentry.search = dbsearch
                    indexersearchentry.save()
                    result.indexerApiAccessEntry.username = request.authorization.username if request.authorization is not None else None
                    try:
                        result.indexerApiAccessEntry.indexer = Indexer.get(
                            Indexer.name == indexer)
                        result.indexerApiAccessEntry.save()
                        result.indexerStatus.save()
                    except Indexer.DoesNotExist:
                        logger.error(
                            "Tried to save indexer API access but no indexer with name %s was found in the database. Adding it now. This shouldn't've happened. If possible send a bug report with a full log."
                            % indexer)
                        Indexer().create(name=indexer)
                    except Exception as e:
                        logger.exception("Error saving IndexerApiAccessEntry")

    return {"results": results_by_indexer, "dbsearch": dbsearch}
Example #2
def init_indexer_table_entry(indexer_name):
    try:
        Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
    except Indexer.DoesNotExist as e:
        logger.info("Unable to find indexer with name %s in database. Will add it" % indexer_name)
        indexer = Indexer().create(name=indexer_name)
        IndexerStatus.create_or_get(indexer=indexer, first_failure=None, latest_failure=None, disabled_until=None)
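Note: init_indexer_table_entry is a hand-rolled get-or-create keyed on a case-insensitive name match. A minimal self-contained sketch of the same pattern (hypothetical stripped-down model, not the project's schema; written against peewee 3):

from peewee import SqliteDatabase, Model, CharField, fn

db = SqliteDatabase(":memory:")

class Indexer(Model):  # hypothetical minimal stand-in for the project's model
    name = CharField()

    class Meta:
        database = db

db.create_tables([Indexer])

def ensure_indexer(indexer_name):
    # Look up case-insensitively; create the row only on a miss.
    try:
        return Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
    except Indexer.DoesNotExist:
        return Indexer.create(name=indexer_name)

first = ensure_indexer("NZBs.org")
second = ensure_indexer("nzbs.ORG")  # case-insensitive hit, no duplicate row
assert first.id == second.id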
Example #3
def init_indexer_table_entry(indexer_name):
    try:
        Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
    except Indexer.DoesNotExist as e:
        logger.info(
            "Unable to find indexer with name %s in database. Will add it" %
            indexer_name)
        Indexer().create(name=indexer_name)
Example #4
    def setUp(self):
        set_and_drop()
        config.load("testsettings.cfg")
        self.nzbsorgdb = Indexer(name="NZBs.org")
        self.nzbsorgdb.save()
        self.dognzbdb = Indexer(name="DOGNzb")
        self.dognzbdb.save()

        config.indexerSettings.newznab1.enabled = True
        config.indexerSettings.newznab1.host.set("http://127.0.0.1:5001/nzbsorg")
        config.indexerSettings.newznab1.apikey.set("apikeynzbsorg")
        self.n1 = NewzNab(config.indexerSettings.newznab1)
        self.n2 = NewzNab(config.indexerSettings.newznab2)
Example #5
    def setUp(self):
        set_and_drop()

        self.indexercom = Indexer(name="indexer.com")
        self.indexercom.save()

        self.newznab1 = Bunch()
        self.newznab1.enabled = True
        self.newznab1.name = "indexer.com"
        self.newznab1.host = "https://indexer.com"
        self.newznab1.apikey = "apikeyindexer.com"
        self.newznab1.timeout = None
        self.newznab1.score = 0
        self.newznab1.search_ids = ["imdbid", "rid", "tvdbid"]
        self.n1 = NewzNab(self.newznab1)
Example #6
def getIndexerDownloadStats():
    results = []
    allDownloadsCount = IndexerNzbDownload.select().count()
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        dlCount = IndexerNzbDownload().\
            select(Indexer.name, IndexerApiAccess.response_successful). \
            join(IndexerApiAccess, JOIN.LEFT_OUTER). \
            join(Indexer, JOIN.LEFT_OUTER).\
            where(Indexer.id == p).\
            count()
        results.append({
            "name": p.name,
            "total": dlCount,
            "share": 100 / (allDownloadsCount / dlCount) if allDownloadsCount > 0 and dlCount > 0 else 0
        })
    results = sorted(results, key=lambda x: x["name"])
    results = sorted(results, key=lambda x: x["share"], reverse=True)
    return results
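Note: the share expression 100 / (allDownloadsCount / dlCount) is algebraically 100 * dlCount / allDownloadsCount, but only under true division; on Python 2 the inner integer division truncates first (e.g. 25 of 200 downloads gives 100 / 8 = 12 instead of 12.5). A small sketch of the intended calculation:

def download_share(dl_count, all_count):
    # Percentage of all recorded downloads attributed to one indexer.
    if all_count <= 0 or dl_count <= 0:
        return 0
    return 100.0 * dl_count / all_count

assert download_share(25, 200) == 12.5
assert download_share(0, 200) == 0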
Example #7
def get_avg_indexer_response_times(after, before):
    result = []
    response_times = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        where = (IndexerApiAccess.response_successful) & (
            IndexerApiAccess.indexer == p
        ) & (IndexerApiAccess.time > after) & (IndexerApiAccess.time < before)
        avg_response_time = IndexerApiAccess().select(
            fn.AVG(IndexerApiAccess.response_time)).where(where).tuples()[0][0]
        if avg_response_time:
            response_times.append({
                "name": p.name,
                "avgResponseTime": int(avg_response_time)
            })
    where = (IndexerApiAccess.response_successful) & (
        IndexerApiAccess.response_time is not None) & (
            IndexerApiAccess.time > after) & (IndexerApiAccess.time < before)
    avg_response_time = IndexerApiAccess().select(
        fn.AVG(IndexerApiAccess.response_time)).where(where).tuples()[0][0]
    for i in response_times:
        delta = i["avgResponseTime"] - avg_response_time
        i["delta"] = delta
        result.append(i)
    result = sorted(result, key=lambda x: x["name"])
    result = sorted(result, key=lambda x: x["avgResponseTime"])

    return result
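Note: peewee builds WHERE clauses through operator overloading, so a plain Python "is not None" test against a field is evaluated immediately to a boolean and never reaches the SQL; the field's is_null() method is what renders an IS NOT NULL clause. A minimal sketch with a hypothetical model:

from peewee import SqliteDatabase, Model, IntegerField

db = SqliteDatabase(":memory:")

class Access(Model):  # hypothetical stand-in for IndexerApiAccess
    response_time = IntegerField(null=True)

    class Meta:
        database = db

db.create_tables([Access])
Access.create(response_time=None)
Access.create(response_time=120)

# is_null(False) renders "response_time IS NOT NULL" in the query
rows = Access.select().where(Access.response_time.is_null(False))
assert [a.response_time for a in rows] == [120]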
Example #8
def get_indexer_response_times():
    result = []
    for p in Indexer.select().order_by(Indexer.name):
        print("Limiting stats to 100 for testing only!")
        result.append({"key": p.name,
                       "values": [{"responseTime": x.response_time, "date": x.time.timestamp} for x in IndexerApiAccess().select(IndexerApiAccess.response_time, IndexerApiAccess.time).where((IndexerApiAccess.response_successful) & (IndexerApiAccess.indexer == p)).join(Indexer).limit(1)]})
    return result
Example #9
File: stats.py Project: gspu/nzbhydra
def get_avg_indexer_response_times(after, before):
    result = []
    response_times = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        where = (IndexerApiAccess.response_successful) & (IndexerApiAccess.indexer == p) & (IndexerApiAccess.time > after) & (IndexerApiAccess.time < before)
        avg_response_time = IndexerApiAccess().select(fn.AVG(IndexerApiAccess.response_time)).where(where).tuples()[0][0]
        if avg_response_time:
            response_times.append({"name": p.name, "avgResponseTime": int(avg_response_time)})
    where = (IndexerApiAccess.response_successful) & (IndexerApiAccess.response_time is not None) & (IndexerApiAccess.time > after) & (IndexerApiAccess.time < before)
    avg_response_time = IndexerApiAccess().select(fn.AVG(IndexerApiAccess.response_time)).where(where).tuples()[0][0]
    for i in response_times:
        delta = i["avgResponseTime"] - avg_response_time
        i["delta"] = delta
        result.append(i)
    result = sorted(result, key=lambda x: x["name"])
    result = sorted(result, key=lambda x: x["avgResponseTime"])

    return result
Example #10
def get_avg_indexer_search_results_share():
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        result = database.db.execute_sql(
            "select (100 * (select cast(sum(ps.resultsCount) as float) from indexersearch ps "
            "where ps.search_id in (select ps.search_id from indexersearch ps, search s where ps.indexer_id == %d and ps.search_id = s.id and ps.successful and (s.episode NOT NULL  or s.season not NULL  or s.identifier_key not null or s.query not null)) and ps.indexer_id == %d)) "
            "/ "
            "(select sum(ps.resultsCount) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps, search s where ps.indexer_id == %d and ps.search_id = s.id and ps.successful and (s.episode NOT NULL  or s.season not NULL  or s.identifier_key not null or s.query not null))) as sumAllResults"
            % (p.id, p.id, p.id)).fetchone()
        results.append({
            "name": p.name,
            "avgResultsShare": int(result[0]) if result[0] is not None else "N/A"
        })
    results = sorted(results, key=lambda x: x["name"])
    results = sorted(
        results,
        key=lambda x: 0
        if x["avgResultsShare"] == "N/A" else x["avgResultsShare"],
        reverse=True)
    return results
Example #11
def get_indexer_response_times():
    result = []
    for p in Indexer.select():
        print("Limiting stats to 100 for testing only!")
        result.append({"key": p.name,
                       "values": [{"responseTime": x.response_time, "date": x.time.timestamp} for x in IndexerApiAccess().select(IndexerApiAccess.response_time, IndexerApiAccess.time).where((IndexerApiAccess.response_successful) & (IndexerApiAccess.indexer == p)).join(Indexer).limit(1)]})
    return result
Example #12
def get_indexer_nzb_link(indexer_name, indexerguid, title, searchid, mode, log_api_access):
    """
    Build a link that leads to the actual NZB on the indexer using the given information. We log this as an indexer API access and an NZB download because this is only called
    when the NZB will actually be downloaded later (by us or a downloader)
    :return: str
    """
    for p in indexers.enabled_indexers:
        if p.name.strip() == indexer_name.strip():
            link = p.get_nzb_link(indexerguid, title)

            # Log to database
            indexer = Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
            papiaccess = IndexerApiAccess(indexer=p.indexer, type="nzb", url=link, response_successful=None, indexer_search=searchid) if log_api_access else None
            if papiaccess is not None:  # papiaccess is None when log_api_access is False
                try:
                    papiaccess.username = request.authorization.username if request.authorization is not None else None
                except RuntimeError:  # raised when called outside a request context
                    pass
                papiaccess.save()
            pnzbdl = IndexerNzbDownload(indexer=indexer, indexer_search=searchid, api_access=papiaccess, mode=mode, title=title, guid=indexerguid)
            pnzbdl.save()

            return link, papiaccess, pnzbdl

    else:
        logger.error("Did not find indexer with name %s" % indexer_name)
        return None, None, None
Example #13
def get_avg_indexer_search_results_share():
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        result = database.db.execute_sql(
            "select (100 * (select cast(sum(ps.results) as float) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d) and ps.indexer_id == %d)) / (select sum(ps.results) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d)) as sumAllResults" % (
                p.id, p.id, p.id)).fetchone()
        results.append({"name": p.name, "avgResultsShare": result[0] if result[0] is not None else "N/A"})
    return results
Example #14
def get_avg_indexer_search_results_share():
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
            if indexer.settings.name == "Womble":
                logger.debug("Skipping download stats for Womble because we exclude update queries without specific query or ID")
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        result = database.db.execute_sql(
            """
            SELECT (100 *
            (SELECT cast(sum(ps.resultsCount) AS FLOAT)
             FROM indexersearch ps
             WHERE ps.search_id IN (SELECT ps.search_id
                                    FROM indexersearch ps, search s
                                    WHERE ps.indexer_id == %d AND ps.search_id = s.id AND ps.successful AND (s.episode NOT NULL OR s.season NOT NULL OR s.identifier_key NOT NULL OR s.query NOT NULL)) AND ps.indexer_id == %d))
           /
           (SELECT sum(ps.resultsCount)
            FROM indexersearch ps
            WHERE ps.search_id IN (SELECT ps.search_id
                                   FROM indexersearch ps, search s
                                   WHERE ps.indexer_id == %d AND ps.search_id = s.id AND ps.successful AND (s.episode NOT NULL OR s.season NOT NULL OR s.identifier_key NOT NULL OR s.query NOT NULL))) AS sumAllResults
             """
            % (p.id, p.id, p.id)).fetchone()
        avgResultsShare = int(result[0]) if result is not None and result[0] is not None else "N/A"

        result = database.db.execute_sql(
            """
            SELECT avg(
                CASE WHEN uniqueResults > 0
                  THEN
                    100 / (processedResults * 1.0 / uniqueResults)
                ELSE 0
                END) as avgUniqueResults
            FROM indexersearch
            WHERE processedResults IS NOT NULL AND uniqueResults IS NOT NULL
                  AND indexer_id == %d
            GROUP BY indexer_id;

            """
            % p.id).fetchone()
        if p.name in ["NZBIndex", "Binsearch", "NZBClub"]:
            avgUniqueResults = "-"
        elif result is not None and result[0] is not None:
            avgUniqueResults = int(result[0])
        else:
            avgUniqueResults = "N/A"
        results.append({"name": p.name, "avgResultsShare": avgResultsShare, "avgUniqueResults": avgUniqueResults})
    results = sorted(results, key=lambda x: x["name"])
    results = sorted(results, key=lambda x: 0 if x["avgResultsShare"] == "N/A" else x["avgResultsShare"], reverse=True)
    return results
Example #15
def get_avg_indexer_search_results_share(afterSql, beforeSql):
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
            if indexer.settings.name == "Womble":
                logger.debug("Skipping download stats for Womble because we exclude update queries without specific query or ID")
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        innerSelect = """(SELECT ps.search_id
                                    FROM indexersearch ps, search s
                                    WHERE ps.indexer_id == %(id)d AND ps.search_id = s.id AND ps.successful AND (s.episode NOT NULL OR s.season NOT NULL OR s.identifier_key NOT NULL OR s.query NOT NULL)) AND ps.time > %(after)s and ps.time < %(before)s""" % {"id": p.id, "after": afterSql, "before": beforeSql}

        result = database.db.execute_sql(
            """
            SELECT (100 *
            (SELECT cast(sum(ps.resultsCount) AS FLOAT)
             FROM indexersearch ps
             WHERE ps.search_id IN %s AND ps.indexer_id == %d))
           /
           (SELECT sum(ps.resultsCount)
            FROM indexersearch ps
            WHERE ps.search_id IN %s) AS sumAllResults
             """
            % (innerSelect, p.id, innerSelect)).fetchone()
        avgResultsShare = int(result[0]) if result is not None and len(result) > 0 and result[0] is not None else "N/A"

        result = database.db.execute_sql(
            """
            SELECT avg(
                CASE WHEN uniqueResults > 0
                  THEN
                    100 / (processedResults * 1.0 / uniqueResults)
                ELSE 0
                END) as avgUniqueResults
            FROM indexersearch s
            WHERE processedResults IS NOT NULL AND uniqueResults IS NOT NULL
                  AND s.indexer_id == %(id)d AND s.time > %(after)s and s.time < %(before)s
            GROUP BY indexer_id;

            """
            % {"id": p.id, "after": afterSql, "before": beforeSql}).fetchone()
        if p.name in ["NZBIndex", "Binsearch", "NZBClub"]:
            avgUniqueResults = "-"
        elif result is not None and len(result) > 0 and result[0] is not None:
            avgUniqueResults = int(result[0])
        else:
            avgUniqueResults = "N/A"
        results.append({"name": p.name, "avgResultsShare": avgResultsShare, "avgUniqueResults": avgUniqueResults})
    results = sorted(results, key=lambda x: x["name"])
    results = sorted(results, key=lambda x: 0 if x["avgResultsShare"] == "N/A" else x["avgResultsShare"], reverse=True)
    return results
Example #16
def getIndexerDownloadStats():
    results = []
    allDownloadsCount = IndexerNzbDownload.select().count()
    for p in Indexer.select().order_by(Indexer.name):
        dlCount = IndexerNzbDownload().select(Indexer.name, IndexerApiAccess.response_successful).join(IndexerSearch, JOIN.LEFT_OUTER).join(Search, JOIN.LEFT_OUTER).switch(IndexerNzbDownload).join(IndexerApiAccess, JOIN.LEFT_OUTER).join(Indexer, JOIN.LEFT_OUTER).where(Indexer.id == p).count()
        results.append({"name": p.name,
                        "total": dlCount,
                        "share": 100 / (allDownloadsCount / dlCount) if allDownloadsCount > 0 and dlCount > 0 else 0})
    return results
Example #17
def get_avg_indexer_search_results_share(afterSql, beforeSql):
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        innerSelect = """(SELECT ps.search_id
                                    FROM indexersearch ps, search s
                                    WHERE ps.indexer_id == %(id)d AND ps.search_id = s.id AND ps.successful AND (s.episode NOT NULL OR s.season NOT NULL OR s.identifier_key NOT NULL OR s.query NOT NULL)) AND ps.time > %(after)s and ps.time < %(before)s""" % {"id": p.id, "after": afterSql,
                                                                                                                                                                                                                                                                   "before": beforeSql}

        result = database.db.execute_sql(
            """
            SELECT (100 *
            (SELECT cast(sum(ps.resultsCount) AS FLOAT)
             FROM indexersearch ps
             WHERE ps.search_id IN %s AND ps.indexer_id == %d))
           /
           (SELECT sum(ps.resultsCount)
            FROM indexersearch ps
            WHERE ps.search_id IN %s) AS sumAllResults
             """
            % (innerSelect, p.id, innerSelect)).fetchone()
        avgResultsShare = int(result[0]) if result is not None and len(result) > 0 and result[0] is not None else "N/A"

        result = database.db.execute_sql(
            """
            SELECT avg(
                CASE WHEN uniqueResults > 0
                  THEN
                    100 / (processedResults * 1.0 / uniqueResults)
                ELSE 0
                END) as avgUniqueResults
            FROM indexersearch s
            WHERE processedResults IS NOT NULL AND uniqueResults IS NOT NULL
                  AND s.indexer_id == %(id)d AND s.time > %(after)s and s.time < %(before)s
            GROUP BY indexer_id;

            """
            % {"id": p.id, "after": afterSql, "before": beforeSql}).fetchone()
        if p.name in ["NZBIndex", "Binsearch", "NZBClub"]:
            avgUniqueResults = "-"
        elif result is not None and len(result) > 0 and result[0] is not None:
            avgUniqueResults = int(result[0])
        else:
            avgUniqueResults = "N/A"
        results.append({"name": p.name, "avgResultsShare": avgResultsShare, "avgUniqueResults": avgUniqueResults})
    results = sorted(results, key=lambda x: x["name"])
    results = sorted(results, key=lambda x: 0 if x["avgResultsShare"] == "N/A" else x["avgResultsShare"], reverse=True)
    return results
Example #18
def getIndexerBasedDownloadStats(afterSql, beforeSql):
    enabledIndexerIds = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug(
                    "Skipping download stats for %s because it's disabled" %
                    p.name)
                continue
            enabledIndexerIds.append(str(p.id))
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
    enabledIndexerIds = ", ".join(enabledIndexerIds)
    query = """
    SELECT
      indexer.name,
      count(*) AS total,
      CASE WHEN count(*) > 0
        THEN
          100 / (1.0 * countall.countall / count(*))
      ELSE 0
      END
               AS share
    FROM
      indexernzbdownload dl,
      (SELECT count(*) AS countall
       FROM
         indexernzbdownload dl
         LEFT OUTER JOIN indexerapiaccess api
           ON dl.apiAccess_id = api.id
       WHERE api.indexer_id IN (%(enabledIndexerIds)s)
       AND api.time > %(afterSql)s AND api.time < %(beforeSql)s
       )
      countall
      LEFT OUTER JOIN indexerapiaccess api
        ON dl.apiAccess_id = api.id
      LEFT OUTER JOIN indexer indexer
        ON api.indexer_id = indexer.id
    WHERE api.indexer_id IN (%(enabledIndexerIds)s)
    GROUP BY indexer.id
    """ % {
        "enabledIndexerIds": enabledIndexerIds,
        "afterSql": afterSql,
        "beforeSql": beforeSql
    }
    stats = database.db.execute_sql(query).fetchall()
    stats = [{"name": x[0], "total": x[1], "share": x[2]} for x in stats]

    stats = sorted(stats, key=lambda x: x["name"])
    stats = sorted(stats, key=lambda x: x["share"], reverse=True)
    return stats
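Note: these stats functions splice ids and prebuilt date literals into the SQL text with %, which is workable because the values are generated internally and never come from user input. peewee's execute_sql also accepts driver-level parameters, which sidesteps quoting altogether; a tiny self-contained sketch:

from peewee import SqliteDatabase

db = SqliteDatabase(":memory:")
db.execute_sql("CREATE TABLE indexer (id INTEGER PRIMARY KEY, name TEXT)")
# "?" is SQLite's positional placeholder; the driver handles escaping
db.execute_sql("INSERT INTO indexer (name) VALUES (?)", ("NZBs.org",))

rows = db.execute_sql("SELECT name FROM indexer WHERE id = ?", (1,)).fetchall()
assert rows == [("NZBs.org",)]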
Example #19
File: stats.py Project: nzbis/nzbhydra
def get_avg_indexer_search_results_share():
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        result = database.db.execute_sql(
            "select (100 * (select cast(sum(ps.results) as float) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d) and ps.indexer_id == %d)) / (select sum(ps.results) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d)) as sumAllResults"
            % (p.id, p.id, p.id)).fetchone()
        results.append({
            "name": p.name,
            "avgResultsShare": result[0] if result[0] is not None else "N/A"
        })
    return results
Example #20
    def testHandleIndexerFailureAndSuccess(self):
        Indexer(module="newznab", name="newznab1").save()
        indexer_model = Indexer.get(Indexer.name == "newznab1")
        with freeze_time("2015-09-20 14:00:00", tz_offset=-4):
            sm = search_module.SearchModule(self.newznab1)
            sm.handle_indexer_failure(indexer_model)
            # First error, so level 1
            self.assertEqual(1, indexer_model.status.get().level)
            now = arrow.utcnow()
            first_failure = arrow.get(indexer_model.status.get().first_failure)
            disabled_until = arrow.get(indexer_model.status.get().disabled_until)
            self.assertEqual(now, first_failure)
            self.assertEqual(now.replace(minutes=+sm.disable_periods[1]), disabled_until)

            sm.handle_indexer_failure()
            self.assertEqual(2, indexer_model.status.get().level)
            disabled_until = arrow.get(indexer_model.status.get().disabled_until)
            self.assertEqual(now.replace(minutes=+sm.disable_periods[2]), disabled_until)

            sm.handle_indexer_success()
            self.assertEqual(1, indexer_model.status.get().level)
            self.assertEqual(arrow.get(0), indexer_model.status.get().disabled_until)
            self.assertIsNone(indexer_model.status.get().reason)
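Note: the test above encodes the failure-handling contract: each failure raises status.level by one and pushes disabled_until forward by sm.disable_periods[level] minutes, and a success lowers the level again. A simplified sketch of the escalation step (the period values here are made up for illustration):

import datetime

DISABLE_PERIODS = [0, 15, 30, 60, 180, 360]  # minutes; hypothetical values

def next_disabled_until(level, now):
    # A failure bumps the level (capped at the last period) and disables
    # the indexer until now + the period for that level.
    level = min(level + 1, len(DISABLE_PERIODS) - 1)
    return level, now + datetime.timedelta(minutes=DISABLE_PERIODS[level])

now = datetime.datetime(2015, 9, 20, 14, 0, 0)
level, until = next_disabled_until(0, now)  # first failure -> level 1
assert level == 1 and until == now + datetime.timedelta(minutes=15)
level, until = next_disabled_until(level, now)  # second failure -> level 2
assert level == 2 and until == now + datetime.timedelta(minutes=30)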
Example #21
def get_avg_indexer_response_times():
    result = []
    response_times = []
    for p in Indexer.select().order_by(Indexer.name):

        avg_response_time = IndexerApiAccess().select(fn.AVG(IndexerApiAccess.response_time)).where((IndexerApiAccess.response_successful) & (IndexerApiAccess.indexer == p)).tuples()[0][0]
        if avg_response_time:
            response_times.append({"name": p.name, "avgResponseTime": avg_response_time})
    avg_response_time = IndexerApiAccess().select(fn.AVG(IndexerApiAccess.response_time)).where((IndexerApiAccess.response_successful) & (IndexerApiAccess.response_time.is_null(False))).tuples()[0][0]  # is_null() keeps the NULL check inside the SQL
    for i in response_times:
        delta = i["avgResponseTime"] - avg_response_time
        i["delta"] = delta
        result.append(i)

    return result
Example #22
File: stats.py Project: gspu/nzbhydra
def getIndexerBasedDownloadStats(afterSql, beforeSql):
    enabledIndexerIds = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s because it's disabled" % p.name)
                continue
            enabledIndexerIds.append(str(p.id))
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
    enabledIndexerIds = ", ".join(enabledIndexerIds)
    query = """
    SELECT
      indexer.name,
      count(*) AS total,
      CASE WHEN count(*) > 0
        THEN
          100 / (1.0 * countall.countall / count(*))
      ELSE 0
      END
               AS share
    FROM
      indexernzbdownload dl,
      (SELECT count(*) AS countall
       FROM
         indexernzbdownload dl
         LEFT OUTER JOIN indexerapiaccess api
           ON dl.apiAccess_id = api.id
       WHERE api.indexer_id IN (%(enabledIndexerIds)s)
       AND api.time > %(afterSql)s AND api.time < %(beforeSql)s
       )
      countall
      LEFT OUTER JOIN indexerapiaccess api
        ON dl.apiAccess_id = api.id
      LEFT OUTER JOIN indexer indexer
        ON api.indexer_id = indexer.id
    WHERE api.indexer_id IN (%(enabledIndexerIds)s)
    AND api.time > %(afterSql)s AND api.time < %(beforeSql)s
    GROUP BY indexer.id
    """ % {"enabledIndexerIds": enabledIndexerIds, "afterSql": afterSql, "beforeSql": beforeSql}
    stats = database.db.execute_sql(query).fetchall()
    stats = [{"name": x[0], "total": x[1], "share": x[2]} for x in stats]

    stats = sorted(stats, key=lambda x: x["name"])
    stats = sorted(stats, key=lambda x: x["share"], reverse=True)
    return stats
Example #23
def get_avg_indexer_search_results_share():
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        result = database.db.execute_sql(
                "select (100 * (select cast(sum(ps.resultsCount) as float) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d) and ps.indexer_id == %d)) / (select sum(ps.resultsCount) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d)) as sumAllResults" % (
                    p.id, p.id, p.id)).fetchone()
        results.append({"name": p.name, "avgResultsShare": result[0] if result[0] is not None else "N/A"})
    return results
Example #24
def search_and_handle_db(dbsearch, indexers_and_search_requests):
    results_by_indexer = start_search_futures(indexers_and_search_requests)
    dbsearch.username = request.authorization.username if request.authorization is not None else None
    with db.atomic():
        dbsearch.save()
    for indexer, result in results_by_indexer.items():
        if result.didsearch:
            with db.atomic():
                indexersearchentry = result.indexerSearchEntry
                indexersearchentry.search = dbsearch
                indexersearchentry.save()
                result.indexerApiAccessEntry.username = request.authorization.username if request.authorization is not None else None
                try:
                    result.indexerApiAccessEntry.indexer = Indexer.get(Indexer.name == indexer)
                    result.indexerApiAccessEntry.save()
                    result.indexerStatus.save()
                except Exception:
                    logger.error("Error saving IndexerApiAccessEntry. Debug info: %s" % json.dumps(model_to_dict(result.indexerApiAccessEntry)))

    logger.debug("Returning search results now")
    return {"results": results_by_indexer, "dbsearch": dbsearch}
Example #25
def getIndexerDownloadStats():
    results = []
    allDownloadsCount = IndexerNzbDownload.select().count()
    for p in Indexer.select().order_by(Indexer.name):
        dlCount = (IndexerNzbDownload()
                   .select(Indexer.name, IndexerApiAccess.response_successful)
                   .join(IndexerSearch, JOIN.LEFT_OUTER)
                   .join(Search, JOIN.LEFT_OUTER)
                   .switch(IndexerNzbDownload)
                   .join(IndexerApiAccess, JOIN.LEFT_OUTER)
                   .join(Indexer, JOIN.LEFT_OUTER)
                   .where(Indexer.id == p)
                   .count())
        results.append({
            "name": p.name,
            "total": dlCount,
            "share": 100 / (allDownloadsCount / dlCount) if allDownloadsCount > 0 and dlCount > 0 else 0
        })
    return results
Example #26
def getIndexerDownloadStats():
    results = []
    allDownloadsCount = IndexerNzbDownload.select().count()
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        dlCount = IndexerNzbDownload().\
            select(Indexer.name, IndexerApiAccess.response_successful). \
            join(IndexerApiAccess, JOIN.LEFT_OUTER). \
            join(Indexer, JOIN.LEFT_OUTER).\
            where(Indexer.id == p).\
            count()
        results.append({"name": p.name,
                        "total": dlCount,
                        "share": 100 / (allDownloadsCount / dlCount) if allDownloadsCount > 0 and dlCount > 0 else 0})
    return results
Example #27
def get_nzb_link(indexer_name, guid, title, searchid):
    """
    Build a link that leads to the actual NZB on the indexer using the given information. We log this as an indexer API access and an NZB download because this is only called
    when the NZB will actually be downloaded later (by us or a downloader)
    :return: str
    """
    for p in indexers.enabled_indexers:
        if p.name == indexer_name:
            link = p.get_nzb_link(guid, title)

            # Log to database
            indexer = Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
            papiaccess = IndexerApiAccess(indexer=p.indexer, type="nzb", url=link, response_successful=None, indexer_search=searchid)
            papiaccess.save()
            pnzbdl = IndexerNzbDownload(indexer=indexer, indexer_search=searchid, api_access=papiaccess, mode="redirect")
            pnzbdl.save()

            return link

    else:
        logger.error("Did not find indexer with name %s" % indexer_name)
        return None
Example #28
    def testHandleIndexerFailureAndSuccess(self):
        indexer_model = Indexer.get(Indexer.name == "NZBs.org")
        with freeze_time("2015-09-20 14:00:00", tz_offset=-4):
            sm = search_module.SearchModule(indexer_model)
            sm.handle_indexer_failure(indexer_model)
            # First error, so level 1
            self.assertEqual(1, indexer_model.status.get().level)
            now = arrow.utcnow()
            first_failure = arrow.get(indexer_model.status.get().first_failure)
            disabled_until = arrow.get(indexer_model.status.get().disabled_until)
            self.assertEqual(now, first_failure)
            self.assertEqual(now.replace(minutes=+sm.disable_periods[1]), disabled_until)

            sm.handle_indexer_failure()
            self.assertEqual(2, indexer_model.status.get().level)
            disabled_until = arrow.get(indexer_model.status.get().disabled_until)
            self.assertEqual(now.replace(minutes=+sm.disable_periods[2]), disabled_until)

            sm.handle_indexer_success()
            self.assertEqual(1, indexer_model.status.get().level)
            self.assertEqual(arrow.get(0), indexer_model.status.get().disabled_until)
            self.assertIsNone(indexer_model.status.get().reason)
Example #29
    def testIndexersApiLimits(self):

        config.settings.searching.generate_queries = []
        self.newznab1.hitLimit = 3
        self.newznab1.hitLimitResetTime = None
        config.settings.indexers = [self.newznab1]
        read_indexers_from_config()
        search_request = SearchRequest()
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))
        dbsearch = Search(internal=True, time=arrow.utcnow().datetime)
        dbsearch.save()
        indexer = Indexer().get(name="newznab1")
        
        # Two accesses, one and 12 hours ago
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-1).datetime, type="search", url="", response_successful=True).save()
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-12).datetime, type="search", url="", response_successful=True).save()
        self.assertEqual(1, len(search.pick_indexers(search_request)))

        # Another one 20 hours ago, so the limit should be reached
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-20).datetime, type="search", url="", response_successful=True).save()
        self.assertEqual(0, len(search.pick_indexers(search_request)))
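Note: the test above implies how search.pick_indexers enforces hitLimit: an indexer is skipped once it has hitLimit or more API accesses within the last 24 hours. A simplified stand-alone sketch of that check (not the project's actual implementation):

import datetime

def hit_limit_reached(access_times, hit_limit, now=None):
    # True if the accesses in the past 24 hours meet or exceed the limit.
    now = now or datetime.datetime.utcnow()
    cutoff = now - datetime.timedelta(hours=24)
    return len([t for t in access_times if t > cutoff]) >= hit_limit

now = datetime.datetime.utcnow()
hour = datetime.timedelta(hours=1)
# Two accesses within 24h against a limit of 3: indexer may still be picked
assert not hit_limit_reached([now - 1 * hour, now - 12 * hour], 3, now)
# A third access within 24h: limit reached, indexer is skipped
assert hit_limit_reached([now - 1 * hour, now - 12 * hour, now - 20 * hour], 3, now)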
Example #30
def download_nzb_and_log(indexer_name, provider_guid, title, searchid):
    """
    Gets the NZB link from the indexer using the guid, downloads it and logs the download

    :param indexer_name: name of the indexer
    :param provider_guid: guid to build link
    :param title: the title to build the link
    :param searchid: the id of the IndexerSearch entry so we can link the download to a search
    :return: IndexerNzbDownloadResult
    """
    for p in indexers.enabled_indexers:
        if p.name == indexer_name:

            link = p.get_nzb_link(provider_guid, title)
            indexer = Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
            psearch = IndexerSearch.get((IndexerSearch.indexer == indexer) & (IndexerSearch.search == searchid))
            papiaccess = IndexerApiAccess(indexer=p.indexer, type="nzb", url=link, indexer_search=psearch)
            papiaccess.save()

            internallink, guid = get_nzb_link_and_guid(indexer_name, provider_guid, searchid, title)
            pnzbdl = IndexerNzbDownload(indexer=indexer, indexer_search=searchid, api_access=papiaccess, mode="serve", title=title, guid=internallink)
            pnzbdl.save()
            try:
                r = p.get(link, timeout=10)
                r.raise_for_status()

                papiaccess.response_successful = True
                papiaccess.response_time = r.elapsed.microseconds / 1000

                return IndexerNzbDownloadResult(content=r.content, headers=r.headers)
            except RequestException as e:
                logger.error("Error while connecting to URL %s: %s" % (link, str(e)))
                papiaccess.error = str(e)
                return None
            finally:
                papiaccess.save()
    else:
        return "Unable to find NZB link"
Example #31
def get_indexer_nzb_link(indexer_name, indexerguid, title, searchid, mode,
                         log_api_access):
    """
    Build a link that leads to the actual NZB on the indexer using the given information. We log this as an indexer API access and an NZB download because this is only called
    when the NZB will actually be downloaded later (by us or a downloader)
    :return: str
    """
    for p in indexers.enabled_indexers:
        if p.name.strip() == indexer_name.strip():
            link = p.get_nzb_link(indexerguid, title)

            # Log to database
            indexer = Indexer.get(
                fn.lower(Indexer.name) == indexer_name.lower())
            papiaccess = IndexerApiAccess(
                indexer=p.indexer,
                type="nzb",
                url=link,
                response_successful=None,
                indexer_search=searchid) if log_api_access else None
            if papiaccess is not None:  # papiaccess is None when log_api_access is False
                try:
                    papiaccess.username = request.authorization.username if request.authorization is not None else None
                except RuntimeError:  # raised when called outside a request context
                    pass
                papiaccess.save()
            pnzbdl = IndexerNzbDownload(indexer=indexer,
                                        indexer_search=searchid,
                                        api_access=papiaccess,
                                        mode=mode,
                                        title=title,
                                        guid=indexerguid)
            pnzbdl.save()

            return link, papiaccess, pnzbdl

    else:
        logger.error("Did not find indexer with name %s" % indexer_name)
        return None, None, None
Example #32
    def testGetEntryById(self):
        Indexer(name="nzbindex").save()
        n = NzbIndex(getIndexerSettingByName("nzbindex"))
        with open("mock/nzbindex--details.html", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(
                assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET,
                     url_re,
                     body=xml,
                     status=200,
                     content_type='application/x-html')
            item = n.get_entry_by_id("aguid", "atitle")
            self.assertEqual("atitle", item.title)
            self.assertEqual(3816816, item.size)
            self.assertEqual(
                "alt.binaries.pwp | alt.binaries.usenetrevolution", item.group)
            self.assertEqual("[email protected] (Janusch)", item.poster)
            self.assertEqual("https://nzbindex.com/download/aguid/atitle.nzb",
                             item.link)
            self.assertEqual("https://nzbindex.com/release/aguid/atitle.nzb",
                             item.details_link)
Example #33
def search_and_handle_db(dbsearch, indexers_and_search_requests):
    results_by_indexer = start_search_futures(indexers_and_search_requests)
    dbsearch.username = request.authorization.username if request.authorization is not None else None
    saveSearch(dbsearch)
    with databaseLock:
        with db.atomic():
            for indexer, result in results_by_indexer.items():
                if result.didsearch:
                    indexersearchentry = result.indexerSearchEntry
                    indexersearchentry.search = dbsearch
                    indexersearchentry.save()
                    result.indexerApiAccessEntry.username = request.authorization.username if request.authorization is not None else None
                    try:
                        result.indexerApiAccessEntry.indexer = Indexer.get(Indexer.name == indexer)
                        result.indexerApiAccessEntry.save()
                        result.indexerStatus.save()
                    except Indexer.DoesNotExist:
                        logger.error("Tried to save indexer API access but no indexer with name %s was found in the database. Adding it now. This shouldn't've happened. If possible send a bug report with a full log." % indexer)
                        Indexer().create(name=indexer)
                    except Exception as e:
                        logger.exception("Error saving IndexerApiAccessEntry")

    return {"results": results_by_indexer, "dbsearch": dbsearch}
Example #34
File: stats.py Project: nzbis/nzbhydra
def get_avg_indexer_response_times():
    result = []
    response_times = []
    for p in Indexer.select().order_by(Indexer.name):

        avg_response_time = IndexerApiAccess().select(
            fn.AVG(IndexerApiAccess.response_time)).where(
                (IndexerApiAccess.response_successful)
                & (IndexerApiAccess.indexer == p)).tuples()[0][0]
        if avg_response_time:
            response_times.append({
                "name": p.name,
                "avgResponseTime": avg_response_time
            })
    avg_response_time = IndexerApiAccess().select(
        fn.AVG(IndexerApiAccess.response_time)).where(
            (IndexerApiAccess.response_successful)
            # is_null() keeps the NULL check inside the SQL
            & (IndexerApiAccess.response_time.is_null(False))).tuples()[0][0]
    for i in response_times:
        delta = i["avgResponseTime"] - avg_response_time
        i["delta"] = delta
        result.append(i)

    return result
Example #35
    def testThatDatabaseValuesAreStored(self):
        with self.app.test_request_context('/'):
            with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
                newznabItems = [
                    [mockbuilder.buildNewznabItem(title="title1", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab1")],
                    [mockbuilder.buildNewznabItem(title="title2", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab2")]
                ]

                self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
                # Make the second access unsuccessful
                rsps._urls.pop(1)
                rsps.add(responses.GET, r".*",
                         body="an error message", status=500,
                         content_type='application/x-html')

                searchRequest = SearchRequest(type="search", query="aquery", category="acategory", identifier_key="imdbid", identifier_value="animdbid", season=1, episode=2, indexers="newznab1|newznab2")
                result = search.search(searchRequest)
                results = result["results"]
                self.assertEqual(1, len(results))

                dbSearch = Search().get()
                self.assertEqual(True, dbSearch.internal)
                self.assertEqual("aquery", dbSearch.query)
                self.assertEqual("All", dbSearch.category)
                self.assertEqual("imdbid", dbSearch.identifier_key)
                self.assertEqual("animdbid", dbSearch.identifier_value)
                self.assertEqual("1", dbSearch.season)
                self.assertEqual("2", dbSearch.episode)
                self.assertEqual("search", dbSearch.type)
                self.assertEqual(18, dbSearch.time.hour)

                indexerSearch1 = IndexerSearch.get(IndexerSearch.indexer == Indexer.get(Indexer.name == "newznab1"))
                self.assertEqual(indexerSearch1.search, dbSearch)
                self.assertEqual(18, indexerSearch1.time.hour)

                indexerSearch2 = IndexerSearch.get(IndexerSearch.indexer == Indexer.get(Indexer.name == "newznab2"))
                self.assertEqual(indexerSearch2.search, dbSearch)
                self.assertEqual(18, indexerSearch2.time.hour)

                calledUrls = sorted([x.request.url for x in rsps.calls])

                indexerApiAccess1 = IndexerApiAccess.get(IndexerApiAccess.indexer == Indexer.get(Indexer.name == "newznab1"))
                self.assertEqual(indexerSearch1, indexerApiAccess1.indexer_search)
                self.assertEqual(18, indexerApiAccess1.time.hour)
                self.assertEqual("search", indexerApiAccess1.type)
                self.assertEqual(calledUrls[0], indexerApiAccess1.url)
                self.assertTrue(indexerApiAccess1.response_successful)
                self.assertEqual(0, indexerApiAccess1.response_time)
                self.assertIsNone(indexerApiAccess1.error)

                indexerApiAccess2 = IndexerApiAccess.get(IndexerApiAccess.indexer == Indexer.get(Indexer.name == "newznab2"))
                self.assertEqual(indexerSearch2, indexerApiAccess2.indexer_search)
                self.assertEqual(18, indexerApiAccess2.time.hour)
                self.assertEqual("search", indexerApiAccess2.type)
                self.assertEqual(calledUrls[1], indexerApiAccess2.url)
                self.assertFalse(indexerApiAccess2.response_successful)
                self.assertIsNone(indexerApiAccess2.response_time)
                self.assertTrue("Connection refused" in indexerApiAccess2.error)

                indexerStatus2 = IndexerStatus.get(IndexerStatus.indexer == Indexer.get(Indexer.name == "newznab2"))
                self.assertEqual(1, indexerStatus2.level)
                self.assertTrue("Connection refused" in indexerStatus2.reason)
Example #36
class MyTestCase(IndexerTestcase):

    def setUp(self):    
        set_and_drop()
        config.load("testsettings.cfg")
        self.nzbsorgdb = Indexer(name="NZBs.org")
        self.nzbsorgdb.save()
        self.dognzbdb = Indexer(name="DOGNzb")
        self.dognzbdb.save()

        config.indexerSettings.newznab1.enabled = True
        config.indexerSettings.newznab1.host.set("http://127.0.0.1:5001/nzbsorg")
        config.indexerSettings.newznab1.apikey.set("apikeynzbsorg")
        self.n1 = NewzNab(config.indexerSettings.newznab1)
        self.n2 = NewzNab(config.indexerSettings.newznab2)

    @freeze_time("2015-10-12 20:00:00", tz_offset=-4)
    def testParseSearchResult(self):
        
        #nzbsorg
        with open("mock/nzbsorg_q_avengers_3results.xml") as f:
            entries = self.n1.process_query_result(f.read(), "aquery").entries
        self.assertEqual(3, len(entries))
        
        self.assertEqual(entries[0].title, "AVENGERS AGE OF ULTRON (2015)")
        assert entries[0].size == 2893890900
        assert entries[0].guid == "eff551fbdb69d6777d5030c209ee5d4b"
        self.assertEqual(entries[0].age_days, 1)
        self.assertEqual(entries[0].epoch, 1444584857)
        self.assertEqual(entries[0].pubdate_utc, "2015-10-11T17:34:17+00:00")
        self.assertEqual(entries[0].poster, "*****@*****.**")
        self.assertEqual(entries[0].group, "alt.binaries.mom")
        
        self.assertEqual(entries[1].group, "alt.binaries.hdtv.x264")

    def testNewznabSearchQueries(self):
        
        self.args = SearchRequest(query="aquery")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        assert "http://127.0.0.1:5001/nzbsorg" in query
        assert "apikey=apikeynzbsorg" in query
        assert "t=search" in query
        assert "q=aquery" in query
        
        self.args = SearchRequest(query=None)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        assert "http://127.0.0.1:5001/nzbsorg" in query
        assert "apikey=apikeynzbsorg" in query
        assert "t=tvsearch" in query
        
        self.args = SearchRequest(category="All")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        assert "http://127.0.0.1:5001/nzbsorg" in query
        assert "apikey=apikeynzbsorg" in query
        assert "t=tvsearch" in query
        
        self.args = SearchRequest(identifier_value="8511", identifier_key="rid")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        assert "http://127.0.0.1:5001/nzbsorg" in query
        assert "apikey=apikeynzbsorg" in query
        assert "t=tvsearch" in query
        assert "rid=8511" in query
        
        self.args = SearchRequest(identifier_value="8511", identifier_key="rid", season=1)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        assert "http://127.0.0.1:5001/nzbsorg" in query
        assert "apikey=apikeynzbsorg" in query
        assert "t=tvsearch" in query
        assert "rid=8511" in query
        assert "season=1" in query
        
        self.args = SearchRequest(identifier_value="8511", identifier_key="rid", season=1, episode=2)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        assert "http://127.0.0.1:5001/nzbsorg" in query
        assert "apikey=apikeynzbsorg" in query
        assert "t=tvsearch" in query
        assert "rid=8511" in query
        assert "season=1" in query
        assert "ep=2" in query
        
        self.args = SearchRequest(identifier_value="12345678", identifier_key="imdbid")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        assert "http://127.0.0.1:5001/nzbsorg" in query
        assert "apikey=apikeynzbsorg" in query
        assert "t=movie" in query
        assert "imdbid=12345678" in query
        
        self.args = SearchRequest(identifier_value="12345678", identifier_key="imdbid", category="Movies")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        assert "http://127.0.0.1:5001/nzbsorg" in query
        assert "apikey=apikeynzbsorg" in query
        assert "t=movie" in query
        assert "imdbid=12345678" in query
        assert "cat=2000" in query
        
        
    @responses.activate
    def testGetNfo(self):
        with open("mock/dognzb--id-b4ba74ecb5f5962e98ad3c40c271dcc8--t-getnfo.xml", encoding="latin-1") as f:
            xml = f.read()
            
            url_re = re.compile(r'.*')
            responses.add(responses.GET, url_re,
                          body=xml, status=200,
                          content_type='application/x-html')
            nfo = self.n2.get_nfo("b4ba74ecb5f5962e98ad3c40c271dcc8")
            assert "Road Hard" in nfo
            
    @responses.activate
    def testOffsetStuff(self):
        mockitem_nzbs = []
        for i in range(100):
            mockitem_nzbs.append(mockbuilder.buildNewznabItem("myId", "myTitle", "myGuid", "http://nzbs.org/myId.nzb", None, None, 12345, "NZBs.org", [2000, 2040]))
        mockresponse_nzbs1 = mockbuilder.buildNewznabResponse("NZBs.org", mockitem_nzbs, offset=0, total=200)
        
        mockitem_nzbs.clear()
        for i in range(100):
            mockitem_nzbs.append(mockbuilder.buildNewznabItem("myId", "myTitle", "myGuid", "http://nzbs.org/myId.nzb", None, None, 12345, "NZBs.org", [2000, 2040]))
        mockresponse_nzbs2 = mockbuilder.buildNewznabResponse("NZBs.org", mockitem_nzbs, offset=100, total=200)

        r = self.n1.process_query_result(json.dumps(mockresponse_nzbs1), "http://127.0.0.1:5001/nzbsorg/q=whatever&offset=0&limit=0")
        further_queries = r.queries
        self.assertEqual(1, len(further_queries))
        assert "offset=100" in further_queries[0]
        
        r = self.n1.process_query_result(json.dumps(mockresponse_nzbs2), "http://127.0.0.1:5001/nzbsorg/q=whatever&offset=0&limit=0")
        further_queries = r.queries
        self.assertEqual(0, len(further_queries))

    def testGetNzbLink(self):
        link = self.n1.get_nzb_link("guid", None)
        assert "id=guid" in link
        assert "t=get" in link
        
    def testMapCats(self):
        from nzbhydra.searchmodules import newznab
        assert newznab.map_category("Movies") == [2000]
        assert newznab.map_category("2000") == [2000]
        newznabcats = newznab.map_category("2030,2040")
        assert len(newznabcats) == 2
        assert 2030 in newznabcats
        assert 2040 in newznabcats
示例#38
0
    def testThatDatabaseValuesAreStored(self):
        with self.app.test_request_context('/'):
            with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
                newznabItems = [
                    [mockbuilder.buildNewznabItem(title="title1", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab1")],
                    [mockbuilder.buildNewznabItem(title="title2", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab2")]
                ]

                self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
                # Make the second access unsuccessful
                rsps._urls.pop(1)
                rsps.add(responses.GET, r".*",
                         body="an error message", status=500,
                         content_type='application/x-html')

                searchRequest = SearchRequest(type="search", query="aquery", category="acategory", identifier_key="imdbid", identifier_value="animdbid", season=1, episode=2, indexers="newznab1|newznab2")
                result = search.search(searchRequest)
                results = result["results"]
                self.assertEqual(1, len(results))

                dbSearch = Search().get()
                self.assertEqual(True, dbSearch.internal)
                self.assertEqual("aquery", dbSearch.query)
                self.assertEqual("All", dbSearch.category)
                self.assertEqual("imdbid", dbSearch.identifier_key)
                self.assertEqual("animdbid", dbSearch.identifier_value)
                self.assertEqual("1", dbSearch.season)
                self.assertEqual("2", dbSearch.episode)
                self.assertEqual("search", dbSearch.type)
                self.assertEqual(18, dbSearch.time.hour)

                indexerSearch1 = IndexerSearch.get(IndexerSearch.indexer == Indexer.get(Indexer.name == "newznab1"))
                self.assertEqual(indexerSearch1.search, dbSearch)
                self.assertEqual(18, indexerSearch1.time.hour)

                indexerSearch2 = IndexerSearch.get(IndexerSearch.indexer == Indexer.get(Indexer.name == "newznab2"))
                self.assertEqual(indexerSearch2.search, dbSearch)
                self.assertEqual(18, indexerSearch2.time.hour)

                calledUrls = sorted([x.request.url for x in rsps.calls])

                indexerApiAccess1 = IndexerApiAccess.get(IndexerApiAccess.indexer == Indexer.get(Indexer.name == "newznab1"))
                self.assertEqual(indexerSearch1, indexerApiAccess1.indexer_search)
                self.assertEqual(18, indexerApiAccess1.time.hour)
                self.assertEqual("search", indexerApiAccess1.type)
                self.assertEqual(calledUrls[0], indexerApiAccess1.url)
                self.assertTrue(indexerApiAccess1.response_successful)
                self.assertEqual(0, indexerApiAccess1.response_time)
                self.assertIsNone(indexerApiAccess1.error)

                indexerApiAccess2 = IndexerApiAccess.get(IndexerApiAccess.indexer == Indexer.get(Indexer.name == "newznab2"))
                self.assertEqual(indexerSearch2, indexerApiAccess2.indexer_search)
                self.assertEqual(18, indexerApiAccess2.time.hour)
                self.assertEqual("search", indexerApiAccess2.type)
                self.assertEqual(calledUrls[1], indexerApiAccess2.url)
                self.assertFalse(indexerApiAccess2.response_successful)
                self.assertIsNone(indexerApiAccess2.response_time)
                self.assertTrue("Connection refused" in indexerApiAccess2.error)

                indexerStatus2 = IndexerStatus.get(IndexerStatus.indexer == Indexer.get(Indexer.name == "newznab2"))
                self.assertEqual(1, indexerStatus2.level)
                self.assertTrue("Connection refused" in indexerStatus2.reason)
Example #39
 def setUp(self):
     set_and_drop()
     womble = Indexer(module="womble", name="Womble", settings={"query_url": "http://127.0.0.1:5001/womble", "base_url": "http://127.0.0.1:5001/womble"}, search_types=["general"], search_ids=[])
     womble.save()
     self.womble = Womble(womble)
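
Note that, unlike the NewznabTests further below, which configure the module from a settings Bunch, this setup builds the Womble module directly from its database entity, so query_url and base_url are read from the stored settings dict.
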
Example #40
def init_indexer_table_entry(indexer_name):
    try:
        Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
    except Indexer.DoesNotExist:
        logger.info("Unable to find indexer with name %s in database. Will add it" % indexer_name)
        Indexer.create(name=indexer_name)
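
The snippet implements a case-insensitive get-or-create. The same idea as a reusable helper that returns the row in both branches (a sketch; the helper name is illustrative):

from peewee import fn

def get_or_add_indexer(name):
    # Look the indexer up case-insensitively; create it only if missing.
    try:
        return Indexer.get(fn.lower(Indexer.name) == name.lower())
    except Indexer.DoesNotExist:
        return Indexer.create(name=name)
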
Example #41
 def indexer(self):
     return Indexer.get(fn.lower(Indexer.name) == self.settings.name.lower())
Example #42
 def indexer(self):
     if self.indexerDb is None:
         self.indexerDb = Indexer.get(fn.lower(Indexer.name) == self.settings.name.lower())
     return self.indexerDb
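
On Python 3.8+ the manual indexerDb caching above can be expressed with functools.cached_property; a sketch under that assumption (the class name is illustrative, and Indexer/fn are the project's peewee imports):

from functools import cached_property

class IndexerBackedModule:
    @cached_property
    def indexer(self):
        # Evaluated on first access, then cached on the instance.
        return Indexer.get(fn.lower(Indexer.name) == self.settings.name.lower())
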
Example #43
class NewznabTests(UrlTestCase):
    def setUp(self):
        set_and_drop()

        self.indexercom = Indexer(name="indexer.com")
        self.indexercom.save()

        self.newznab1 = Bunch()
        self.newznab1.enabled = True
        self.newznab1.name = "indexer.com"
        self.newznab1.host = "https://indexer.com"
        self.newznab1.apikey = "apikeyindexer.com"
        self.newznab1.timeout = None
        self.newznab1.score = 0
        self.newznab1.search_ids = ["imdbid", "rid", "tvdbid"]
        self.n1 = NewzNab(self.newznab1)

    @freeze_time("2015-10-12 18:00:00", tz_offset=-4)
    def testParseSearchResult(self):
        # standard newznab response
        with open("mock/indexercom_q_testtitle_3results.xml") as f:
            entries = self.n1.process_query_result(f.read(), "aquery").entries
        self.assertEqual(3, len(entries))

        self.assertEqual(entries[0].title, "testtitle1")
        assert entries[0].size == 2893890900
        assert entries[0].indexerguid == "eff551fbdb69d6777d5030c209ee5d4b"
        self.assertEqual(entries[0].age_days, 1)
        self.assertEqual(entries[0].epoch, 1444584857)
        self.assertEqual(entries[0].pubdate_utc, "2015-10-11T17:34:17+00:00")
        self.assertEqual(entries[0].poster, "*****@*****.**")
        self.assertEqual(entries[0].group, "alt.binaries.mom")
        self.assertEqual(
            entries[0].details_link,
            "https://indexer.com/details/eff551fbdb69d6777d5030c209ee5d4b")

        # Pull group from description
        self.assertEqual(entries[1].group, "alt.binaries.hdtv.x264")
        # Use "usenetdate" attribute if available
        self.assertEqual(
            entries[1].pubdate_utc,
            "2015-10-03T22:22:22+00:00")  # Sat, 03 Oct 2015 22:22:22 +0000
        # Use "info" attribute if available
        self.assertEqual(
            entries[0].details_link,
            "https://indexer.com/details/eff551fbdb69d6777d5030c209ee5d4b")

        # Don't use "not available" as group
        self.assertIsNone(entries[2].group)

    @freeze_time("2016-01-30 18:00:00", tz_offset=-4)
    def testParseSpotwebSearchResult(self):
        # spotweb response
        with open("mock/spotweb_q_testtitle_3results.xml") as f:
            entries = self.n1.process_query_result(f.read(), "aquery").entries
        self.assertEqual(3, len(entries))

        self.assertEqual(entries[0].title, "testtitle1")
        assert entries[0].size == 3960401206
        assert entries[0].indexerguid == "*****@*****.**"
        self.assertEqual(entries[0].age_days, 5)
        self.assertEqual(entries[0].epoch, 1453663845)
        self.assertEqual(entries[0].pubdate_utc, "2016-01-24T19:30:45+00:00")
        self.assertEqual(entries[0].poster, "*****@*****.**")
        self.assertIsNone(entries[0].group)

    @freeze_time("2016-01-11 18:00:00", tz_offset=0)
    def testPirateNzbParseSearchResult(self):
        # pirateNZB response
        with open("mock/piratenzb_movies_response.xml") as f:
            entries = self.n1.process_query_result(f.read(), "aquery").entries
        self.assertEqual(3, len(entries))

        self.assertEqual(entries[0].title, "title1")
        assert entries[0].size == 954926472
        assert entries[0].indexerguid == "d4776501c2b409c41f0649afc1e2d6d3f033119e"
        self.assertEqual(entries[0].age_days, 323)
        self.assertEqual(entries[0].epoch, 1424552357)
        self.assertEqual(entries[0].pubdate_utc, "2015-02-21T20:59:17+00:00")
        self.assertEqual(
            entries[0].details_link,
            "https://indexer.com/details/d4776501c2b409c41f0649afc1e2d6d3f033119e"
        )

    def testNewznabSearchQueries(self):
        self.args = SearchRequest(query="aquery")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&q=aquery&t=search",
            query)

        self.args = SearchRequest(query=None)
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=search",
            query)

        self.args = SearchRequest(query="")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=search",
            query)

        self.args = SearchRequest(category="Audio")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=3000&extended=1&limit=100&offset=0&t=search",
            query)

        self.args = SearchRequest()
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch",
            query)

        self.args = SearchRequest(query=None)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch",
            query)

        self.args = SearchRequest(query="")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch",
            query)

        self.args = SearchRequest(category="All")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=tvsearch",
            query)

        self.args = SearchRequest(identifier_value="8511",
                                  identifier_key="rid")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&rid=8511&t=tvsearch",
            query)

        self.args = SearchRequest(identifier_value="8511",
                                  identifier_key="rid",
                                  season=1)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&rid=8511&season=1&t=tvsearch",
            query)

        self.args = SearchRequest(identifier_value="8511",
                                  identifier_key="rid",
                                  season=1,
                                  episode=2)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&ep=2&extended=1&limit=100&offset=0&rid=8511&season=1&t=tvsearch",
            query)

        self.args = SearchRequest(identifier_value="12345678",
                                  identifier_key="imdbid")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&imdbid=12345678&limit=100&offset=0&t=movie",
            query)

        self.args = SearchRequest(identifier_value="12345678",
                                  identifier_key="imdbid",
                                  category="Movies HD")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2040,2050,2060&extended=1&imdbid=12345678&limit=100&offset=0&t=movie",
            query)

        self.args = SearchRequest(category="Movies")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie",
            query)

        self.args = SearchRequest(category="Movies", query=None)
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie",
            query)

        self.args = SearchRequest(category="Movies", query="")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie",
            query)

    @responses.activate
    def testGetNfo(self):
        with open("mock/nfo.xml", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(
                assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET,
                     url_re,
                     body=xml,
                     status=200,
                     content_type='application/x-html')
            hasnfo, nfo, message = self.n1.get_nfo(
                "b4ba74ecb5f5962e98ad3c40c271dcc8")
            self.assertTrue(hasnfo)
            self.assertEqual("an nfo in xml", nfo)

        with open("mock/rawnfo.txt", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(
                assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET,
                     url_re,
                     body=xml,
                     status=200,
                     content_type='application/x-html')
            hasnfo, nfo, message = self.n1.get_nfo(
                "b4ba74ecb5f5962e98ad3c40c271dcc8")
            self.assertTrue(hasnfo)
            self.assertEqual("a raw nfo", nfo)

        with open("mock/nfo-noresult.xml", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(
                assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET,
                     url_re,
                     body=xml,
                     status=200,
                     content_type='application/x-html')
            hasnfo, nfo, message = self.n1.get_nfo(
                "b4ba74ecb5f5962e98ad3c40c271dcc8")
            self.assertFalse(hasnfo)
            self.assertEqual("No NFO available", message)

        with open("mock/nfo-nosuchitem.xml", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(
                assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET,
                     url_re,
                     body=xml,
                     status=200,
                     content_type='application/x-html')
            hasnfo, nfo, message = self.n1.get_nfo(
                "b4ba74ecb5f5962e98ad3c40c271dcc8")
            self.assertFalse(hasnfo)
            self.assertEqual("No NFO available", message)

    def testGetNzbLink(self):
        link = self.n1.get_nzb_link("guid", None)
        assert "id=guid" in link
        assert "t=get" in link

    def testMapCats(self):
        from nzbhydra.searchmodules import newznab
        assert newznab.map_category("Movies") == [2000]
        assert newznab.map_category("2000") == [2000]
        newznabcats = newznab.map_category("2030,2040")
        assert len(newznabcats) == 2
        assert 2030 in newznabcats
        assert 2040 in newznabcats

    def testGetEbookUrls(self):
        searchRequest = SearchRequest(query="novel")
        urls = self.n1.get_ebook_urls(searchRequest)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&cat=7020,8010&limit=100&t=search&extended=1&offset=0&q=novel",
            urls[0])

    def testGetMovieSearchUrls(self):
        self.newznab1.search_ids = ["imdbid"]
        # Doing a query based movie search uses regular search with the proper category
        searchRequest = SearchRequest(type="movie", query="atitle")
        urls = self.n1.get_moviesearch_urls(searchRequest)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=search&extended=1&offset=0&cat=2000&q=atitle",
            urls[0])

        searchRequest = SearchRequest(type="movie",
                                      identifier_key="imdbid",
                                      identifier_value="123")
        urls = self.n1.get_moviesearch_urls(searchRequest)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=movie&extended=1&offset=0&cat=2000&imdbid=123",
            urls[0])

    def testGetShowSearchUrls(self):
        self.newznab1.search_ids = ["tvdbid", "rid"]
        self.args = SearchRequest(identifier_value="47566",
                                  identifier_key="rid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&rid=47566",
            urls[0])
        self.args = SearchRequest(identifier_value="299350",
                                  identifier_key="tvdbid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&tvdbid=299350",
            urls[0])

    def testThatShowSearchIdsAreConverted(self):
        self.newznab1.search_ids = ["tvdbid"]
        self.args = SearchRequest(identifier_value="47566",
                                  identifier_key="rid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&tvdbid=299350",
            urls[0])

        self.newznab1.search_ids = ["rid"]
        self.args = SearchRequest(identifier_value="299350",
                                  identifier_key="tvdbid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual(
            "https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&rid=47566",
            urls[0])

    def testThatNoUrlsAreReturnedIfIdCannotBeConverted(self):
        self.newznab1.search_ids = ["unknownid"]
        self.args = SearchRequest(identifier_value="299350",
                                  identifier_key="tvdbid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertEqual(0, len(urls))

    def testCheckAuth(self):
        body = '<?xml version="1.0" encoding="utf-8" ?><error code="100" description="Incorrect user credentials" />'
        with pytest.raises(Exception) as excinfo:
            self.n1.check_auth(body)
        self.assertEqual("The API key seems to be incorrect.",
                         excinfo.value.message)

        body = '<?xml version="1.0" encoding="utf-8" ?><error code="910" description="API Temporarily Disabled (daily maintenance)" />'
        with pytest.raises(Exception) as excinfo:
            self.n1.check_auth(body)
        self.assertEqual("The API seems to be disabled for the moment.",
                         excinfo.value.message)

        body = '<?xml version="1.0" encoding="utf-8" ?><error code="200" description="Missing parameter" />'
        with pytest.raises(Exception) as excinfo:
            self.n1.check_auth(body)
        self.assertEqual(
            "Unknown error while trying to access the indexer: Missing parameter",
            excinfo.value.message)
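
These URL assertions go through UrlTestCase.assertUrlEqual, which presumably compares URLs regardless of query-parameter order. A sketch of such a comparison (an assumption about the helper, not the project's actual code):

from urllib.parse import parse_qs, urlparse

def urls_equal(a, b):
    pa, pb = urlparse(a), urlparse(b)
    # Scheme, host and path must match literally; query strings as parsed dicts.
    return ((pa.scheme, pa.netloc, pa.path) == (pb.scheme, pb.netloc, pb.path)
            and parse_qs(pa.query) == parse_qs(pb.query))
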
Example #44
 def indexer(self):
     if self.indexerDb is None:
         self.indexerDb = Indexer.get(
             fn.lower(Indexer.name) == self.settings.name.lower())
     return self.indexerDb
Example #45
class NewznabTests(UrlTestCase):
    def setUp(self):
        set_and_drop()

        self.indexercom = Indexer(name="indexer.com")
        self.indexercom.save()

        self.newznab1 = Bunch()
        self.newznab1.enabled = True
        self.newznab1.name = "indexer.com"
        self.newznab1.host = "https://indexer.com"
        self.newznab1.apikey = "apikeyindexer.com"
        self.newznab1.timeout = None
        self.newznab1.score = 0
        self.newznab1.search_ids = ["imdbid", "rid", "tvdbid"]
        self.newznab1.searchTypes = []
        self.n1 = NewzNab(self.newznab1)

    @freeze_time("2015-10-12 18:00:00", tz_offset=-4)
    def testParseSearchResult(self):
        # standard newznab response
        with open("mock/indexercom_q_testtitle_3results.xml") as f:
            entries = self.n1.process_query_result(f.read(), SearchRequest()).entries
        self.assertEqual(3, len(entries))

        self.assertEqual(entries[0].title, "testtitle1")
        assert entries[0].size == 2893890900
        assert entries[0].indexerguid == "eff551fbdb69d6777d5030c209ee5d4b"
        self.assertEqual(entries[0].age_days, 1)
        self.assertEqual(entries[0].epoch, 1444584857)
        self.assertEqual(entries[0].pubdate_utc, "2015-10-11T17:34:17+00:00")
        self.assertEqual(entries[0].poster, "*****@*****.**")
        self.assertEqual(entries[0].group, "alt.binaries.mom")
        self.assertEqual(entries[0].details_link, "https://indexer.com/details/eff551fbdb69d6777d5030c209ee5d4b")

        # Pull group from description
        self.assertEqual(entries[1].group, "alt.binaries.hdtv.x264")
        # Use "usenetdate" attribute if available
        self.assertEqual(entries[1].pubdate_utc, "2015-10-03T22:22:22+00:00")  # Sat, 03 Oct 2015 22:22:22 +0000
        # Use "info" attribute if available
        self.assertEqual(entries[0].details_link, "https://indexer.com/details/eff551fbdb69d6777d5030c209ee5d4b")

        # Don't use "not available" as group
        self.assertIsNone(entries[2].group)

    @freeze_time("2016-01-30 18:00:00", tz_offset=-4)
    def testParseSpotwebSearchResult(self):
        # spotweb response
        with open("mock/spotweb_q_testtitle_3results.xml") as f:
            entries = self.n1.process_query_result(f.read(), SearchRequest()).entries
        self.assertEqual(3, len(entries))

        self.assertEqual(entries[0].title, "testtitle1")
        assert entries[0].size == 3960401206
        assert entries[0].indexerguid == "*****@*****.**"
        self.assertEqual(entries[0].age_days, 5)
        self.assertEqual(entries[0].epoch, 1453663845)
        self.assertEqual(entries[0].pubdate_utc, "2016-01-24T19:30:45+00:00")
        self.assertEqual(entries[0].poster, "*****@*****.**")
        self.assertIsNone(entries[0].group)

    @freeze_time("2016-01-11 18:00:00", tz_offset=0)
    def testPirateNzbParseSearchResult(self):
        # pirateNZB response
        with open("mock/piratenzb_movies_response.xml") as f:
            entries = self.n1.process_query_result(f.read(), SearchRequest()).entries
        self.assertEqual(3, len(entries))

        self.assertEqual(entries[0].title, "title1")
        assert entries[0].size == 954926472
        assert entries[0].indexerguid == "d4776501c2b409c41f0649afc1e2d6d3f033119e"
        self.assertEqual(entries[0].age_days, 323)
        self.assertEqual(entries[0].epoch, 1424552357)
        self.assertEqual(entries[0].pubdate_utc, "2015-02-21T20:59:17+00:00")
        self.assertEqual(entries[0].details_link, "https://indexer.com/details/d4776501c2b409c41f0649afc1e2d6d3f033119e")

    def testNewznabSearchQueries(self):
        self.args = SearchRequest(query="aquery")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&q=aquery&t=search", query)

        self.args = SearchRequest(query=None)
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=search", query)

        self.args = SearchRequest(query="")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=search", query)

        self.args = SearchRequest(category="Audio")
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=3000&extended=1&limit=100&offset=0&t=search", query)

        self.args = SearchRequest()
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch", query)

        self.args = SearchRequest(query=None)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch", query)

        self.args = SearchRequest(query="")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&t=tvsearch", query)

        self.args = SearchRequest(category="All")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&t=tvsearch", query)

        self.args = SearchRequest(identifier_value="8511", identifier_key="rid")
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&rid=8511&t=tvsearch", query)

        self.args = SearchRequest(identifier_value="8511", identifier_key="rid", season=1)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&extended=1&limit=100&offset=0&rid=8511&season=1&t=tvsearch", query)

        self.args = SearchRequest(identifier_value="8511", identifier_key="rid", season=1, episode=2)
        queries = self.n1.get_showsearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=5000&ep=2&extended=1&limit=100&offset=0&rid=8511&season=1&t=tvsearch", query)

        self.args = SearchRequest(identifier_value="12345678", identifier_key="imdbid")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&imdbid=12345678&limit=100&offset=0&t=movie", query)

        self.args = SearchRequest(identifier_value="12345678", identifier_key="imdbid", category="Movies HD")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=2040,2050,2060&extended=1&imdbid=12345678&limit=100&offset=0&t=movie", query)

        self.args = SearchRequest(category="Movies")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie", query)

        self.args = SearchRequest(category="Movies", query=None)
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie", query)

        self.args = SearchRequest(category="Movies", query="")
        queries = self.n1.get_moviesearch_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=2000&extended=1&limit=100&offset=0&t=movie", query)

        config.settings.searching.ignoreWords = "ignorethis"
        self.args = SearchRequest(query="aquery", ignoreWords=["ignorethis"])
        queries = self.n1.get_search_urls(self.args)
        assert len(queries) == 1
        query = queries[0]
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&extended=1&limit=100&offset=0&q=aquery --ignorethis&t=search", query)

    @responses.activate
    def testGetNfo(self):
        with open("mock/nfo.xml", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET, url_re,
                     body=xml, status=200,
                     content_type='application/x-html')
            hasnfo, nfo, message = self.n1.get_nfo("b4ba74ecb5f5962e98ad3c40c271dcc8")
            self.assertTrue(hasnfo)
            self.assertEqual("an nfo in xml", nfo)

        with open("mock/rawnfo.txt", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET, url_re,
                     body=xml, status=200,
                     content_type='application/x-html')
            hasnfo, nfo, message = self.n1.get_nfo("b4ba74ecb5f5962e98ad3c40c271dcc8")
            self.assertTrue(hasnfo)
            self.assertEqual("a raw nfo", nfo)

        with open("mock/nfo-noresult.xml", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET, url_re,
                     body=xml, status=200,
                     content_type='application/x-html')
            hasnfo, nfo, message = self.n1.get_nfo("b4ba74ecb5f5962e98ad3c40c271dcc8")
            self.assertFalse(hasnfo)
            self.assertEqual("No NFO available", message)

        with open("mock/nfo-nosuchitem.xml", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET, url_re,
                     body=xml, status=200,
                     content_type='application/x-html')
            hasnfo, nfo, message = self.n1.get_nfo("b4ba74ecb5f5962e98ad3c40c271dcc8")
            self.assertFalse(hasnfo)
            self.assertEqual("No NFO available", message)

    def testGetNzbLink(self):
        link = self.n1.get_nzb_link("guid", None)
        assert "id=guid" in link
        assert "t=get" in link

    def testMapCats(self):
        from nzbhydra.searchmodules import newznab
        assert newznab.map_category("Movies") == [2000]
        assert newznab.map_category("2000") == [2000]
        newznabcats = newznab.map_category("2030,2040")
        assert len(newznabcats) == 2
        assert 2030 in newznabcats
        assert 2040 in newznabcats

    def testGetEbookUrls(self):
        searchRequest = SearchRequest(query="novel")
        urls = self.n1.get_ebook_urls(searchRequest)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=7020,8010&limit=100&t=search&extended=1&offset=0&q=novel", urls[0])

        self.args = SearchRequest(author="anauthor", title="atitle", category="7020")
        queries = self.n1.get_ebook_urls(self.args)
        self.assertEqual(1, len(queries))
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&cat=7020&extended=1&limit=100&offset=0&q=anauthor+atitle&t=search", queries[0])

        self.newznab1.searchTypes = ["book"]
        self.n1 = NewzNab(self.newznab1)
        self.args = SearchRequest(author="anauthor", title="atitle", category="7020")
        queries = self.n1.get_ebook_urls(self.args)
        self.assertEqual(1, len(queries))
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&author=anauthor&cat=7020&extended=1&limit=100&offset=0&t=book&title=atitle", queries[0])

    def testGetMovieSearchUrls(self):
        self.newznab1.search_ids = ["imdbid"]
        # Doing a query based movie search uses regular search with the proper category 
        searchRequest = SearchRequest(type="movie", query="atitle")
        urls = self.n1.get_moviesearch_urls(searchRequest)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=search&extended=1&offset=0&cat=2000&q=atitle", urls[0])

        searchRequest = SearchRequest(type="movie", identifier_key="imdbid", identifier_value="123")
        urls = self.n1.get_moviesearch_urls(searchRequest)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=movie&extended=1&offset=0&cat=2000&imdbid=123", urls[0])

    def testGetShowSearchUrls(self):
        self.newznab1.search_ids = ["tvdbid", "rid"]
        self.args = SearchRequest(identifier_value="47566", identifier_key="rid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&rid=47566", urls[0])
        self.args = SearchRequest(identifier_value="299350", identifier_key="tvdbid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&tvdbid=299350", urls[0])

    def testThatShowSearchIdsAreConverted(self):
        self.newznab1.search_ids = ["tvdbid"]
        self.args = SearchRequest(identifier_value="47566", identifier_key="rid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&tvdbid=299350", urls[0])

        self.newznab1.search_ids = ["rid"]
        self.args = SearchRequest(identifier_value="299350", identifier_key="tvdbid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertEqual(1, len(urls))
        self.assertUrlEqual("https://indexer.com/api?apikey=apikeyindexer.com&limit=100&t=tvsearch&extended=1&offset=0&cat=5000&rid=47566", urls[0])

    def testThatNoUrlsAreReturnedIfIdCannotBeConverted(self):
        self.newznab1.search_ids = ["unknownid"]
        self.args = SearchRequest(identifier_value="299350", identifier_key="tvdbid")
        urls = self.n1.get_showsearch_urls(self.args)
        self.assertEqual(0, len(urls))

    def testCheckAuth(self):
        body = '<?xml version="1.0" encoding="utf-8" ?><error code="100" description="Incorrect user credentials" />'
        with pytest.raises(Exception) as excinfo:
            self.n1.check_auth(body)
        self.assertEqual("The API key seems to be incorrect.", excinfo.value.message)

        body = '<?xml version="1.0" encoding="utf-8" ?><error code="910" description="API Temporarily Disabled (daily maintenance)" />'
        with pytest.raises(Exception) as excinfo:
            self.n1.check_auth(body)
        self.assertEqual("The API seems to be disabled for the moment.", excinfo.value.message)

        body = '<?xml version="1.0" encoding="utf-8" ?><error code="200" description="Missing parameter" />'
        with pytest.raises(Exception) as excinfo:
            self.n1.check_auth(body)
        self.assertEqual("Unknown error while trying to access the indexer: Missing parameter", excinfo.value.message)

    @responses.activate
    def testGetEntryById(self):        
        with open("mock/indexercom_details.xml", encoding="latin-1") as f:
            xml = f.read()
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            url_re = re.compile(r'.*')
            rsps.add(responses.GET, url_re,
                     body=xml, status=200,
                     content_type='application/x-html')
            item = self.n1.get_entry_by_id("aguid", "atitle")
            self.assertEqual("testtitle1", item.title)
            self.assertEqual(2893890900, item.size)
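
The ignoreWords test in this class expects excluded words to be appended to the query as --word tokens. A sketch of that expansion (an illustrative helper; the real logic lives in nzbhydra.searchmodules.newznab):

def apply_ignore_words(query, ignore_words):
    # apply_ignore_words("aquery", ["ignorethis"]) -> "aquery --ignorethis"
    return " ".join([query] + ["--%s" % word for word in ignore_words])
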
Example #46
 def indexer(self):
     return Indexer.get(fn.lower(Indexer.name) == self.settings.name.lower())
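
Note that this accessor queries the database on every access; Examples #42 and #44 show the memoized variant that caches the row in self.indexerDb after the first lookup.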