def get_avg_indexer_search_results_share():
    """Compute each enabled indexer's average share of all search results.

    Only searches with a specific query or identifier (episode, season,
    identifier_key or query set) are considered. Indexers that are disabled
    or missing from the configuration are skipped.

    Returns a list of dicts {"name", "avgResultsShare"} sorted by share
    descending ("N/A" entries sort as 0), ties broken by name.
    """
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        result = database.db.execute_sql(
            "select (100 * (select cast(sum(ps.resultsCount) as float) from indexersearch ps "
            "where ps.search_id in (select ps.search_id from indexersearch ps, search s where ps.indexer_id == %d and ps.search_id = s.id and ps.successful and (s.episode NOT NULL or s.season not NULL or s.identifier_key not null or s.query not null)) and ps.indexer_id == %d)) "
            "/ "
            "(select sum(ps.resultsCount) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps, search s where ps.indexer_id == %d and ps.search_id = s.id and ps.successful and (s.episode NOT NULL or s.season not NULL or s.identifier_key not null or s.query not null))) as sumAllResults" % (p.id, p.id, p.id)).fetchone()
        # Fix: also guard against fetchone() returning no row at all, not just
        # a NULL aggregate (consistent with the other stats variants).
        if result is not None and result[0] is not None:
            avgResultsShare = int(result[0])
        else:
            avgResultsShare = "N/A"
        results.append({"name": p.name, "avgResultsShare": avgResultsShare})
    # Sort by name first so equal shares keep a stable alphabetical order.
    results = sorted(results, key=lambda x: x["name"])
    results = sorted(results, key=lambda x: 0 if x["avgResultsShare"] == "N/A" else x["avgResultsShare"], reverse=True)
    return results
def get_avg_indexer_response_times(after, before):
    """Return the average successful response time per enabled indexer in the
    time window (after, before), plus each indexer's delta to the overall
    average across all indexers in the same window.

    Returns a list of dicts {"name", "avgResponseTime", "delta"} sorted by
    avgResponseTime ascending (ties broken by name).
    """
    result = []
    response_times = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        where = (IndexerApiAccess.response_successful) & (
            IndexerApiAccess.indexer == p
        ) & (IndexerApiAccess.time > after) & (IndexerApiAccess.time < before)
        avg_response_time = IndexerApiAccess().select(
            fn.AVG(IndexerApiAccess.response_time)).where(where).tuples()[0][0]
        if avg_response_time:
            response_times.append({
                "name": p.name,
                "avgResponseTime": int(avg_response_time)
            })
    # Fix: "IndexerApiAccess.response_time is not None" was evaluated by
    # Python (a Field object is never None, so it is always True) and never
    # produced a SQL filter. is_null(False) emits "response_time IS NOT NULL".
    where = (IndexerApiAccess.response_successful) & (
        IndexerApiAccess.response_time.is_null(False)) & (
        IndexerApiAccess.time > after) & (IndexerApiAccess.time < before)
    avg_response_time = IndexerApiAccess().select(
        fn.AVG(IndexerApiAccess.response_time)).where(where).tuples()[0][0]
    for i in response_times:
        delta = i["avgResponseTime"] - avg_response_time
        i["delta"] = delta
        result.append(i)
    result = sorted(result, key=lambda x: x["name"])
    result = sorted(result, key=lambda x: x["avgResponseTime"])
    return result
def get_avg_indexer_response_times(after, before):
    """Per-indexer average successful response time in (after, before) and the
    delta against the overall average over the same window.

    Returns [{"name", "avgResponseTime", "delta"}, ...] sorted by response
    time ascending, name as tie-breaker.
    """
    result = []
    response_times = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        where = (IndexerApiAccess.response_successful) & (IndexerApiAccess.indexer == p) & (IndexerApiAccess.time > after) & (IndexerApiAccess.time < before)
        avg_response_time = IndexerApiAccess().select(fn.AVG(IndexerApiAccess.response_time)).where(where).tuples()[0][0]
        if avg_response_time:
            response_times.append({"name": p.name, "avgResponseTime": int(avg_response_time)})
    # Fix: comparing the field with "is not None" happened in Python (always
    # True) instead of generating SQL; is_null(False) yields IS NOT NULL.
    where = (IndexerApiAccess.response_successful) & (IndexerApiAccess.response_time.is_null(False)) & (IndexerApiAccess.time > after) & (IndexerApiAccess.time < before)
    avg_response_time = IndexerApiAccess().select(fn.AVG(IndexerApiAccess.response_time)).where(where).tuples()[0][0]
    for i in response_times:
        i["delta"] = i["avgResponseTime"] - avg_response_time
        result.append(i)
    result = sorted(result, key=lambda x: x["name"])
    result = sorted(result, key=lambda x: x["avgResponseTime"])
    return result
def get_indexer_response_times():
    """Collect successful API response times per indexer for charting.

    Returns [{"key": <indexer name>, "values": [{"responseTime", "date"}, ...]}, ...].
    """
    result = []
    for p in Indexer.select():
        # Fix: removed leftover debug print ("Limiting stats to 100 for
        # testing only!") and replaced the test-only limit(1) with the
        # limit of 100 that the message says was intended.
        accesses = IndexerApiAccess().select(
            IndexerApiAccess.response_time, IndexerApiAccess.time).where(
            (IndexerApiAccess.response_successful) & (IndexerApiAccess.indexer == p)).join(Indexer).limit(100)
        result.append({"key": p.name, "values": [{"responseTime": x.response_time, "date": x.time.timestamp} for x in accesses]})
    return result
def getIndexerDownloadStats():
    """Count NZB downloads per enabled indexer and their percentage share of
    all downloads.

    Returns [{"name", "total", "share"}, ...] sorted by share descending,
    name as tie-breaker.
    """
    results = []
    allDownloadsCount = IndexerNzbDownload.select().count()
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        dlCount = IndexerNzbDownload().\
            select(Indexer.name, IndexerApiAccess.response_successful). \
            join(IndexerApiAccess, JOIN.LEFT_OUTER). \
            join(Indexer, JOIN.LEFT_OUTER).\
            where(Indexer.id == p).\
            count()
        # Fix: 100 / (allDownloadsCount / dlCount) truncates the inner
        # division under integer arithmetic (e.g. 10/3 -> 3 -> 33% instead
        # of 30%); 100 * dlCount / allDownloadsCount is exact and simpler.
        share = 100 * dlCount / allDownloadsCount if allDownloadsCount > 0 else 0
        results.append({
            "name": p.name,
            "total": dlCount,
            "share": share
        })
    results = sorted(results, key=lambda x: x["name"])
    results = sorted(results, key=lambda x: x["share"], reverse=True)
    return results
def get_indexer_response_times():
    """Collect successful API response times per indexer, ordered by indexer
    name, for charting.

    Returns [{"key": <indexer name>, "values": [{"responseTime", "date"}, ...]}, ...].
    """
    result = []
    for p in Indexer.select().order_by(Indexer.name):
        # Fix: dropped the leftover debug print ("Limiting stats to 100 for
        # testing only!") and the test-only limit(1); use the intended 100.
        accesses = IndexerApiAccess().select(
            IndexerApiAccess.response_time, IndexerApiAccess.time).where(
            (IndexerApiAccess.response_successful) & (IndexerApiAccess.indexer == p)).join(Indexer).limit(100)
        result.append({"key": p.name, "values": [{"responseTime": x.response_time, "date": x.time.timestamp} for x in accesses]})
    return result
def get_avg_indexer_search_results_share():
    """For every indexer, compute its percentage share of the results of all
    searches it participated in. Yields "N/A" when no data exists."""
    results = []
    for indexerEntry in Indexer.select().order_by(Indexer.name):
        row = database.db.execute_sql(
            "select (100 * (select cast(sum(ps.results) as float) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d) and ps.indexer_id == %d)) / (select sum(ps.results) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d)) as sumAllResults" % (
                indexerEntry.id, indexerEntry.id, indexerEntry.id)).fetchone()
        share = "N/A" if row[0] is None else row[0]
        results.append({"name": indexerEntry.name, "avgResultsShare": share})
    return results
def get_avg_indexer_search_results_share(afterSql, beforeSql):
    """For every enabled indexer in the given time window, compute the average
    share of search results it contributed and the average percentage of
    unique results. Womble is skipped (update queries without a specific
    query/ID are excluded); raw-search indexers report "-" for uniqueness."""
    results = []
    for indexerEntry in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(indexerEntry.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % indexerEntry.name)
                continue
            if indexer.settings.name == "Womble":
                logger.debug("Skipping download stats for Womble because we exclude update queries without specific query or ID")
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % indexerEntry.name)
            continue
        # Sub-select restricting to successful, query/ID-based searches of
        # this indexer inside the window (time filter applies to the outer ps).
        innerSelect = """(SELECT ps.search_id FROM indexersearch ps, search s WHERE ps.indexer_id == %(id)d AND ps.search_id = s.id AND ps.successful AND (s.episode NOT NULL OR s.season NOT NULL OR s.identifier_key NOT NULL OR s.query NOT NULL)) AND ps.time > %(after)s and ps.time < %(before)s""" % {"id": indexerEntry.id, "after": afterSql, "before": beforeSql}
        shareRow = database.db.execute_sql(
            """
            SELECT (100 *
                    (SELECT cast(sum(ps.resultsCount) AS FLOAT)
                     FROM indexersearch ps
                     WHERE ps.search_id IN %s AND ps.indexer_id == %d))
                   /
                   (SELECT sum(ps.resultsCount)
                    FROM indexersearch ps
                    WHERE ps.search_id IN %s) AS sumAllResults
            """ % (innerSelect, indexerEntry.id, innerSelect)).fetchone()
        if shareRow is not None and len(shareRow) > 0 and shareRow[0] is not None:
            avgResultsShare = int(shareRow[0])
        else:
            avgResultsShare = "N/A"
        uniqueRow = database.db.execute_sql(
            """
            SELECT avg(CASE WHEN uniqueResults > 0
                            THEN 100 / (processedResults * 1.0 / uniqueResults)
                            ELSE 0 END) as avgUniqueResults
            FROM indexersearch s
            WHERE processedResults IS NOT NULL
              AND uniqueResults IS NOT NULL
              AND s.indexer_id == %(id)d
              AND s.time > %(after)s and s.time < %(before)s
            GROUP BY indexer_id;
            """ % {"id": indexerEntry.id, "after": afterSql, "before": beforeSql}).fetchone()
        if indexerEntry.name in ["NZBIndex", "Binsearch", "NZBClub"]:
            avgUniqueResults = "-"
        elif uniqueRow is not None and len(uniqueRow) > 0 and uniqueRow[0] is not None:
            avgUniqueResults = int(uniqueRow[0])
        else:
            avgUniqueResults = "N/A"
        results.append({"name": indexerEntry.name,
                        "avgResultsShare": avgResultsShare,
                        "avgUniqueResults": avgUniqueResults})
    results.sort(key=lambda x: x["name"])
    results.sort(key=lambda x: 0 if x["avgResultsShare"] == "N/A" else x["avgResultsShare"], reverse=True)
    return results
def getIndexerDownloadStats():
    """Per-indexer download totals and their share of all downloads."""
    results = []
    allDownloadsCount = IndexerNzbDownload.select().count()
    for indexerEntry in Indexer.select().order_by(Indexer.name):
        downloads = (IndexerNzbDownload()
                     .select(Indexer.name, IndexerApiAccess.response_successful)
                     .join(IndexerSearch, JOIN.LEFT_OUTER)
                     .join(Search, JOIN.LEFT_OUTER)
                     .switch(IndexerNzbDownload)
                     .join(IndexerApiAccess, JOIN.LEFT_OUTER)
                     .join(Indexer, JOIN.LEFT_OUTER)
                     .where(Indexer.id == indexerEntry))
        dlCount = downloads.count()
        if allDownloadsCount > 0 and dlCount > 0:
            share = 100 / (allDownloadsCount / dlCount)
        else:
            share = 0
        results.append({"name": indexerEntry.name, "total": dlCount, "share": share})
    return results
def get_avg_indexer_search_results_share():
    """For every enabled indexer, compute the average share of search results
    it contributed (restricted to successful, query/ID-based searches) and the
    average percentage of unique results. Womble is skipped; raw-search
    indexers report "-" for uniqueness."""
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
            if indexer.settings.name == "Womble":
                logger.debug("Skipping download stats for Womble because we exclude update queries without specific query or ID")
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        shareRow = database.db.execute_sql(
            """
            SELECT (100 *
                    (SELECT cast(sum(ps.resultsCount) AS FLOAT)
                     FROM indexersearch ps
                     WHERE ps.search_id IN
                           (SELECT ps.search_id
                            FROM indexersearch ps, search s
                            WHERE ps.indexer_id == %d AND ps.search_id = s.id AND ps.successful
                              AND (s.episode NOT NULL OR s.season NOT NULL OR s.identifier_key NOT NULL OR s.query NOT NULL))
                       AND ps.indexer_id == %d))
                   /
                   (SELECT sum(ps.resultsCount)
                    FROM indexersearch ps
                    WHERE ps.search_id IN
                          (SELECT ps.search_id
                           FROM indexersearch ps, search s
                           WHERE ps.indexer_id == %d AND ps.search_id = s.id AND ps.successful
                             AND (s.episode NOT NULL OR s.season NOT NULL OR s.identifier_key NOT NULL OR s.query NOT NULL))) AS sumAllResults
            """ % (p.id, p.id, p.id)).fetchone()
        if shareRow is not None and shareRow[0] is not None:
            avgResultsShare = int(shareRow[0])
        else:
            avgResultsShare = "N/A"
        uniqueRow = database.db.execute_sql(
            """
            SELECT avg(CASE WHEN uniqueResults > 0
                            THEN 100 / (processedResults * 1.0 / uniqueResults)
                            ELSE 0 END) as avgUniqueResults
            FROM indexersearch
            WHERE processedResults IS NOT NULL
              AND uniqueResults IS NOT NULL
              AND indexer_id == %d
            GROUP BY indexer_id;
            """ % p.id).fetchone()
        if p.name in ["NZBIndex", "Binsearch", "NZBClub"]:
            avgUniqueResults = "-"
        elif uniqueRow is not None and uniqueRow[0] is not None:
            avgUniqueResults = int(uniqueRow[0])
        else:
            avgUniqueResults = "N/A"
        results.append({"name": p.name,
                        "avgResultsShare": avgResultsShare,
                        "avgUniqueResults": avgUniqueResults})
    results.sort(key=lambda x: x["name"])
    results.sort(key=lambda x: 0 if x["avgResultsShare"] == "N/A" else x["avgResultsShare"], reverse=True)
    return results
def get_avg_indexer_search_results_share(afterSql, beforeSql):
    """Average result share and average unique-result percentage per enabled
    indexer, restricted to the time window given by the pre-formatted SQL
    bounds afterSql/beforeSql."""
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        if not indexer.settings.enabled:
            logger.debug("Skipping download stats for %s" % p.name)
            continue
        # Restrict to successful query/ID-based searches of this indexer;
        # the trailing time filter applies to the outer "ps" alias.
        innerSelect = """(SELECT ps.search_id FROM indexersearch ps, search s WHERE ps.indexer_id == %(id)d AND ps.search_id = s.id AND ps.successful AND (s.episode NOT NULL OR s.season NOT NULL OR s.identifier_key NOT NULL OR s.query NOT NULL)) AND ps.time > %(after)s and ps.time < %(before)s""" % {"id": p.id, "after": afterSql, "before": beforeSql}
        shareRow = database.db.execute_sql(
            """
            SELECT (100 *
                    (SELECT cast(sum(ps.resultsCount) AS FLOAT)
                     FROM indexersearch ps
                     WHERE ps.search_id IN %s AND ps.indexer_id == %d))
                   /
                   (SELECT sum(ps.resultsCount)
                    FROM indexersearch ps
                    WHERE ps.search_id IN %s) AS sumAllResults
            """ % (innerSelect, p.id, innerSelect)).fetchone()
        hasShare = shareRow is not None and len(shareRow) > 0 and shareRow[0] is not None
        avgResultsShare = int(shareRow[0]) if hasShare else "N/A"
        uniqueRow = database.db.execute_sql(
            """
            SELECT avg(CASE WHEN uniqueResults > 0
                            THEN 100 / (processedResults * 1.0 / uniqueResults)
                            ELSE 0 END) as avgUniqueResults
            FROM indexersearch s
            WHERE processedResults IS NOT NULL
              AND uniqueResults IS NOT NULL
              AND s.indexer_id == %(id)d
              AND s.time > %(after)s and s.time < %(before)s
            GROUP BY indexer_id;
            """ % {"id": p.id, "after": afterSql, "before": beforeSql}).fetchone()
        if p.name in ["NZBIndex", "Binsearch", "NZBClub"]:
            avgUniqueResults = "-"
        elif uniqueRow is not None and len(uniqueRow) > 0 and uniqueRow[0] is not None:
            avgUniqueResults = int(uniqueRow[0])
        else:
            avgUniqueResults = "N/A"
        results.append({"name": p.name,
                        "avgResultsShare": avgResultsShare,
                        "avgUniqueResults": avgUniqueResults})
    results = sorted(results, key=lambda x: x["name"])
    results = sorted(results, key=lambda x: 0 if x["avgResultsShare"] == "N/A" else x["avgResultsShare"], reverse=True)
    return results
def getIndexerBasedDownloadStats(afterSql, beforeSql):
    """Download counts and percentage share per enabled indexer, limited to
    the window given by the pre-formatted SQL bounds afterSql/beforeSql.

    Returns [{"name", "total", "share"}, ...] sorted by share descending,
    name as tie-breaker.
    """
    enabledIndexerIds = []
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug(
                    "Skipping download stats for %s because it's disabled" % p.name)
                continue
            enabledIndexerIds.append(str(p.id))
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
    enabledIndexerIds = ", ".join(enabledIndexerIds)
    # Fix: the outer query now applies the same time window as the countall
    # subquery. Previously per-indexer totals were counted over all time while
    # the grand total was time-restricted, producing inconsistent shares.
    query = """
        SELECT
            indexer.name,
            count(*) AS total,
            CASE WHEN count(*) > 0
                 THEN 100 / (1.0 * countall.countall / count(*))
                 ELSE 0 END AS share
        FROM indexernzbdownload dl,
            (SELECT count(*) AS countall
             FROM indexernzbdownload dl
                 LEFT OUTER JOIN indexerapiaccess api ON dl.apiAccess_id = api.id
             WHERE api.indexer_id IN (%(enabledIndexerIds)s)
                 AND api.time > %(afterSql)s
                 AND api.time < %(beforeSql)s
            ) countall
            LEFT OUTER JOIN indexerapiaccess api ON dl.apiAccess_id = api.id
            LEFT OUTER JOIN indexer indexer ON api.indexer_id = indexer.id
        WHERE api.indexer_id IN (%(enabledIndexerIds)s)
            AND api.time > %(afterSql)s
            AND api.time < %(beforeSql)s
        GROUP BY indexer.id
        """ % {
        "enabledIndexerIds": enabledIndexerIds,
        "afterSql": afterSql,
        "beforeSql": beforeSql
    }
    stats = database.db.execute_sql(query).fetchall()
    stats = [{"name": x[0], "total": x[1], "share": x[2]} for x in stats]
    stats = sorted(stats, key=lambda x: x["name"])
    stats = sorted(stats, key=lambda x: x["share"], reverse=True)
    return stats
def get_avg_indexer_search_results_share():
    """Average percentage of all search results contributed by each indexer;
    entries without data report "N/A"."""
    results = []
    for p in Indexer.select().order_by(Indexer.name):
        row = database.db.execute_sql(
            "select (100 * (select cast(sum(ps.results) as float) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d) and ps.indexer_id == %d)) / (select sum(ps.results) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d)) as sumAllResults" % (p.id, p.id, p.id)).fetchone()
        entry = {"name": p.name}
        entry["avgResultsShare"] = row[0] if row[0] is not None else "N/A"
        results.append(entry)
    return results
def get_avg_indexer_response_times():
    """Average successful response time per indexer plus each indexer's delta
    to the overall average response time.

    Returns [{"name", "avgResponseTime", "delta"}, ...].
    """
    result = []
    response_times = []
    for p in Indexer.select().order_by(Indexer.name):
        avg_response_time = IndexerApiAccess().select(fn.AVG(IndexerApiAccess.response_time)).where((IndexerApiAccess.response_successful) & (IndexerApiAccess.indexer == p)).tuples()[0][0]
        if avg_response_time:
            response_times.append({"name": p.name, "avgResponseTime": avg_response_time})
    # Fix: "IndexerApiAccess.response_time is not None" was evaluated by
    # Python (always True) and never reached SQL; is_null(False) emits
    # "response_time IS NOT NULL" as intended.
    avg_response_time = IndexerApiAccess().select(fn.AVG(IndexerApiAccess.response_time)).where((IndexerApiAccess.response_successful) & (IndexerApiAccess.response_time.is_null(False))).tuples()[0][0]
    for i in response_times:
        delta = i["avgResponseTime"] - avg_response_time
        i["delta"] = delta
        result.append(i)
    return result
def getIndexerBasedDownloadStats(afterSql, beforeSql):
    """Download totals and percentage shares per enabled indexer inside the
    window given by the pre-formatted SQL bounds afterSql/beforeSql."""
    enabledIds = []
    for indexerEntry in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(indexerEntry.name)
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % indexerEntry.name)
            continue
        if not indexer.settings.enabled:
            logger.debug("Skipping download stats for %s because it's disabled" % indexerEntry.name)
            continue
        enabledIds.append(str(indexerEntry.id))
    enabledIndexerIds = ", ".join(enabledIds)
    # Both the grand total (countall) and the per-indexer groups use the same
    # indexer and time filters so shares are consistent.
    query = """
        SELECT
            indexer.name,
            count(*) AS total,
            CASE WHEN count(*) > 0
                 THEN 100 / (1.0 * countall.countall / count(*))
                 ELSE 0 END AS share
        FROM indexernzbdownload dl,
            (SELECT count(*) AS countall
             FROM indexernzbdownload dl
                 LEFT OUTER JOIN indexerapiaccess api ON dl.apiAccess_id = api.id
             WHERE api.indexer_id IN (%(enabledIndexerIds)s)
                 AND api.time > %(afterSql)s
                 AND api.time < %(beforeSql)s) countall
            LEFT OUTER JOIN indexerapiaccess api ON dl.apiAccess_id = api.id
            LEFT OUTER JOIN indexer indexer ON api.indexer_id = indexer.id
        WHERE api.indexer_id IN (%(enabledIndexerIds)s)
            AND api.time > %(afterSql)s
            AND api.time < %(beforeSql)s
        GROUP BY indexer.id
        """ % {"enabledIndexerIds": enabledIndexerIds, "afterSql": afterSql, "beforeSql": beforeSql}
    rows = database.db.execute_sql(query).fetchall()
    stats = [{"name": name, "total": total, "share": share} for name, total, share in rows]
    stats.sort(key=lambda x: x["name"])
    stats.sort(key=lambda x: x["share"], reverse=True)
    return stats
def get_avg_indexer_search_results_share():
    """Average share of search results per enabled indexer; disabled or
    unconfigured indexers are skipped."""
    results = []
    for indexerEntry in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(indexerEntry.name)
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % indexerEntry.name)
            continue
        if not indexer.settings.enabled:
            logger.debug("Skipping download stats for %s" % indexerEntry.name)
            continue
        row = database.db.execute_sql(
            "select (100 * (select cast(sum(ps.resultsCount) as float) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d) and ps.indexer_id == %d)) / (select sum(ps.resultsCount) from indexersearch ps where ps.search_id in (select ps.search_id from indexersearch ps where ps.indexer_id == %d)) as sumAllResults" % (
                indexerEntry.id, indexerEntry.id, indexerEntry.id)).fetchone()
        share = "N/A" if row[0] is None else row[0]
        results.append({"name": indexerEntry.name, "avgResultsShare": share})
    return results
def getIndexerDownloadStats():
    """Per-indexer download totals and their share of all downloads, joined
    through the search and API-access tables."""
    results = []
    allDownloadsCount = IndexerNzbDownload.select().count()
    for p in Indexer.select().order_by(Indexer.name):
        dlCount = IndexerNzbDownload() \
            .select(Indexer.name, IndexerApiAccess.response_successful) \
            .join(IndexerSearch, JOIN.LEFT_OUTER) \
            .join(Search, JOIN.LEFT_OUTER) \
            .switch(IndexerNzbDownload) \
            .join(IndexerApiAccess, JOIN.LEFT_OUTER) \
            .join(Indexer, JOIN.LEFT_OUTER) \
            .where(Indexer.id == p) \
            .count()
        share = 100 / (allDownloadsCount / dlCount) if allDownloadsCount > 0 and dlCount > 0 else 0
        results.append({"name": p.name, "total": dlCount, "share": share})
    return results
def getIndexerDownloadStats():
    """NZB download totals per enabled indexer and each indexer's percentage
    share of all downloads.

    Returns [{"name", "total", "share"}, ...] in database iteration order.
    """
    results = []
    allDownloadsCount = IndexerNzbDownload.select().count()
    for p in Indexer.select().order_by(Indexer.name):
        try:
            indexer = getIndexerByName(p.name)
            if not indexer.settings.enabled:
                logger.debug("Skipping download stats for %s" % p.name)
                continue
        except IndexerNotFoundException:
            logger.error("Unable to find indexer %s in configuration" % p.name)
            continue
        dlCount = IndexerNzbDownload().\
            select(Indexer.name, IndexerApiAccess.response_successful). \
            join(IndexerApiAccess, JOIN.LEFT_OUTER). \
            join(Indexer, JOIN.LEFT_OUTER).\
            where(Indexer.id == p).\
            count()
        # Fix: 100 / (allDownloadsCount / dlCount) truncates the inner
        # division under integer arithmetic; 100 * dlCount / allDownloadsCount
        # is exact and needs only one guard.
        share = 100 * dlCount / allDownloadsCount if allDownloadsCount > 0 else 0
        results.append({"name": p.name, "total": dlCount, "share": share})
    return results
def get_avg_indexer_response_times():
    """Average successful response time per indexer and the delta to the
    overall average across all indexers.

    Returns [{"name", "avgResponseTime", "delta"}, ...].
    """
    result = []
    response_times = []
    for p in Indexer.select().order_by(Indexer.name):
        avg_response_time = IndexerApiAccess().select(
            fn.AVG(IndexerApiAccess.response_time)).where(
            (IndexerApiAccess.response_successful) &
            (IndexerApiAccess.indexer == p)).tuples()[0][0]
        if avg_response_time:
            response_times.append({
                "name": p.name,
                "avgResponseTime": avg_response_time
            })
    # Fix: "IndexerApiAccess.response_time is not None" was a Python identity
    # check on the field object (always True) and generated no SQL filter;
    # is_null(False) emits "response_time IS NOT NULL".
    avg_response_time = IndexerApiAccess().select(
        fn.AVG(IndexerApiAccess.response_time)).where(
        (IndexerApiAccess.response_successful) &
        (IndexerApiAccess.response_time.is_null(False))).tuples()[0][0]
    for i in response_times:
        delta = i["avgResponseTime"] - avg_response_time
        i["delta"] = delta
        result.append(i)
    return result