def init_indexer_table_entry(indexer_name):
    """Ensure an Indexer row with the given name exists (case-insensitive lookup).

    If no matching row is found, create the Indexer and an associated
    IndexerStatus row with empty failure bookkeeping.

    :param indexer_name: display name of the indexer to look up or create
    """
    try:
        Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
    except Indexer.DoesNotExist:
        # Fixed: dropped the unused "as e" binding and switched to lazy logging args.
        logger.info("Unable to find indexer with name %s in database. Will add it", indexer_name)
        indexer = Indexer().create(name=indexer_name)
        # Fresh indexer starts with a clean status (no failures, not disabled).
        IndexerStatus.create_or_get(indexer=indexer, first_failure=None, latest_failure=None, disabled_until=None)
def init_indexer_table_entry(indexer_name):
    """Look up the indexer by name (case-insensitive) and create a database entry if none exists."""
    lowered = indexer_name.lower()
    try:
        Indexer.get(fn.lower(Indexer.name) == lowered)
    except Indexer.DoesNotExist:
        # Not present yet -> add it to the database.
        logger.info(
            "Unable to find indexer with name %s in database. Will add it" % indexer_name)
        Indexer().create(name=indexer_name)
def get_indexer_nzb_link(indexer_name, indexerguid, title, searchid, mode, log_api_access):
    """
    Build a link that leads to the actual NZB of the indexer using the given informations. We log this as indexer
    API access and NZB download because this is only called when the NZB will be actually downloaded later
    (by us or a downloader).

    :param indexer_name: name of the indexer to resolve the link against
    :param indexerguid: the indexer-side GUID identifying the NZB
    :param title: title used to build the link
    :param searchid: id of the IndexerSearch entry the download belongs to
    :param mode: download mode stored with the IndexerNzbDownload entry
    :param log_api_access: when falsy, no IndexerApiAccess row is created
    :return: (link, papiaccess, pnzbdl) on success, (None, None, None) if the indexer is unknown
    """
    for p in indexers.enabled_indexers:
        if p.name.strip() == indexer_name.strip():
            link = p.get_nzb_link(indexerguid, title)

            # Log to database
            indexer = Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
            papiaccess = IndexerApiAccess(indexer=p.indexer, type="nzb", url=link, response_successful=None, indexer_search=searchid) if log_api_access else None
            # Fixed: when log_api_access is falsy papiaccess is None; the previous code
            # unconditionally set papiaccess.username and called papiaccess.save(),
            # raising AttributeError (only RuntimeError was caught).
            if papiaccess is not None:
                try:
                    # request.authorization raises RuntimeError outside a request context.
                    papiaccess.username = request.authorization.username if request.authorization is not None else None
                except RuntimeError:
                    pass
                papiaccess.save()
            pnzbdl = IndexerNzbDownload(indexer=indexer, indexer_search=searchid, api_access=papiaccess, mode=mode, title=title, guid=indexerguid)
            pnzbdl.save()
            return link, papiaccess, pnzbdl
    else:
        # for-else: no indexer matched the requested name.
        logger.error("Did not find indexer with name %s" % indexer_name)
        return None, None, None
def search_and_handle_db(dbsearch, indexers_and_search_requests):
    """Run the given indexer searches and persist all search bookkeeping.

    Saves the Search row, then — under the database lock, inside one
    transaction — the per-indexer IndexerSearch, IndexerApiAccess and
    IndexerStatus rows for every indexer that actually searched.

    :param dbsearch: the Search model instance for this request
    :param indexers_and_search_requests: mapping handed to start_search_futures
    :return: dict with "results" (per-indexer results) and "dbsearch"
    """
    results_by_indexer = start_search_futures(indexers_and_search_requests)
    dbsearch.username = request.authorization.username if request.authorization is not None else None
    saveSearch(dbsearch)
    with databaseLock:
        with db.atomic():
            for indexer, result in results_by_indexer.items():
                if result.didsearch:
                    indexersearchentry = result.indexerSearchEntry
                    indexersearchentry.search = dbsearch
                    indexersearchentry.save()
                    result.indexerApiAccessEntry.username = request.authorization.username if request.authorization is not None else None
                    try:
                        result.indexerApiAccessEntry.indexer = Indexer.get(Indexer.name == indexer)
                        result.indexerApiAccessEntry.save()
                        result.indexerStatus.save()
                    except Indexer.DoesNotExist:
                        # Fixed: lazy logging args instead of eager % interpolation.
                        logger.error("Tried to save indexer API access but no indexer with name %s was found in the database. Adding it now. This shouldn't've happened. If possible send a bug report with a full log.", indexer)
                        Indexer().create(name=indexer)
                    except Exception:
                        # Fixed: dropped the unused "as e" binding; logger.exception keeps the traceback.
                        logger.exception("Error saving IndexerApiAccessEntry")
    return {"results": results_by_indexer, "dbsearch": dbsearch}
def search_and_handle_db(dbsearch, indexers_and_search_requests):
    # Run all indexer searches concurrently and persist the search plus
    # per-indexer bookkeeping rows (IndexerSearch, IndexerApiAccess, IndexerStatus).
    results_by_indexer = start_search_futures(indexers_and_search_requests)
    # Attach the requesting user's name (None when the request is unauthenticated).
    dbsearch.username = request.authorization.username if request.authorization is not None else None
    with db.atomic():
        dbsearch.save()
    for indexer, result in results_by_indexer.items():
        # Only persist bookkeeping for indexers that actually performed a search.
        if result.didsearch:
            # NOTE(review): the original formatting was lost; the try block is assumed
            # to sit inside this per-indexer transaction — confirm against history.
            with db.atomic():
                indexersearchentry = result.indexerSearchEntry
                indexersearchentry.search = dbsearch
                indexersearchentry.save()
                result.indexerApiAccessEntry.username = request.authorization.username if request.authorization is not None else None
                try:
                    result.indexerApiAccessEntry.indexer = Indexer.get(Indexer.name == indexer)
                    result.indexerApiAccessEntry.save()
                    result.indexerStatus.save()
                except Exception:
                    # Best effort: a failed bookkeeping write must not break the search response.
                    logger.error("Error saving IndexerApiAccessEntry. Debug info: %s" % json.dumps(model_to_dict(result.indexerApiAccessEntry)))
    logger.debug("Returning search results now")
    return {"results": results_by_indexer, "dbsearch": dbsearch}
def get_nzb_link(indexer_name, guid, title, searchid):
    """
    Build a link that leads to the actual NZB of the indexer using the given informations. We log this as indexer
    API access and NZB download because this is only called when the NZB will be actually downloaded later
    (by us or a downloader).

    :param indexer_name: name of the indexer
    :param guid: guid to build the link
    :param title: the title to build the link
    :param searchid: id of the IndexerSearch entry the download belongs to
    :return: str link on success, None if the indexer is unknown
    """
    for p in indexers.enabled_indexers:
        if p.name == indexer_name:
            link = p.get_nzb_link(guid, title)

            # Log to database
            indexer = Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
            # Fixed: indexer_search previously received the Indexer model instead of the
            # search id (compare get_indexer_nzb_link / download_nzb_and_log).
            papiaccess = IndexerApiAccess(indexer=p.indexer, type="nzb", url=link, response_successful=None, indexer_search=searchid)
            papiaccess.save()
            pnzbdl = IndexerNzbDownload(indexer=indexer, indexer_search=searchid, api_access=papiaccess, mode="redirect")
            pnzbdl.save()
            return link
    else:
        # for-else: no indexer matched the requested name.
        logger.error("Did not find indexer with name %s" % indexer_name)
        return None
def testHandleIndexerFailureAndSuccess(self):
    """Verify failure escalates the indexer's disable level/period and success resets it."""
    indexer_model = Indexer.get(Indexer.name == "NZBs.org")
    with freeze_time("2015-09-20 14:00:00", tz_offset=-4):
        sm = search_module.SearchModule(indexer_model)
        sm.handle_indexer_failure(indexer_model)
        # First error, so level 1
        self.assertEqual(1, indexer_model.status.get().level)
        now = arrow.utcnow()
        # Fixed: removed the redundant nested arrow.get(arrow.get(...)) double conversion.
        first_failure = arrow.get(indexer_model.status.get().first_failure)
        disabled_until = arrow.get(indexer_model.status.get().disabled_until)
        self.assertEqual(now, first_failure)
        self.assertEqual(now.replace(minutes=+sm.disable_periods[1]), disabled_until)

        # Second failure escalates to level 2 with a longer disable period.
        sm.handle_indexer_failure()
        self.assertEqual(2, indexer_model.status.get().level)
        disabled_until = arrow.get(indexer_model.status.get().disabled_until)
        self.assertEqual(now.replace(minutes=+sm.disable_periods[2]), disabled_until)

        # A success de-escalates the level and clears the disable window and reason.
        sm.handle_indexer_success()
        self.assertEqual(1, indexer_model.status.get().level)
        self.assertEqual(arrow.get(0), indexer_model.status.get().disabled_until)
        self.assertIsNone(indexer_model.status.get().reason)
def download_nzb_and_log(indexer_name, provider_guid, title, searchid):
    """
    Gets the NZB link from the indexer using the guid, downloads it and logs the download

    :param indexer_name: name of the indexer
    :param provider_guid: guid to build link
    :param title: the title to build the link
    :param searchid: the id of the IndexerSearch entry so we can link the download to a search
    :return: IndexerNzbDownloadResult on success, None on download error, or an error string if the indexer is unknown
    """
    for p in indexers.enabled_indexers:
        if p.name == indexer_name:
            link = p.get_nzb_link(provider_guid, title)

            # Resolve the database rows this download should be linked to.
            indexer = Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
            psearch = IndexerSearch.get((IndexerSearch.indexer == indexer) & (IndexerSearch.search == searchid))
            papiaccess = IndexerApiAccess(indexer=p.indexer, type="nzb", url=link, indexer_search=psearch)
            papiaccess.save()
            # NOTE(review): "guid" returned here is unused; only the internal link is stored.
            internallink, guid = get_nzb_link_and_guid(indexer_name, provider_guid, searchid, title)
            pnzbdl = IndexerNzbDownload(indexer=indexer, indexer_search=searchid, api_access=papiaccess, mode="serve", title=title, guid=internallink)
            pnzbdl.save()
            try:
                r = p.get(link, timeout=10)
                r.raise_for_status()
                papiaccess.response_successful = True
                papiaccess.response_time = r.elapsed.microseconds / 1000
                return IndexerNzbDownloadResult(content=r.content, headers=r.headers)
            except RequestException as e:
                logger.error("Error while connecting to URL %s: %s" % (link, str(e)))
                papiaccess.error = str(e)
                return None
            finally:
                # Runs before either return above so the outcome is always persisted.
                papiaccess.save()
    else:
        # for-else: no indexer matched the requested name.
        return "Unable to find NZB link"
def get_indexer_nzb_link(indexer_name, indexerguid, title, searchid, mode, log_api_access):
    """
    Build a link that leads to the actual NZB of the indexer using the given informations. We log this as indexer
    API access and NZB download because this is only called when the NZB will be actually downloaded later
    (by us or a downloader).

    :param indexer_name: name of the indexer to resolve the link against
    :param indexerguid: the indexer-side GUID identifying the NZB
    :param title: title used to build the link
    :param searchid: id of the IndexerSearch entry the download belongs to
    :param mode: download mode stored with the IndexerNzbDownload entry
    :param log_api_access: when falsy, no IndexerApiAccess row is created
    :return: (link, papiaccess, pnzbdl) on success, (None, None, None) if the indexer is unknown
    """
    for p in indexers.enabled_indexers:
        if p.name.strip() == indexer_name.strip():
            link = p.get_nzb_link(indexerguid, title)

            # Log to database
            indexer = Indexer.get(
                fn.lower(Indexer.name) == indexer_name.lower())
            papiaccess = IndexerApiAccess(
                indexer=p.indexer, type="nzb", url=link, response_successful=None, indexer_search=searchid) if log_api_access else None
            # Fixed: when log_api_access is falsy papiaccess is None; the previous code
            # set papiaccess.username and called papiaccess.save() unconditionally,
            # raising AttributeError (only RuntimeError was caught).
            if papiaccess is not None:
                try:
                    # request.authorization raises RuntimeError outside a request context.
                    papiaccess.username = request.authorization.username if request.authorization is not None else None
                except RuntimeError:
                    pass
                papiaccess.save()
            pnzbdl = IndexerNzbDownload(indexer=indexer, indexer_search=searchid, api_access=papiaccess, mode=mode, title=title, guid=indexerguid)
            pnzbdl.save()
            return link, papiaccess, pnzbdl
    else:
        # for-else: no indexer matched the requested name.
        logger.error("Did not find indexer with name %s" % indexer_name)
        return None, None, None
def search_and_handle_db(dbsearch, indexers_and_search_requests):
    """Run the given indexer searches, then persist the search and all per-indexer bookkeeping rows."""
    results_by_indexer = start_search_futures(indexers_and_search_requests)
    dbsearch.username = request.authorization.username if request.authorization is not None else None
    saveSearch(dbsearch)
    with databaseLock, db.atomic():
        for indexer_name, search_result in results_by_indexer.items():
            if not search_result.didsearch:
                continue
            # Link the per-indexer search entry to the overall search.
            search_entry = search_result.indexerSearchEntry
            search_entry.search = dbsearch
            search_entry.save()
            api_access = search_result.indexerApiAccessEntry
            api_access.username = request.authorization.username if request.authorization is not None else None
            try:
                api_access.indexer = Indexer.get(Indexer.name == indexer_name)
                api_access.save()
                search_result.indexerStatus.save()
            except Indexer.DoesNotExist:
                logger.error("Tried to save indexer API access but no indexer with name %s was found in the database. Adding it now. This shouldn't've happened. If possible send a bug report with a full log." % indexer_name)
                Indexer().create(name=indexer_name)
            except Exception as e:
                logger.exception("Error saving IndexerApiAccessEntry")
    return {"results": results_by_indexer, "dbsearch": dbsearch}
def testHandleIndexerFailureAndSuccess(self):
    """Verify failure escalates the indexer's disable level/period and success resets it."""
    Indexer(module="newznab", name="newznab1").save()
    indexer_model = Indexer.get(Indexer.name == "newznab1")
    with freeze_time("2015-09-20 14:00:00", tz_offset=-4):
        sm = search_module.SearchModule(self.newznab1)
        sm.handle_indexer_failure(indexer_model)
        # First error, so level 1
        self.assertEqual(1, indexer_model.status.get().level)
        now = arrow.utcnow()
        # Fixed: removed the redundant nested arrow.get(arrow.get(...)) double conversion.
        first_failure = arrow.get(indexer_model.status.get().first_failure)
        disabled_until = arrow.get(indexer_model.status.get().disabled_until)
        self.assertEqual(now, first_failure)
        self.assertEqual(now.replace(minutes=+sm.disable_periods[1]), disabled_until)

        # Second failure escalates to level 2 with a longer disable period.
        sm.handle_indexer_failure()
        self.assertEqual(2, indexer_model.status.get().level)
        disabled_until = arrow.get(indexer_model.status.get().disabled_until)
        self.assertEqual(now.replace(minutes=+sm.disable_periods[2]), disabled_until)

        # A success de-escalates the level and clears the disable window and reason.
        sm.handle_indexer_success()
        self.assertEqual(1, indexer_model.status.get().level)
        self.assertEqual(arrow.get(0), indexer_model.status.get().disabled_until)
        self.assertIsNone(indexer_model.status.get().reason)
def indexer(self):
    """Lazily load and cache the Indexer database row matching this module's configured name."""
    if self.indexerDb is None:
        name_matches = fn.lower(Indexer.name) == self.settings.name.lower()
        self.indexerDb = Indexer.get(name_matches)
    return self.indexerDb
def indexer(self):
    """Fetch the Indexer database row whose name matches this module's configured name (case-insensitive)."""
    wanted_name = self.settings.name.lower()
    return Indexer.get(fn.lower(Indexer.name) == wanted_name)
def testThatDatabaseValuesAreStored(self):
    # Integration test: run an internal search against two mocked newznab indexers
    # (the second one forced to fail with HTTP 500) and verify that Search,
    # IndexerSearch, IndexerApiAccess and IndexerStatus rows are written as expected.
    with self.app.test_request_context('/'):
        with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
            newznabItems = [
                [mockbuilder.buildNewznabItem(title="title1", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab1")],
                [mockbuilder.buildNewznabItem(title="title2", pubdate=arrow.get(1000).format("ddd, DD MMM YYYY HH:mm:ss Z"), size=1000, indexer_name="newznab2")]
            ]

            self.prepareSearchMocks(rsps, indexerCount=len(newznabItems), newznabItems=newznabItems)
            # Make the second access unsuccessful
            # NOTE(review): reaches into RequestsMock's private _urls attribute — fragile
            # across responses library versions.
            rsps._urls.pop(1)
            rsps.add(responses.GET, r".*", body="an error message", status=500, content_type='application/x-html')

            searchRequest = SearchRequest(type="search", query="aquery", category="acategory", identifier_key="imdbid", identifier_value="animdbid", season=1, episode=2, indexers="newznab1|newznab2")
            result = search.search(searchRequest)
            results = result["results"]
            # Only the successful indexer contributes results.
            self.assertEqual(1, len(results))

            # The Search row reflects the request parameters (category normalized to "All").
            dbSearch = Search().get()
            self.assertEqual(True, dbSearch.internal)
            self.assertEqual("aquery", dbSearch.query)
            self.assertEqual("All", dbSearch.category)
            self.assertEqual("imdbid", dbSearch.identifier_key)
            self.assertEqual("animdbid", dbSearch.identifier_value)
            self.assertEqual("1", dbSearch.season)
            self.assertEqual("2", dbSearch.episode)
            self.assertEqual("search", dbSearch.type)
            self.assertEqual(18, dbSearch.time.hour)

            # Both indexers get an IndexerSearch row linked to the overall search.
            indexerSearch1 = IndexerSearch.get(IndexerSearch.indexer == Indexer.get(Indexer.name == "newznab1"))
            self.assertEqual(indexerSearch1.search, dbSearch)
            self.assertEqual(18, indexerSearch1.time.hour)

            indexerSearch2 = IndexerSearch.get(IndexerSearch.indexer == Indexer.get(Indexer.name == "newznab2"))
            self.assertEqual(indexerSearch2.search, dbSearch)
            self.assertEqual(18, indexerSearch2.time.hour)

            calledUrls = sorted([x.request.url for x in rsps.calls])

            # API access of the successful indexer: logged as successful with its URL.
            indexerApiAccess1 = IndexerApiAccess.get(IndexerApiAccess.indexer == Indexer.get(Indexer.name == "newznab1"))
            self.assertEqual(indexerSearch1, indexerApiAccess1.indexer_search)
            self.assertEqual(18, indexerApiAccess1.time.hour)
            self.assertEqual("search", indexerApiAccess1.type)
            self.assertEqual(calledUrls[0], indexerApiAccess1.url)
            self.assertTrue(indexerApiAccess1.response_successful)
            self.assertEqual(0, indexerApiAccess1.response_time)
            self.assertIsNone(indexerApiAccess1.error)

            # API access of the failing indexer: logged as unsuccessful with the error text.
            indexerApiAccess2 = IndexerApiAccess.get(IndexerApiAccess.indexer == Indexer.get(Indexer.name == "newznab2"))
            self.assertEqual(indexerSearch2, indexerApiAccess2.indexer_search)
            self.assertEqual(18, indexerApiAccess2.time.hour)
            self.assertEqual("search", indexerApiAccess2.type)
            self.assertEqual(calledUrls[1], indexerApiAccess2.url)
            self.assertFalse(indexerApiAccess2.response_successful)
            self.assertIsNone(indexerApiAccess2.response_time)
            self.assertTrue("Connection refused" in indexerApiAccess2.error)

            # The failure also escalates the failing indexer's status to level 1.
            indexerStatus2 = IndexerStatus.get(IndexerStatus.indexer == Indexer.get(Indexer.name == "newznab2"))
            self.assertEqual(1, indexerStatus2.level)
            self.assertTrue("Connection refused" in indexerStatus2.reason)
def indexer(self):
    """Return the cached Indexer row for this module, loading it from the database on first access."""
    if self.indexerDb is not None:
        return self.indexerDb
    self.indexerDb = Indexer.get(fn.lower(Indexer.name) == self.settings.name.lower())
    return self.indexerDb
def init_indexer_table_entry(indexer_name):
    """Ensure an Indexer row named *indexer_name* exists, adding one when the lookup fails."""
    try:
        Indexer.get(fn.lower(Indexer.name) == indexer_name.lower())
    except Indexer.DoesNotExist:
        logger.info("Unable to find indexer with name %s in database. Will add it" % indexer_name)
        new_entry = Indexer()
        new_entry.create(name=indexer_name)