def setUp(self):
    """Per-test setup: create the full cast of users/tokens plus an API key and plans."""
    self.user, self.token = self.create_user_and_return_id_and_token()

    # create a second user/token so the user updates tests are independent of all else;
    # each named variant below exercises a distinct permission/state path
    user_variants = [
        ('new_user', {}),
        ('admin_user', {'roles': ['admin']}),
        ('qa_user', {'roles': ['qa']}),
        ('suspended_user', {'suspended': True}),
        ('internal_user', {'is_internal_user': True}),
        ('contributor_user', {'roles': ['contributor']}),
        ('unenabled_user', {'enabled': False}),
    ]
    for attr_name, variant_kwargs in user_variants:
        created_user, created_token = self.create_user_and_return_id_and_token(**variant_kwargs)
        setattr(self, attr_name, created_user)
        setattr(self, attr_name + '_token', created_token)

    key = ApiKey({'enabled': True})
    key.gen_token()
    db_session_users.add(key)
    db_session_users.commit()
    db_session_users.refresh(key)
    self.api_key = key

    # for the sake of testing populate plans table
    add_plans()

    # n.b. this should be removed when mainstream news category is launched
    os.environ["MAINSTREAM_NEWS_ENABLED"] = "true"
def tearDown(self):
    """Per-test cleanup: remove the user created by the signup flow, if it exists."""
    leftover = db_session_users.query(User).filter_by(
        email="*****@*****.**").first()
    if not leftover:
        return
    # remove the dependent UserAgency rows before deleting the user itself
    db_session_users.query(UserAgency).filter_by(user_id=leftover.id).delete()
    db_session_users.commit()
    db_session_users.delete(leftover)
    db_session_users.commit()
def test_pop_judgment_with_empty_queue(self):
    """Popping from an exhausted queue returns queue=empty and no judgment payload."""
    # hack: change the status of everything on the queue to something that won't be popped to
    # simulate an empty queue scenario
    for judgment in db_session_users.query(TopicJudgment).all():
        judgment.status = 'skipped'
        db_session_users.add(judgment)
    db_session_users.commit()

    response = self.client.get("/topic_judgments/pop",
                               headers={'Authorization': self.token})
    self.assert200(response)

    judgment_keys = ('id', 'status', 'judgment', 'document', 'user', 'topic_name')
    for key in judgment_keys:
        self.assertNotIn(key, response.json)
    self.assertIn('queue', response.json)
    self.assertEqual('empty', response.json['queue'])
def test_success_link_to_email_account(self):
    """Google OAuth for an existing unconfirmed email account links and enables it."""
    email = "*****@*****.**"
    db_session_users.add(
        User({
            'email': email,
            'password': '******',
            'enabled': False
        }))
    db_session_users.commit()
    initial_user = db_session_users.query(User).filter_by(email=email).first()

    with patch("requests.post") as rpost, patch("requests.get") as rget:
        # mocked token-exchange response
        rpost.return_value.status_code = 200
        rpost.return_value.json.return_value = {"access_token": "doesntmatter"}
        # mocked profile-fetch response
        rget.return_value.status_code = 200
        rget.return_value.json.return_value = {
            "given_name": "doesntmatter",
            "family_name": "doesntmatter",
            "id": "doesntmatter",
            "email": email
        }
        resp = self.client.post("/auth_google_token", data={
            'code': 'doesntmatter',
            "redirect_uri": "doesntmatter",
            'termsAgreed': 'true'
        })

    updated_user = db_session_users.query(User).filter_by(email=email).first()
    self.assert200(resp)
    self.assertIn("jwt_token", resp.data)
    self.assertIn('confirmed_date', updated_user.properties)
    self.assertIsInstance(updated_user.properties['confirmed_date'], basestring)
    self.assertEqual(True, updated_user.enabled)
    self.assertEqual(True, len(updated_user.google_id) > 0)
    self.assertIn('secondary_signup_dates', updated_user.properties)
    self.assertIn('google', updated_user.properties['secondary_signup_dates'])
    self.assertIsInstance(
        updated_user.properties['secondary_signup_dates']['google'], basestring)
def test_pop_judgment_with_zz_bad_doc(self):
    """A queued judgment whose doc_id points at no real document is not served.

    Seeds one judgment cloned from an existing row but with an invalid doc id,
    then verifies the pop endpoint reports an empty queue rather than returning
    the broken judgment.
    """
    # hack: change the status back for everything to queued and set all but one doc to an invalid doc id
    original_tj = db_session_users.query(TopicJudgment).first()
    bad_tj = TopicJudgment(original_tj.__dict__)
    bad_tj.doc_id = 1000000  # no fixture document has this id
    bad_tj.status = 'queued'
    bad_tj.user_id = self.new_user.id
    db_session_users.add(bad_tj)
    db_session_users.commit()

    response = self.client.get("/topic_judgments/pop",
                               headers={'Authorization': self.new_user_token})
    # bug fix: removed the stray trailing backslash after this call — a
    # line-continuation immediately followed by a comment is a SyntaxError
    self.assert200(response)
    # n.b. the previous test left this in the state where everything else is queued
    for key in ['id', 'status', 'judgment', 'document', 'user', 'topic_name']:
        self.assertNotIn(key, response.json)
    self.assertIn('queue', response.json)
    self.assertEqual(response.json['queue'], 'empty')
SearchVersion = 1  ## NB: tweak by hand for now
## koala-0 = javelina
## koala-1 = acronym boost (boost=8.0)

# create a token to use for authorization for all api calls
user = db_session_users.query(User).filter_by(id=1).scalar()
seconds_until_expiration = 60 * 60 * 24 * 14
expiration_datetime = dt.datetime.utcnow() + dt.timedelta(seconds=seconds_until_expiration)
token = jwt.encode({'user_id': user.id, 'exp': expiration_datetime}, SECRET_JWT)

# run every regression query against the local API and persist one assessment result per query
for q in db_session_users.query(SearchRegressionQuery).all():
    # bug fix: pass the query via params= so requests URL-encodes it;
    # interpolating raw query text into the URL broke on spaces/special characters
    response = requests.get("http://localhost:5000/documents",
                            params={'query': q.query},
                            headers={'Authorization': token})
    docs = response.json()['documents']
    doc_ids = [d['id'] for d in docs]
    scores = [d['score'] for d in docs]
    results = {'documents': docs}
    assessment_params = {
        'query_id': q.id,
        'build': BuildName,
        'version': SearchVersion,
        'scores': scores,
        'results': results,
        'doc_ids': doc_ids
    }
    db_session_users.add(SearchAssessmentResult(assessment_params))
    db_session_users.commit()
def setUpClass(cls):
    """One-time suite setup: reset the users db, load fixtures, and build the search index."""
    # bug fix: the global declaration must precede the first use of SETUP_RUN in
    # this scope (it previously sat at the bottom, after the read below — a
    # SyntaxWarning on Python 2 and a SyntaxError on Python 3)
    global SETUP_RUN

    # break off here if we've run the setup once for before all suite behavior, since this gets called once
    # per subclass of AppTest.
    # if we ever truly want class-agnostic per-class steps, we could name this method something else and call
    # it from here
    if SETUP_RUN:
        return

    cls.maxDiff = None
    cls.tearDownClassForRealz()
    base_users.BaseUsers.metadata.create_all(base_users.engine_users)
    jsearch.setup_test_index()
    time.sleep(0.2)  ## avoid race conditions

    # bug fix: use context managers so the fixture file handles are closed
    # (json.loads(open(...).read()) leaked them)
    with open(this_folder + '/fixtures/fixtures_201712.json') as fixtures_file:
        fixtures = json.load(fixtures_file)
    with open(this_folder + '/fixtures/agencies_20160721.json') as agencies_file:
        all_agencies = json.load(agencies_file)['agencies']

    default_agency_lookup = set(DefaultAgencies)
    cls.agencies = [a for a in all_agencies if a['id'] in default_agency_lookup]

    cls.all_documents = fixtures['documents']
    cls.acts = fixtures['acts']
    cls.regulations = fixtures['named_regulations']
    cls.concept_mentions = fixtures['concepts']
    cls.jurisdictions = fixtures['jurisdictions']
    cls.banks = fixtures['banks']
    cls.document_citations = fixtures['document_citations']
    cls.topics = fixtures['topics']
    cls.all_topics = fixtures['all_topics']
    cls.news_sources = fixtures['news_sources']

    # seed five identical queued judgments against the first fixture document
    topic = {"id": 1, "name": "General Provisions"}
    topic_judgments = [{
        'topic_id': topic['id'],
        'topic_name': topic['name'],
        'doc_id': cls.all_documents[0]['id'],
        'status': 'queued'
    } for _ in range(5)]
    db_session_users.add_all(
        [base_users.TopicJudgment(x) for x in topic_judgments])
    db_session_users.commit()

    # once everything is shoved into the db that we need to read, index it once!
    ## TODO: not yet covered
    # indexer.index_concepts()
    # indexer.index_dockets()
    index_sources = [
        (cls.agencies, 'agencies'),
        (cls.acts, 'acts'),
        (cls.regulations, 'named_regulations'),
        (cls.all_documents, 'documents'),
        (cls.concept_mentions, 'concepts'),
        (cls.jurisdictions, 'jurisdictions'),
        (cls.banks, 'banks'),
        (cls.document_citations, 'document_citations'),
        (cls.topics, 'topics'),
        (cls.all_topics, 'all_topics'),
        (cls.news_sources, 'news_sources'),
    ]
    for items, index_name in index_sources:
        for item in items:
            jsearch.index_jsearch_dict(item, index_name)
    time.sleep(1.0)  ## avoid race conditions

    # make sure we note that we've run this method once
    SETUP_RUN = True
def build_query_set():
    """Load relevance queries from the fixture TSV into SearchRegressionQuery rows.

    Each line is expected to be tab-separated with the query text in the second
    column; blank or malformed lines are skipped. Queries already present in the
    table are not re-added.
    """
    # bug fix: use a context manager so the file handle is closed
    with open(this_folder + '/fixtures/relevance_queries.tsv') as tsv_file:
        for line in tsv_file:
            fields = line.strip().split("\t")
            # robustness fix: previously split("\t")[1] raised IndexError on
            # blank or single-column lines
            if len(fields) < 2:
                continue
            query = fields[1]
            if not db_session_users.query(SearchRegressionQuery).filter_by(query=query).scalar():
                db_session_users.add(SearchRegressionQuery({'query': query}))
                db_session_users.commit()