def get_sample_template_processing_status(st_id):
    """Return the processing status of a sample template.

    Parameters
    ----------
    st_id : int
        The sample template id

    Returns
    -------
    (bool, str, str)
        Whether the template is currently being processed, the alert type
        and the alert message to display to the user
    """
    processing = False
    alert_type = ''
    alert_msg = ''
    job_info = r_client.get(SAMPLE_TEMPLATE_KEY_FORMAT % st_id)
    if job_info:
        job_info = loads(job_info)
        job_id = job_info['job_id']
        if job_id:
            # BUG FIX: the per-job redis key may have expired; the original
            # called loads(r_client.get(job_id)) unconditionally, which
            # raises TypeError when the key is gone. Guard against None.
            redis_info = r_client.get(job_id)
            if redis_info:
                redis_info = loads(redis_info)
                processing = redis_info['status_msg'] == 'Running'
                if processing:
                    alert_type = 'info'
                    alert_msg = ('This sample template is currently being '
                                 'processed')
                elif redis_info['status_msg'] == 'Success':
                    alert_type = redis_info['return']['status']
                    alert_msg = redis_info['return']['message'].replace(
                        '\n', '</br>')
                    # Job finished: collapse the payload so future calls
                    # do not have to look up the job record again
                    payload = {'job_id': None,
                               'status': alert_type,
                               'message': alert_msg}
                    r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % st_id,
                                 dumps(payload))
                else:
                    alert_type = redis_info['return']['status']
                    alert_msg = redis_info['return']['message'].replace(
                        '\n', '</br>')
        else:
            # No job attached: the payload already carries the final status
            alert_type = job_info['status']
            alert_msg = job_info['message'].replace('\n', '</br>')
    return processing, alert_type, alert_msg
def get_release_info(study_status='public'):
    """Returns the study status release MD5

    Parameters
    ----------
    study_status : str, optional
        The study status to search for. Note that this should always be set
        to 'public' but having this exposed helps with testing. The other
        options are 'private' and 'sandbox'

    Returns
    ------
    str, str, str
        The release MD5, filepath and timestamp
    """
    portal = qiita_config.portal
    # Fetch the three release attributes and normalize missing keys to ''
    fields = ('md5sum', 'filepath', 'time')
    md5sum, filepath, timestamp = (
        r_client.get('%s:release:%s:%s' % (portal, study_status, field)) or ''
        for field in fields)
    return md5sum, filepath, timestamp
def test_sample_template_patch_request(self):
    """sample_template_patch_request rejects bad input, removes a column."""
    # Unsupported operation
    obs = sample_template_patch_request(
        "*****@*****.**", "add", "/1/columns/season_environment/")
    self.assertEqual(
        obs,
        {"status": "error",
         "message": 'Operation "add" not supported. '
                    "Current supported operations: remove"})
    # Malformed path
    obs = sample_template_patch_request(
        "*****@*****.**", "remove", "/columns/season_environment/")
    self.assertEqual(
        obs, {"status": "error", "message": "Incorrect path parameter"})
    # Insufficient permissions
    obs = sample_template_patch_request(
        "*****@*****.**", "remove", "/1/columns/season_environment/")
    self.assertEqual(
        obs,
        {"status": "error", "message": "User does not have access to study"})
    # Valid removal
    obs = sample_template_patch_request(
        "*****@*****.**", "remove", "/1/columns/season_environment/")
    self.assertEqual(obs, {"status": "success", "message": ""})
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    payload = r_client.get("sample_template_1")
    self.assertIsNotNone(payload)
    job_key = loads(payload)["job_id"]
    redis_info = loads(r_client.get(job_key))
    while redis_info["status_msg"] == "Running":
        sleep(0.05)
        redis_info = loads(r_client.get(job_key))
    ST = qdb.metadata_template.sample_template.SampleTemplate
    self.assertNotIn("season_environment", ST(1).categories())
def wait_for_prep_information_job(prep_id, raise_if_none=True):
    """Waits until a prep information job is completed

    Parameters
    ----------
    prep_id : int
        Prep template id
    raise_if_none : bool, optional
        If True, raise an AssertionError if the correspondent redis key
        is empty. Default: True

    Raises
    ------
    AssertionError
        If `raise_if_none` is True and the correspondent redis key is
        not set
    """
    payload = r_client.get('prep_template_%d' % prep_id)
    if payload is None:
        if raise_if_none:
            raise AssertionError("unexpectedly None")
        return
    info = loads(payload)
    job_id = info['job_id']
    if info['is_qiita_job']:
        wait_for_processing_job(job_id)
    else:
        # Poll the moi-style job record until it leaves the Running state
        status = loads(r_client.get(job_id))
        while status['status_msg'] == 'Running':
            sleep(0.05)
            status = loads(r_client.get(job_id))
        sleep(0.05)
def _wait_for_parallel_job(self, key):
    """Spin until the redis-tracked job stored under `key` stops Running.

    This is needed so the clean up works - this is a distributed system
    so we need to make sure that all processes are done before we reset
    the test database.
    """
    job_key = loads(r_client.get(key))['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
def _wait_for_parallel_job(self, key):
    """Spin until the redis-tracked job stored under `key` stops Running.

    This is needed so the clean up works - this is a distributed system
    so we need to make sure that all processes are done before we reset
    the test database.
    """
    job_key = loads(r_client.get(key))['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.5)
        status = loads(r_client.get(job_key))
def test_artifact_post_request(self):
    """artifact_post_req denies unauthorized users, reports delete errors."""
    # A user without access must be rejected
    with self.assertRaises(QiitaHTTPError):
        artifact_post_req(User('*****@*****.**'), 1)
    artifact_post_req(User('*****@*****.**'), 2)
    # Wait until the job is completed
    wait_for_prep_information_job(1)
    # Check that the delete function has been actually called
    job_key = loads(r_client.get('prep_template_1'))['job_id']
    self.assertIn('Cannot delete artifact 2', r_client.get(job_key))
def test_delete_artifact(self):
    """POSTing an artifact delete returns 200 and the job completes."""
    response = self.post('/artifact/', {'artifact_id': 2})
    self.assertEqual(response.code, 200)
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    payload = r_client.get('prep_template_1')
    self.assertIsNotNone(payload)
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
def test_artifact_delete_req(self):
    """artifact_delete_req succeeds and its background job completes."""
    obs = artifact_delete_req(3, '*****@*****.**')
    self.assertEqual(obs, {'status': 'success', 'message': ''})
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    payload = r_client.get('prep_template_1')
    self.assertIsNotNone(payload)
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
def test_artifact_delete_req(self):
    """Deleting the fixture artifact succeeds and its job completes."""
    obs = artifact_delete_req(self.artifact.id, '*****@*****.**')
    self.assertEqual(obs, {'status': 'success', 'message': ''})
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    payload = r_client.get('prep_template_1')
    self.assertIsNotNone(payload)
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
def test_sample_template_put_req(self):
    """sample_template_put_req succeeds and queues an update job."""
    obs = sample_template_put_req(1, '*****@*****.**', 'uploaded_file.txt')
    self.assertEqual(
        obs,
        {'status': 'success', 'message': '', 'file': 'uploaded_file.txt'})
    payload = r_client.get('sample_template_1')
    self.assertIsNotNone(payload)
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
def test_sample_template_delete_req(self):
    """sample_template_delete_req succeeds and queues a delete job."""
    obs = sample_template_delete_req(1, "*****@*****.**")
    self.assertEqual(obs, {"status": "success", "message": ""})
    payload = r_client.get("sample_template_1")
    self.assertIsNotNone(payload)
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    job_key = loads(payload)["job_id"]
    status = loads(r_client.get(job_key))
    while status["status_msg"] == "Running":
        sleep(0.05)
        status = loads(r_client.get(job_key))
def unlisten_to_node(self, id_):
    """Stop listening to a job

    Parameters
    ----------
    id_ : str
        An ID to remove

    Returns
    -------
    str or None
        The ID removed or None if the ID was not removed
    """
    pubsub_key = _pubsub_key(id_)
    # Not subscribed: nothing to tear down
    if pubsub_key not in self._listening_to:
        return None
    del self._listening_to[pubsub_key]
    self.toredis.unsubscribe(pubsub_key)
    # Detach the job from its parent group (if any) and from our group
    parent = json_decode(r_client.get(id_)).get('parent', None)
    if parent is not None:
        r_client.srem(_children_key(parent), id_)
    r_client.srem(self.group_children, id_)
    return id_
def listen_to_node(self, id_):
    """Attach a callback on the job pubsub if it exists"""
    # Unknown job: silently ignore
    if r_client.get(id_) is None:
        return None
    pubsub_key = _pubsub_key(id_)
    self.toredis.subscribe(pubsub_key, callback=self.callback)
    self._listening_to[pubsub_key] = id_
    return id_
def get(self, job_id):
    """Redirect to the site root; failed jobs get no special handling yet."""
    job_details = loads(r_client.get(job_id))
    if job_details['status_msg'] == 'Failed':
        # TODO: something smart
        pass
    self.redirect('/')
def test_delete_sample_template(self):
    """Deleting a sample template via POST succeeds and completes its job."""
    response = self.post('/study/description/sample_template/',
                         {'study_id': 1, 'action': 'delete'})
    self.assertEqual(response.code, 200)
    # checking that the action was sent
    self.assertEqual(response.body, '{"status": "success", "message": ""}')
    # Wait until the job has completed
    payload = r_client.get('sample_template_1')
    self.assertIsNotNone(payload)
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.5)
        status = loads(r_client.get(job_key))
def get(self, job_id):
    """Redirect to the site root; failed jobs get no special handling yet."""
    job_details = loads(r_client.get(job_id))
    if job_details["status_msg"] == "Failed":
        # TODO: something smart
        pass
    self.redirect("/")
def get(self, job_id):
    """Redirect to the portal root; failed jobs are not yet handled."""
    job_details = loads(r_client.get(job_id))
    if job_details['status_msg'] == 'Failed':
        # TODO: something smart
        pass
    self.redirect('%s/' % qiita_config.portal_dir)
def safe_submit(*args, **kwargs):
    """Safe wraper for the submit function

    There are cases in which a race condition may occur: submit returns the
    job id but moi hasn't submitted the job. In some cases this is not
    acceptable, so this wrapper makes sure that the job_id is returned only
    once the job has already been submitted. From previous tests, the while
    loop is executed ~2 times, so there is not much time lost in here
    """
    job_id = submit(*args, **kwargs)
    # Poll until the job record appears in redis (truthy payload)
    while not r_client.get(job_id):
        sleep(0.005)
    return job_id
def test_sample_template_post_req(self):
    """sample_template_post_req succeeds and queues a creation job."""
    obs = sample_template_post_req(1, '*****@*****.**', '16S',
                                   'uploaded_file.txt')
    self.assertEqual(
        obs,
        {'status': 'success', 'message': '', 'file': 'uploaded_file.txt'})
    payload = r_client.get('sample_template_1')
    self.assertIsNotNone(payload)
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
def get_sample_template_processing_status(st_id):
    """Return (processing, alert_type, alert_msg) for sample template st_id.

    Reads the per-template redis payload (SAMPLE_TEMPLATE_KEY_FORMAT %
    st_id); when that payload points at a live job record, derives the
    alert from the job status instead.
    """
    job_info = r_client.get(SAMPLE_TEMPLATE_KEY_FORMAT % st_id)
    if job_info:
        job_info = loads(job_info)
        job_id = job_info['job_id']
        if job_id:
            # The job key may have expired - only parse it if still present
            redis_info = r_client.get(job_id)
            if redis_info:
                redis_info = loads(redis_info)
                processing = redis_info['status_msg'] == 'Running'
                if processing:
                    alert_type = 'info'
                    alert_msg = ('This sample template is currently being '
                                 'processed')
                elif redis_info['status_msg'] == 'Success':
                    alert_type = redis_info['return']['status']
                    alert_msg = redis_info['return']['message'].replace(
                        '\n', '</br>')
                    payload = {
                        'job_id': None,
                        'status': alert_type,
                        'message': alert_msg
                    }
                    # Job finished: collapse the payload so later calls can
                    # skip the job lookup entirely
                    r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % st_id,
                                 dumps(payload))
                else:
                    alert_type = redis_info['return']['status']
                    alert_msg = redis_info['return']['message'].replace(
                        '\n', '</br>')
            else:
                # Job record expired - nothing to report
                processing = False
                alert_type = ''
                alert_msg = ''
        else:
            # No job attached: the payload already holds the final status
            processing = False
            alert_type = job_info['status']
            alert_msg = job_info['message'].replace('\n', '</br>')
    else:
        # No redis entry at all for this sample template
        processing = False
        alert_type = ''
        alert_msg = ''
    return processing, alert_type, alert_msg
def test_sample_template_patch_request(self):
    """sample_template_patch_request rejects bad input, removes a column."""
    # Unsupported operation
    obs = sample_template_patch_request(
        "*****@*****.**", "add", "/1/columns/season_environment/")
    self.assertEqual(
        obs,
        {'status': 'error',
         'message': 'Operation "add" not supported. '
                    'Current supported operations: remove'})
    # Malformed path
    obs = sample_template_patch_request(
        "*****@*****.**", "remove", "/columns/season_environment/")
    self.assertEqual(
        obs, {'status': 'error', 'message': 'Incorrect path parameter'})
    # Insufficient permissions
    obs = sample_template_patch_request(
        "*****@*****.**", "remove", "/1/columns/season_environment/")
    self.assertEqual(
        obs,
        {'status': 'error', 'message': 'User does not have access to study'})
    # Valid removal
    obs = sample_template_patch_request(
        "*****@*****.**", "remove", "/1/columns/season_environment/")
    self.assertEqual(obs, {'status': 'success', 'message': ''})
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    payload = r_client.get('sample_template_1')
    self.assertIsNotNone(payload)
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
    ST = qdb.metadata_template.sample_template.SampleTemplate
    self.assertNotIn("season_environment", ST(1).categories())
def test_status_change(self):
    """_status_change swaps in the new status and returns the old one."""
    new_status = 'new status'
    r_client.set(self.test_id, json.dumps(self.test_job_info))
    # _status_change will return the old status
    self.assertEqual(_status_change(self.test_id, new_status),
                     self.test_job_info['status'])
    # The stored record now carries the new status
    stored = json.loads(r_client.get(self.test_id))
    self.assertEqual(stored['status'], new_status)
def test__submit(self):
    """_submit runs the command and records its result in redis."""
    ctx = ctxs.values()[0]
    cmd = 'echo "hello"'
    id_, pid_, _ = _submit(ctx, 'no parent', 'test', '/', system_call, cmd)
    self.test_keys.extend([id_, pid_])
    # Give the worker time to finish
    sleep(1)
    stored = json.loads(r_client.get(id_))
    self.assertEqual(stored['result'], [u"hello\n", u"", 0])
    self.assertEqual(stored['status'], 'Success')
    self.assertIsNotNone(stored['date_start'])
    self.assertIsNotNone(stored['date_end'])
def test__submit(self):
    """_submit runs the command and records its result in redis."""
    ctx = ctxs.values()[0]
    cmd = 'echo "hello"'
    id_, pid_ = _submit(ctx, 'no parent', 'test', '/', system_call, cmd)
    self.test_keys.extend([id_, pid_])
    # Give the worker time to finish
    sleep(2)
    stored = json.loads(r_client.get(id_))
    self.assertEqual(stored['result'], [u"hello\n", u"", 0])
    self.assertEqual(stored['status'], 'Success')
    self.assertIsNotNone(stored['date_start'])
    self.assertIsNotNone(stored['date_end'])
def test_redis_wrap_except(self):
    """_redis_wrap propagates exceptions and stores a Failed record."""
    def foo(a, b, **kwargs):
        return a + b
    # Missing positional argument -> TypeError bubbles up to the caller
    with self.assertRaises(TypeError):
        _redis_wrap(self.test_job_info, foo, 1)
    stored = json.loads(r_client.get(self.test_job_info['id']))
    self.assertEqual(stored['result'][0],
                     u'Traceback (most recent call last):\n')
    self.assertEqual(stored['status'], 'Failed')
    self.assertIsNotNone(stored['date_start'])
    self.assertIsNotNone(stored['date_end'])
def test_redis_wrap_except(self):
    """_redis_wrap propagates exceptions and stores a Failed record."""
    def foo(a, b, **kwargs):
        return a + b
    # Missing positional argument -> TypeError bubbles up to the caller
    with self.assertRaises(TypeError):
        _redis_wrap(self.test_job_info, foo, 1)
    stored = json.loads(r_client.get(self.test_job_info['id']))
    self.assertEqual(stored['result'][0],
                     u'Traceback (most recent call last):\n')
    self.assertEqual(stored['status'], 'Failed')
    self.assertIsNotNone(stored['date_start'])
    self.assertIsNotNone(stored['date_end'])
def test_get_sample_template_processing_status(self):
    """Exercise every branch of get_sample_template_processing_status."""
    key = SAMPLE_TEMPLATE_KEY_FORMAT % 1
    # No redis entry at all
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "")
    self.assertEqual(am, "")
    # Without job id
    r_client.set(key, dumps({"job_id": None, "status": "success",
                             "message": ""}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "success")
    self.assertEqual(am, "")
    # With job id and processing
    r_client.set(key, dumps({"job_id": "test_job_id"}))
    r_client.set("test_job_id", dumps({"status_msg": "Running"}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertTrue(proc)
    self.assertEqual(at, "info")
    self.assertEqual(am,
                     "This sample template is currently being processed")
    # With job id and success
    r_client.set(key, dumps({"job_id": "test_job_id"}))
    r_client.set("test_job_id",
                 dumps({"status_msg": "Success",
                        "return": {"status": "success",
                                   "message": "Some\nwarning"}}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "success")
    self.assertEqual(am, "Some</br>warning")
    self.assertEqual(loads(r_client.get(key)),
                     {"job_id": None, "status": "success",
                      "message": "Some</br>warning"})
    # With job and not success
    r_client.set(key, dumps({"job_id": "test_job_id"}))
    r_client.set("test_job_id",
                 dumps({"status_msg": "Failed",
                        "return": {"status": "error",
                                   "message": "Some\nerror"}}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "error")
    self.assertEqual(am, "Some</br>error")
    # With job expired
    r_client.set(key, dumps({"job_id": "non_existent_job"}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "")
    self.assertEqual(am, "")
def wrapper(handler, *args, **kwargs):
    # OAuth2 bearer-token gate for tornado handlers: validates the
    # Authorization header, resolves the token in redis and enforces a
    # per-user daily rate limit before invoking the wrapped handler `f`.
    header = handler.request.headers.get('Authorization', None)
    if header is None:
        _oauth_error(handler, 'Oauth2 error: invalid access token',
                     'invalid_request')
        return
    token_info = header.split()
    # Based on RFC6750 if reply is not 2 elements in the format of:
    # ['Bearer', token] we assume a wrong reply
    if len(token_info) != 2 or token_info[0] != 'Bearer':
        _oauth_error(handler, 'Oauth2 error: invalid access token',
                     'invalid_grant')
        return
    token = token_info[1]
    db_token = r_client.hgetall(token)
    if not db_token:
        # token has timed out or never existed
        _oauth_error(handler, 'Oauth2 error: token has timed out',
                     'invalid_grant')
        return
    # Check daily rate limit for key if password style key
    if db_token['grant_type'] == 'password':
        limit_key = '%s_%s_daily_limit' % (db_token['client_id'],
                                           db_token['user'])
        limiter = r_client.get(limit_key)
        if limiter is None:
            # Set limit to 5,000 requests per day
            # NOTE(review): setex argument order (value vs TTL) differs
            # between redis-py versions - confirm 5000 is the counter value
            # and 86400 the expiry here.
            r_client.setex(limit_key, 5000, 86400)
        else:
            # Decrement first, then re-read; once the counter hits zero the
            # request is rejected for the rest of the day
            r_client.decr(limit_key)
            if int(r_client.get(limit_key)) <= 0:
                _oauth_error(
                    handler, 'Oauth2 error: daily request limit reached',
                    'invalid_grant')
                return
    return f(handler, *args, **kwargs)
def analysis_description_handler_get_request(analysis_id, user):
    """Returns the analysis information

    Parameters
    ----------
    analysis_id : int
        The analysis id
    user : qiita_db.user.User
        The user performing the request

    Returns
    -------
    dict
        The analysis name, id, description plus any alert to display
    """
    analysis = Analysis(analysis_id)
    check_analysis_access(user, analysis)

    job_info = r_client.get("analysis_%s" % analysis.id)
    alert_type = 'info'
    alert_msg = ''
    if job_info:
        job_info = loads(job_info)
        job_id = job_info['job_id']
        if job_id:
            r_payload = r_client.get(job_id)
            if r_payload:
                # BUG FIX: reuse the payload just fetched instead of a
                # second r_client.get(job_id); the key could expire between
                # the two calls, making loads(None) raise TypeError, and
                # the second round trip is wasted work anyway.
                redis_info = loads(r_payload)
                # NOTE(review): 'running' is lowercase here while sibling
                # handlers compare against 'Running' - confirm which casing
                # this job type actually stores.
                if redis_info['status_msg'] == 'running':
                    alert_msg = ('An artifact is being deleted from this '
                                 'analysis')
                elif redis_info['return'] is not None:
                    alert_type = redis_info['return']['status']
                    alert_msg = redis_info['return']['message'].replace(
                        '\n', '</br>')

    return {'analysis_name': analysis.name,
            'analysis_id': analysis.id,
            'analysis_description': analysis.description,
            'alert_type': alert_type,
            'alert_msg': alert_msg}
def _traverse(self, id_):
    """Traverse groups and yield info dicts for jobs"""
    # Frontier of node IDs still to visit, seeded with the direct children
    pending = r_client.smembers(_children_key(id_))
    while pending:
        current = pending.pop()
        details = self._decode(r_client.get(current))
        if details['type'] == 'group':
            # Groups contribute their own children to the frontier
            grandchildren = r_client.smembers(_children_key(details['id']))
            if grandchildren is not None:
                pending.update(grandchildren)
        yield details
def test_authenticate_header_username(self):
    """User-token requests succeed and are rate limited per day."""
    auth = {'Authorization': 'Bearer ' + self.user_token}
    obs = self.get('/qiita_db/artifacts/1/mapping/', headers=auth)
    self.assertEqual(obs.code, 200)
    # Check rate limiting works
    self.assertEqual(int(r_client.get(self.user_rate_key)), 1)
    r_client.setex('[email protected]_daily_limit', 0, 2)
    obs = self.get('/qiita_db/artifacts/100/mapping/', headers=auth)
    self.assertEqual(
        loads(obs.body),
        {'error': 'invalid_grant',
         'error_description': 'Oauth2 error: daily request limit reached'})
def test_prep_template_patch_req(self):
    """prep_template_patch_req updates metadata and template data."""
    pt = PrepTemplate(1)
    # Update investigation type
    obs = prep_template_patch_req('*****@*****.**', 'replace',
                                  '/1/investigation_type', 'Cancer Genomics')
    exp = {'status': 'success', 'message': ''}
    self.assertEqual(obs, exp)
    self.assertEqual(pt.investigation_type, 'Cancer Genomics')
    # Update prep template data
    obs = prep_template_patch_req('*****@*****.**', 'replace', '/1/data',
                                  'update.txt')
    self.assertEqual(obs, exp)
    payload = r_client.get('prep_template_1')
    self.assertIsNotNone(payload)
    # This is needed so the clean up works - this is a distributed system
    # so we need to make sure that all processes are done before we reset
    # the test database
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
def test_post_artifact(self):
    """Posting a new artifact creates it once the background job ends."""
    args = {'artifact-type': 'FASTQ',
            'name': 'New Artifact Handler test',
            'prep-template-id': self.prep.id,
            'raw_forward_seqs': [self.fwd_fp],
            'raw_barcodes': [self.barcodes_fp],
            'raw_reverse_seqs': [],
            'import-artifact': ''}
    response = self.post('/study/new_artifact/', args)
    self.assertEqual(response.code, 200)
    # make sure new artifact created
    payload = r_client.get('prep_template_%s' % self.prep.id)
    self.assertIsNotNone(payload)
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
    new_artifact_id = get_count('qiita.artifact')
    artifact = Artifact(new_artifact_id)
    self.assertEqual(artifact.name, 'New Artifact Handler test')
    self._files_to_remove.extend([fp for _, fp, _ in artifact.filepaths])
def post(self):
    # Login handler: authenticates the submitted credentials and, on
    # success, starts a session and redirects back to the referring page.
    if r_client.get('maintenance') is not None:
        # Site-wide maintenance flag blocks all logins
        raise HTTPError(503, "Site is down for maintenance")

    username = self.get_argument("username", "").strip().lower()
    passwd = self.get_argument("password", "")
    nextpage = self.get_argument("next", None)
    if nextpage is None:
        # Bounce back to wherever the user came from, unless that was an
        # auth page (avoids redirect loops back into login/verify pages)
        if "auth/" not in self.request.headers['Referer']:
            nextpage = self.request.headers['Referer']
        else:
            nextpage = "%s/" % qiita_config.portal_dir

    msg = ""
    # check the user level
    try:
        if User(username).level == "unverified":
            # email not verified so dont log in
            msg = ("Email not verified. Please check your email and click "
                   "the verify link. You may need to check your spam "
                   "folder to find the email.<br/>If a verification email"
                   " has not arrived in 15 minutes, please email <a href='"
                   "mailto:[email protected]'>[email protected]</a>")
    except QiitaDBUnknownIDError:
        msg = "Unknown user"
    except RuntimeError:
        # means DB not available, so set maintenance mode and failover
        r_client.set(
            "maintenance", "Database connection unavailable, "
            "please try again later.")
        self.redirect("%s/" % qiita_config.portal_dir)
        return

    # Check the login information
    login = None
    try:
        login = User.login(username, passwd)
    except IncorrectEmailError:
        msg = "Unknown user"
    except IncorrectPasswordError:
        msg = "Incorrect password"
    except UnverifiedEmailError:
        msg = "You have not verified your email address"

    if login:
        # everything good so log in
        self.set_current_user(username)
        self.redirect(nextpage)
    else:
        self.render("index.html", message=msg, level='danger')
def test_authenticate_header_username(self):
    """User-token requests succeed and are rate limited per day."""
    auth = {'Authorization': 'Bearer ' + self.user_token}
    obs = self.get('/qiita_db/artifacts/1/', headers=auth)
    self.assertEqual(obs.code, 200)
    # Check rate limiting works
    self.assertEqual(int(r_client.get(self.user_rate_key)), 1)
    r_client.setex('[email protected]_daily_limit', 0, 2)
    obs = self.get('/qiita_db/artifacts/100/', headers=auth)
    self.assertEqual(
        loads(obs.body),
        {'error': 'invalid_grant',
         'error_description': 'Oauth2 error: daily request limit reached'})
def test_prep_template_patch_req(self): pt = PrepTemplate(1) # Update investigation type obs = prep_template_patch_req( '*****@*****.**', 'replace', '/1/investigation_type', 'Cancer Genomics') exp = {'status': 'success', 'message': ''} self.assertEqual(obs, exp) self.assertEqual(pt.investigation_type, 'Cancer Genomics') # Update prep template data obs = prep_template_patch_req( '*****@*****.**', 'replace', '/1/data', 'update.txt') self.assertEqual(obs, exp) obs = r_client.get('prep_template_1') self.assertIsNotNone(obs) # This is needed so the clean up works - this is a distributed system # so we need to make sure that all processes are done before we reset # the test database redis_info = loads(r_client.get(obs)) while redis_info['status_msg'] == 'Running': sleep(0.05) redis_info = loads(r_client.get(obs))
def test_get_sample_template_processing_status(self):
    """Exercise the status branches of get_sample_template_processing_status."""
    key = SAMPLE_TEMPLATE_KEY_FORMAT % 1
    # No redis entry at all
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "")
    self.assertEqual(am, "")
    # Without job id
    r_client.set(key, dumps({'job_id': None, 'status': "success",
                             'message': ""}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "success")
    self.assertEqual(am, "")
    # With job id and processing
    r_client.set(key, dumps({'job_id': "test_job_id"}))
    r_client.set("test_job_id", dumps({'status_msg': 'Running'}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertTrue(proc)
    self.assertEqual(at, "info")
    self.assertEqual(am,
                     "This sample template is currently being processed")
    # With job id and success
    r_client.set(key, dumps({'job_id': "test_job_id"}))
    r_client.set("test_job_id",
                 dumps({'status_msg': 'Success',
                        'return': {'status': 'success',
                                   'message': 'Some\nwarning'}}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "success")
    self.assertEqual(am, "Some</br>warning")
    self.assertEqual(loads(r_client.get(key)),
                     {'job_id': None, 'status': 'success',
                      'message': 'Some</br>warning'})
    # With job and not success
    r_client.set(key, dumps({'job_id': "test_job_id"}))
    r_client.set("test_job_id",
                 dumps({'status_msg': 'Failed',
                        'return': {'status': 'error',
                                   'message': 'Some\nerror'}}))
    proc, at, am = get_sample_template_processing_status(1)
    self.assertFalse(proc)
    self.assertEqual(at, "error")
    self.assertEqual(am, "Some</br>error")
def _submit(ctx, parent_id, name, url, func, *args, **kwargs):
    """Submit a function to a cluster

    Parameters
    ----------
    parent_id : str
        The ID of the group that the job is a part of.
    name : str
        The name of the job
    url : str
        The handler that can take the results (e.g., /beta_diversity/)
    func : function
        The function to execute. Any returns from this function will be
        serialized and deposited into Redis using the uuid for a key.
        This function should raise if the method fails.
    args : tuple or None
        Any args for ``func``
    kwargs : dict or None
        Any kwargs for ``func``

    Returns
    -------
    tuple, (str, str, AsyncResult)
        The job ID, parent ID and the IPython's AsyncResult object of the
        job
    """
    parent_info = r_client.get(parent_id)
    if parent_info is None:
        # First job under this parent: materialize the group record
        parent_info = create_info('unnamed', 'group', id=parent_id)
        parent_id = parent_info['id']
        r_client.set(parent_id, json.dumps(parent_info))

    parent_pubsub_key = parent_id + ':pubsub'

    job_info = create_info(name, 'job', url=url, parent=parent_id,
                           context=ctx.name, store=True)
    job_info['status'] = 'Queued'
    job_id = job_info['id']

    # Store the job record and announce it on the parent's pubsub channel
    # in one pipeline, so listeners never see the announcement without the
    # record being present
    with r_client.pipeline() as pipe:
        pipe.set(job_id, json.dumps(job_info))
        pipe.publish(parent_pubsub_key, json.dumps({'add': [job_id]}))
        pipe.execute()

    ar = ctx.bv.apply_async(_redis_wrap, job_info, func, *args, **kwargs)
    return job_id, parent_id, ar
def post(self):
    # Login handler: authenticates the submitted credentials and, on
    # success, starts a session and redirects back to the referring page.
    if r_client.get('maintenance') is not None:
        # Site-wide maintenance flag blocks all logins
        raise HTTPError(503, "Site is down for maintenance")

    username = self.get_argument("username", "").strip().lower()
    passwd = self.get_argument("password", "")
    nextpage = self.get_argument("next", None)
    if nextpage is None:
        # Bounce back to wherever the user came from, unless that was an
        # auth page (avoids redirect loops back into login/verify pages)
        if "auth/" not in self.request.headers['Referer']:
            nextpage = self.request.headers['Referer']
        else:
            nextpage = "%s/" % qiita_config.portal_dir

    msg = ""
    # check the user level
    try:
        if User(username).level == "unverified":
            # email not verified so dont log in
            msg = ("Email not verified. Please check your email and click "
                   "the verify link. You may need to check your spam "
                   "folder to find the email.<br/>If a verification email"
                   " has not arrived in 15 minutes, please email <a href='"
                   "mailto:[email protected]'>[email protected]</a>")
    except QiitaDBUnknownIDError:
        msg = "Unknown user"
    except RuntimeError:
        # means DB not available, so set maintenance mode and failover
        r_client.set("maintenance", "Database connection unavailable, "
                     "please try again later.")
        self.redirect("%s/" % qiita_config.portal_dir)
        return

    # Check the login information
    login = None
    try:
        login = User.login(username, passwd)
    except IncorrectEmailError:
        msg = "Unknown user"
    except IncorrectPasswordError:
        msg = "Incorrect password"
    except UnverifiedEmailError:
        msg = "You have not verified your email address"

    if login:
        # everything good so log in
        self.set_current_user(username)
        self.redirect(nextpage)
    else:
        self.render("index.html", message=msg, level='danger')
def test_redis_wrap(self):
    """_redis_wrap records both success and failure outcomes in redis."""
    def foo(a, b, **kwargs):
        return a + b
    # Successful call
    r_client.set(self.test_job_info['id'], json.dumps(self.test_job_info))
    _redis_wrap(self.test_job_info, foo, 1, 2)
    sleep(2)
    stored = json.loads(r_client.get(self.test_job_info['id']))
    self.assertEqual(stored['result'], 3)
    self.assertEqual(stored['status'], 'Success')
    self.assertIsNotNone(stored['date_start'])
    self.assertIsNotNone(stored['date_end'])
    # Extra positional argument -> recorded as Failed with a traceback
    r_client.set(self.test_job_info['id'], json.dumps(self.test_job_info))
    _redis_wrap(self.test_job_info, foo, 1, 2, 3)
    sleep(2)
    stored = json.loads(r_client.get(self.test_job_info['id']))
    self.assertEqual(stored['result'][0],
                     u'Traceback (most recent call last):\n')
    self.assertEqual(stored['status'], 'Failed')
    self.assertIsNotNone(stored['date_start'])
    self.assertIsNotNone(stored['date_end'])
def test_post_artifact(self):
    """Posting a new artifact creates it once the background job ends."""
    args = {'artifact-type': 'FASTQ',
            'name': 'New Artifact Handler test',
            'prep-template-id': self.prep.id,
            'raw_forward_seqs': [self.fwd_fp],
            'raw_barcodes': [self.barcodes_fp],
            'raw_reverse_seqs': [],
            'import-artifact': ''}
    response = self.post('/study/new_artifact/', args)
    self.assertEqual(response.code, 200)
    # make sure new artifact created
    payload = r_client.get('prep_template_%s' % self.prep.id)
    self.assertIsNotNone(payload)
    job_key = loads(payload)['job_id']
    status = loads(r_client.get(job_key))
    while status['status_msg'] == 'Running':
        sleep(0.05)
        status = loads(r_client.get(job_key))
    new_artifact_id = get_count('qiita.artifact')
    artifact = Artifact(new_artifact_id)
    self.assertEqual(artifact.name, 'New Artifact Handler test')
    self._files_to_remove.extend([fp for _, fp, _ in artifact.filepaths])
def test_submit_nouser(self):
    """submit_nouser runs the function and stores a Success record."""
    def foo(a, b, c=10, **kwargs):
        return a + b + c
    id_, pid_, _ = submit_nouser(foo, 1, 2, c=20)
    self.test_keys.extend([id_, pid_])
    # Give the worker time to finish
    sleep(1)
    stored = json.loads(r_client.get(id_))
    self.assertEqual(stored['result'], 23)
    self.assertEqual(stored['status'], 'Success')
    self.assertIsNotNone(stored['date_start'])
    self.assertIsNotNone(stored['date_end'])
def test_submit_nouser(self):
    """submit_nouser runs the function and stores a Success record."""
    def foo(a, b, c=10, **kwargs):
        return a + b + c
    id_, pid_ = submit_nouser(foo, 1, 2, c=20)
    self.test_keys.extend([id_, pid_])
    # Give the worker time to finish
    sleep(2)
    stored = json.loads(r_client.get(id_))
    self.assertEqual(stored['result'], 23)
    self.assertEqual(stored['status'], 'Success')
    self.assertIsNotNone(stored['date_start'])
    self.assertIsNotNone(stored['date_end'])
def test_redis_wrap(self):
    """_redis_wrap records the wrapped call's outcome in redis.

    Checks the success path (result stored with status 'Success') and
    the failure path (an extra positional argument makes ``foo`` raise,
    so the stored result is a traceback and the status is 'Failed').
    """
    def foo(a, b, **kwargs):
        return a + b

    # Success: the result of foo(1, 2) ends up in redis
    r_client.set(self.test_job_info['id'], json.dumps(self.test_job_info))
    _redis_wrap(self.test_job_info, foo, 1, 2)
    sleep(2)
    obs = json.loads(r_client.get(self.test_job_info['id']))
    self.assertEqual(obs['result'], 3)
    self.assertEqual(obs['status'], 'Success')
    # assertIsNotNone replaces the non-idiomatic assertNotEqual(x, None)
    self.assertIsNotNone(obs['date_start'])
    self.assertIsNotNone(obs['date_end'])

    # Failure: three positional args make foo raise a TypeError
    r_client.set(self.test_job_info['id'], json.dumps(self.test_job_info))
    _redis_wrap(self.test_job_info, foo, 1, 2, 3)
    sleep(2)
    obs = json.loads(r_client.get(self.test_job_info['id']))
    self.assertEqual(obs['result'][0],
                     u'Traceback (most recent call last):\n')
    self.assertEqual(obs['status'], 'Failed')
    self.assertIsNotNone(obs['date_start'])
    self.assertIsNotNone(obs['date_end'])
def test_redis_wrap(self):
    """_redis_wrap returns the wrapped result and mirrors it in redis."""
    def foo(a, b, **kwargs):
        return a + b

    r_client.set(self.test_job_info['id'], json.dumps(self.test_job_info))
    obs_ret = _redis_wrap(self.test_job_info, foo, 1, 2)
    sleep(1)
    obs = json.loads(r_client.get(self.test_job_info['id']))
    self.assertEqual(obs['result'], 3)
    # The direct return value must match what was stored in redis
    self.assertEqual(obs_ret, obs['result'])
    self.assertEqual(obs['status'], 'Success')
    # assertIsNotNone is the idiomatic identity check for None
    self.assertIsNotNone(obs['date_start'])
    self.assertIsNotNone(obs['date_end'])
    # Reset the stored job info (kept from the original test)
    r_client.set(self.test_job_info['id'], json.dumps(self.test_job_info))
def test_submit(self):
    """submit executes foo under every context and stores the result."""
    def foo(a, b, c=10, **kwargs):
        return a + b + c

    for ctx in ctxs:
        id_, pid_ = submit(ctx, 'no parent', 'test', '/', foo, 1, 2, c=15)
        # Register the redis keys so the test teardown can clean them up
        self.test_keys.append(id_)
        self.test_keys.append(pid_)
        sleep(2)
        obs = json.loads(r_client.get(id_))
        self.assertEqual(obs['result'], 18)
        self.assertEqual(obs['status'], 'Success')
        # assertIsNotNone is the idiomatic identity check for None
        self.assertIsNotNone(obs['date_start'])
        self.assertIsNotNone(obs['date_end'])
def test_redis_wrap(self):
    """_redis_wrap both returns the result and writes it to redis."""
    def foo(a, b, **kwargs):
        return a + b

    r_client.set(self.test_job_info['id'], json.dumps(self.test_job_info))
    obs_ret = _redis_wrap(self.test_job_info, foo, 1, 2)
    sleep(1)
    obs = json.loads(r_client.get(self.test_job_info['id']))
    self.assertEqual(obs['result'], 3)
    # The direct return value and the stored value must agree
    self.assertEqual(obs_ret, obs['result'])
    self.assertEqual(obs['status'], 'Success')
    # assertIsNotNone replaces the non-idiomatic assertNotEqual(x, None)
    self.assertIsNotNone(obs['date_start'])
    self.assertIsNotNone(obs['date_end'])
    # Reset the stored job info (kept from the original test)
    r_client.set(self.test_job_info['id'], json.dumps(self.test_job_info))
def test_submit(self):
    """submit runs foo under each context and records the outcome."""
    def foo(a, b, c=10, **kwargs):
        return a + b + c

    for ctx in ctxs:
        id_, pid_ = submit(ctx, 'no parent', 'test', '/', foo, 1, 2, c=15)
        # Register the redis keys so the test teardown can clean them up
        self.test_keys.append(id_)
        self.test_keys.append(pid_)
        sleep(2)
        obs = json.loads(r_client.get(id_))
        self.assertEqual(obs['result'], 18)
        self.assertEqual(obs['status'], 'Success')
        # assertIsNotNone is the idiomatic identity check for None
        self.assertIsNotNone(obs['date_start'])
        self.assertIsNotNone(obs['date_end'])
def _action_get(self, ids):
    """Fetch the stored details for the requested job IDs.

    Parameters
    ----------
    ids : {list, set, tuple, generator} of str
        The IDs to look up. If empty, every known ID is used.

    Returns
    -------
    list of dict
        The decoded payload of each resolvable ID. Entries whose type
        is 'group' are expanded: their members' IDs are queued for
        lookup as well.
    """
    pending = set(ids) if ids else set(self.jobs)
    details = []
    while pending:
        current = pending.pop()
        if current is None:
            continue

        try:
            raw = r_client.get(current)
        except ResponseError:
            # the key holds a different redis type; skip it
            continue

        try:
            payload = self._decode(raw)
        except ValueError:
            # undecodable payload, or nothing stored for this key
            continue

        details.append(payload)
        if payload['type'] == 'group':
            # a group fans out to its member jobs
            for member in self.traverse(current):
                pending.add(member['id'])
    return details
def test_artifact_post_req(self):
    """artifact_post_req creates artifacts from files and by import.

    The queue-then-poll sequence was duplicated verbatim for the two
    scenarios; it is extracted into the nested helper below.
    """
    def _wait_for_prep_job(prep_id):
        # artifact_post_req queues a background job whose id is stored
        # in redis under the prep template key; block until it is done.
        obs = r_client.get('prep_template_%d' % prep_id)
        self.assertIsNotNone(obs)
        job_id = loads(obs)['job_id']
        redis_info = loads(r_client.get(job_id))
        while redis_info['status_msg'] == 'Running':
            sleep(0.05)
            redis_info = loads(r_client.get(job_id))

    # Create new prep template to attach artifact to
    pt = npt.assert_warns(
        QiitaDBWarning, PrepTemplate.create,
        pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
    self._files_to_remove.extend([fp for _, fp in pt.get_filepaths()])
    new_artifact_id = get_count('qiita.artifact') + 1
    filepaths = {'raw_forward_seqs': 'uploaded_file.txt',
                 'raw_barcodes': 'update.txt'}
    obs = artifact_post_req(
        '*****@*****.**', filepaths, 'FASTQ', 'New Test Artifact', pt.id)
    self.assertEqual(obs, {'status': 'success', 'message': ''})
    _wait_for_prep_job(pt.id)
    # Instantiate the artifact to make sure it was made and
    # to clean the environment
    a = Artifact(new_artifact_id)
    self._files_to_remove.extend([fp for _, fp, _ in a.filepaths])

    # Test importing an artifact: attach it to a second prep template
    pt = npt.assert_warns(
        QiitaDBWarning, PrepTemplate.create,
        pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
    self._files_to_remove.extend([fp for _, fp in pt.get_filepaths()])
    new_artifact_id_2 = get_count('qiita.artifact') + 1
    obs = artifact_post_req(
        '*****@*****.**', {}, 'FASTQ', 'New Test Artifact 2', pt.id,
        new_artifact_id)
    self.assertEqual(obs, {'status': 'success', 'message': ''})
    _wait_for_prep_job(pt.id)
    # Instantiate the artifact to make sure it was made and
    # to clean the environment
    a = Artifact(new_artifact_id_2)
    self._files_to_remove.extend([fp for _, fp, _ in a.filepaths])