def get_hive(process):
    if process not in hives.keys():
        if process not in process_lookup.keys():
            raise ProcessNotFoundError("Process " + str(process) + " not known")
        hives[process] = HiveInstance(process_lookup[process]['hive_uri'])
    return hives[process]
def get_hive(process):
    if process not in hives:
        if process not in process_lookup:
            raise ProcessNotFoundError('Process %s not known' % process)
        hives[process] = HiveInstance(process_lookup[process]['hive_uri'])
    return hives[process]
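# A minimal sketch of how the per-process cache used by get_hive() above might
# be populated; the process name and hive URI below are hypothetical
# placeholders, not values from the real configuration.
hives = {}
process_lookup = {
    'db_copy': {'hive_uri': 'mysql://user:pass@hive-host:3306/db_copy_hive'}
}

first = get_hive('db_copy')   # created and cached on the first call
second = get_hive('db_copy')  # later calls reuse the cached HiveInstance
assert first is second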
import os

from flask import Flask
from flasgger import Swagger

from ensembl_prodinf.exceptions import HTTPRequestError
from ensembl_prodinf.config import load_config_json
# HiveInstance, app_logging and the 'db_config' object are assumed to be
# provided elsewhere in this application package.

app = Flask(__name__, instance_relative_config=True)
app.config.from_object('db_config')
app.analysis = app.config["HIVE_ANALYSIS"]
app.blacklisted_status_hosts = set(app.config['BLACKLISTED_STATUS_HOSTS'])
app.config['SWAGGER'] = {
    'title': 'Database copy REST endpoints',
    'uiversion': 2
}
swagger = Swagger(app)
app.hive = HiveInstance(app.config["HIVE_URI"])
app.logger.addHandler(app_logging.file_handler(__name__))
app.logger.addHandler(app_logging.default_handler())
app.logger.info(app.config)
app.servers = load_config_json(app.config['SERVER_URIS_FILE'])


def is_running(pid):
    """Return True if a signal-0 check on the given PID succeeds, i.e. the process exists and can be signalled."""
    try:
        os.kill(pid, 0)
    except OSError:
        return False
    return True
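# A short example of the liveness check above: os.kill(pid, 0) sends no signal
# but raises OSError if the PID cannot be signalled, which is what is_running()
# relies on. The PIDs used here are illustrative only.
print(is_running(os.getpid()))      # our own process: True
print(is_running(2 ** 22 + 12345))  # almost certainly unused PID: False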
import logging
import os
import unittest
from shutil import copy2

# HiveInstance and dirpath are assumed to be provided elsewhere in this test
# module (dirpath pointing at the directory containing test_pipeline.db.template).


class HiveTest(unittest.TestCase):

    def setUp(self):
        """Create a fresh test database file and connect to it."""
        logging.info("Creating test sqlite database")
        copy2(dirpath + "/test_pipeline.db.template", dirpath + "/test_pipeline.db")
        logging.info("Connecting to hive test sqlite database " + dirpath + "/test_pipeline.db")
        self.hive = HiveInstance("sqlite:///" + dirpath + "/test_pipeline.db")

    def test_create_job(self):
        """Basic test case for creating a new job."""
        job1 = self.hive.create_job('TestRunnable', {'x': 'y', 'a': 'b'})
        logging.debug(job1)
        job2 = self.hive.get_job_by_id(job1.job_id)
        logging.debug(job2)
        self.assertEqual(job1.job_id, job2.job_id)
        self.assertEqual(job1.analysis.logic_name, job2.analysis.logic_name)
        self.assertEqual(job1.input_id, job2.input_id)

    def test_check_semaphore_success(self):
        """Test case for checking on a finished semaphore."""
        semaphore_data = self.hive.get_semaphore_data(2)
        logging.debug(semaphore_data)
        status = self.hive.check_semaphores_for_job(semaphore_data)
        logging.debug("Status for 2 is " + status)
        self.assertEqual(status, 'complete', "Checking expected status for completed semaphore")

    def test_check_semaphore_failure(self):
        """Test case for checking on a failed semaphore."""
        semaphore_data = self.hive.get_semaphore_data(8)
        logging.debug(semaphore_data)
        status = self.hive.check_semaphores_for_job(semaphore_data)
        logging.debug("Status for 8 is " + status)
        self.assertEqual(status, 'failed', "Checking expected status for failed semaphore")

    def test_check_job_success(self):
        """Test case for checking on a completed single job."""
        job = self.hive.get_job_by_id(20)
        logging.debug(job)
        status = self.hive.get_job_tree_status(job)
        self.assertEqual("complete", status, "Checking status of completed single job")

    def test_check_job_failure(self):
        """Test case for checking on a failed single job."""
        job = self.hive.get_job_by_id(11)
        logging.debug(job)
        status = self.hive.get_job_tree_status(job)
        self.assertEqual("failed", status, "Checking status of failed single job")

    def test_check_job_tree_success(self):
        """Test case for checking on a completed job factory."""
        job = self.hive.get_job_by_id(1)
        logging.debug(job)
        status = self.hive.get_job_tree_status(job)
        logging.debug(status)
        self.assertEqual("complete", status, "Checking status of completed job factory")

    def test_check_job_tree_failure(self):
        """Test case for checking on a failed job factory."""
        job = self.hive.get_job_by_id(7)
        logging.debug(job)
        status = self.hive.get_job_tree_status(job)
        logging.debug(status)
        self.assertEqual("failed", status, "Checking status of failed job factory")

    def test_get_job_output_success(self):
        """Test case for getting output of a completed job factory."""
        output = self.hive.get_result_for_job_id(1)
        logging.debug(output)
        self.assertEqual('complete', output['status'], "Checking status of successful job factory output")
        self.assertTrue(output['output'] is not None, "Checking output of successful job factory output")

    def test_get_job_output_failed(self):
        """Test case for getting output of a failed job factory."""
        output = self.hive.get_result_for_job_id(7)
        logging.debug(output)
        self.assertEqual('failed', output['status'], "Checking status of unsuccessful job factory output")
        self.assertTrue('output' not in output, "Checking output of unsuccessful job factory output")

    def test_get_all_results(self):
        """Test case for listing all jobs."""
        jobs = self.hive.get_all_results('TestRunnable')
        self.assertEqual(1, len(jobs), "Checking we got just one job")

    def tearDown(self):
        """Remove the test database file."""
        logging.info("Removing test sqlite database")
        os.remove(dirpath + "/test_pipeline.db")
def get_hive():
    global hive
    if hive is None:
        hive = HiveInstance(app.config["HIVE_URI"])
    return hive
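# A brief illustration of the lazy singleton above; the module-level
# `hive = None` initialiser and the Flask `app` object are assumed to be
# defined elsewhere in the same module.
hive = None

first = get_hive()   # constructed from app.config["HIVE_URI"] on first use
second = get_hive()
assert first is second  # all callers share the single HiveInstance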