def test_runDHIS_3_verifyPost(self):
    """Verify VA records got posted to DHIS2."""
    pl = Pipeline("Pipeline.db", ".", "enilepiP", True)
    settings = pl.config()
    settings_pipeline = settings["pipeline"]
    settings_odk = settings["odk"]
    settings_openva = settings["openVA"]
    settings_dhis = settings["dhis"]
    # Run the full chain: ODK export -> openVA -> DHIS2 post.
    odk_bc = pl.runODK(settings_odk, settings_pipeline)
    r_out = pl.runOpenVA(settings_openva,
                         settings_pipeline,
                         settings_odk.odkID,
                         pl.pipelineRunDate)
    pipeline_dhis = pl.runDHIS(settings_dhis, settings_pipeline)
    # Count of records marked as pushed must match DHIS2's own tally.
    df_new_storage = pd.read_csv("OpenVAFiles/newStorage.csv")
    n_pushed = sum(df_new_storage["pipelineOutcome"] == "Pushed to DHIS2")
    self.assertEqual(n_pushed, pipeline_dhis["nPostedRecords"])
    # Clean up the artifacts this run produced.
    shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
    shutil.rmtree("DHIS/blobs")
def test_runDHIS_2_postVA(self):
    """Post VA records to DHIS2."""
    pl = Pipeline("Pipeline.db", ".", "enilepiP", True)
    settings = pl.config()
    settings_pipeline = settings["pipeline"]
    settings_odk = settings["odk"]
    settings_openva = settings["openVA"]
    settings_dhis = settings["dhis"]
    # Run the full chain: ODK export -> openVA -> DHIS2 post.
    odk_bc = pl.runODK(settings_odk, settings_pipeline)
    r_out = pl.runOpenVA(settings_openva,
                         settings_pipeline,
                         settings_odk.odkID,
                         pl.pipelineRunDate)
    pipeline_dhis = pl.runDHIS(settings_dhis, settings_pipeline)
    # A successful post is signalled by an 'importSummaries' entry in
    # the DHIS2 response payload.
    post_log = pipeline_dhis["postLog"]
    self.assertTrue('importSummaries' in post_log["response"].keys())
    # Clean up the artifacts this run produced.
    shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
    shutil.rmtree("DHIS/blobs/")
def setUpClass(cls):
    """Stage sample input files and run the DHIS2 step once for the class.

    Removes stale DHIS2 blobs and openVA outputs, copies the bundled
    sample CSV fixtures into place, creates the Transfer Database if it
    is missing, and stores the result of ``runDHIS`` on the class for
    the tests to inspect.
    """
    shutil.rmtree('DHIS/blobs/', ignore_errors=True)
    if os.path.isfile('OpenVAFiles/entityAttributeValue.csv'):
        os.remove('OpenVAFiles/entityAttributeValue.csv')
    if os.path.isfile('OpenVAFiles/recordStorage.csv'):
        os.remove('OpenVAFiles/recordStorage.csv')
    # Seed the openVA output files with known sample data so runDHIS
    # has deterministic input.
    shutil.copy('OpenVAFiles/sampleEAV.csv',
                'OpenVAFiles/entityAttributeValue.csv')
    shutil.copy('OpenVAFiles/sample_recordStorage.csv',
                'OpenVAFiles/recordStorage.csv')
    shutil.copy('OpenVAFiles/sample_newStorage.csv',
                'OpenVAFiles/newStorage.csv')
    if not os.path.isfile('Pipeline.db'):
        createTransferDB('Pipeline.db', '.', 'enilepiP')
    pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
    settings = pl.config()
    # Only the pipeline and DHIS settings are needed here; the unused
    # odk/openVA lookups and run-date local were removed.
    settingsPipeline = settings['pipeline']
    settingsDHIS = settings['dhis']
    cls.pipelineDHIS = pl.runDHIS(settingsDHIS, settingsPipeline)
def test_runDHIS_1_vaProgramUID(self):
    """Verify VA program is installed."""
    pl = Pipeline("Pipeline.db", ".", "enilepiP", True)
    settings = pl.config()
    settings_pipeline = settings["pipeline"]
    settings_odk = settings["odk"]
    settings_openva = settings["openVA"]
    settings_dhis = settings["dhis"]
    # Run the full chain: ODK export -> openVA -> DHIS2 post.
    odk_bc = pl.runODK(settings_odk, settings_pipeline)
    r_out = pl.runOpenVA(settings_openva,
                         settings_pipeline,
                         settings_odk.odkID,
                         pl.pipelineRunDate)
    pipeline_dhis = pl.runDHIS(settings_dhis, settings_pipeline)
    # The DHIS2 server's VA program must resolve to the expected UID.
    self.assertEqual(pipeline_dhis["vaProgramUID"], "sv91bCroFFx")
    # Clean up the artifacts this run produced.
    shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
    shutil.rmtree("DHIS/blobs/")
class Check_storeResultsDB(unittest.TestCase):
    """Check storeResultsDB method marks duplicate records:"""

    @classmethod
    def setUpClass(cls):
        # Make sure a Transfer Database exists for the whole class.
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

    def setUp(self):
        # Start each test from a clean slate: remove Briefcase storage,
        # DHIS2 blobs, and stale ODK export files, then ensure the
        # Briefcase jar is available.
        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        self.pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        self.settings = self.pl.config()
        self.settingsPipeline = self.settings['pipeline']
        self.settingsODK = self.settings['odk']
        self.settingsOpenVA = self.settings['openVA']
        self.settingsDHIS = self.settings['dhis']
        # NOTE(review): plRunDate=True looks like a placeholder rather
        # than a real run-date string -- confirm TransferDB tolerates it.
        self.xferDB = TransferDB(dbFileName='Pipeline.db',
                                 dbDirectory='.',
                                 dbKey='enilepiP',
                                 plRunDate=True)
        self.conn = self.xferDB.connectDB()
        self.c = self.conn.cursor()
        # Empty the log and VA storage tables so the duplicate count in
        # the test below starts from zero.
        self.c.execute('DELETE FROM EventLog;')
        self.conn.commit()
        self.c.execute('DELETE FROM VA_Storage;')
        self.conn.commit()
        self.odkBC = self.pl.runODK(self.settingsODK, self.settingsPipeline)

    def test_runODK_checkDuplicates(self):
        """Check checkDuplicates() method:"""
        # Number of VA records in the first export; every one of them
        # should later be flagged as a duplicate.
        vaRecords = read_csv('ODKFiles/odkBCExportNew.csv')
        nVA = vaRecords.shape[0]
        rOut = self.pl.runOpenVA(self.settingsOpenVA,
                                 self.settingsPipeline,
                                 self.settingsODK.odkID,
                                 self.pl.pipelineRunDate)
        pipelineDHIS = self.pl.runDHIS(self.settingsDHIS,
                                       self.settingsPipeline)
        # Persist the first run's results, then remove intermediate
        # files so the second ODK run starts fresh.
        self.pl.storeResultsDB()
        os.remove('ODKFiles/odkBCExportNew.csv')
        os.remove('OpenVAFiles/pycrossva_input.csv')
        os.remove('OpenVAFiles/openVA_input.csv')
        # Re-running the ODK step should log every record as a
        # duplicate in the EventLog.
        odkBC2 = self.pl.runODK(self.settingsODK, self.settingsPipeline)
        self.c.execute('SELECT eventDesc FROM EventLog;')
        query = self.c.fetchall()
        nDuplicates = [i[0] for i in query if 'duplicate' in i[0]]
        self.assertEqual(len(nDuplicates), nVA)

    def tearDown(self):
        # Remove per-test artifacts and close the DB connection opened
        # in setUp().
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        self.conn.close()

    @classmethod
    def tearDownClass(cls):
        # Discard the Transfer Database created for this class.
        os.remove('Pipeline.db')
def test_runODK_6(self):
    """Check checkDuplicates() method."""
    # Remove any stale files left over from earlier runs.
    for stale in ("ODKFiles/odkBCExportNew.csv",
                  "ODKFiles/odkBCExportPrev.csv",
                  "OpenVAFiles/openVA_input.csv"):
        if os.path.isfile(stale):
            os.remove(stale)
    db_file = "copy_Pipeline.db"
    db_dir = "."
    db_key = "enilepiP"
    run_date = datetime.datetime.now().strftime("%Y-%m-%d_%H:%M:%S")
    # Clear the log and storage tables so duplicate counts start at zero.
    xfer = TransferDB(dbFileName=db_file, dbDirectory=db_dir,
                      dbKey=db_key, plRunDate=run_date)
    conn = xfer.connectDB()
    cursor = conn.cursor()
    cursor.execute("DELETE FROM EventLog;")
    conn.commit()
    cursor.execute("DELETE FROM VA_Storage;")
    conn.commit()
    conn.close()
    pl = Pipeline(db_file, db_dir, db_key, True)
    settings = pl.config()
    settings_pipeline = settings["pipeline"]
    settings_odk = settings["odk"]
    settings_openva = settings["openVA"]
    settings_dhis = settings["dhis"]
    # First full run: export, analyze, post, and store.
    odk_bc = pl.runODK(settings_odk, settings_pipeline)
    n_va = pd.read_csv("ODKFiles/odkBCExportNew.csv").shape[0]
    r_out = pl.runOpenVA(settings_openva,
                         settings_pipeline,
                         settings_odk.odkID,
                         pl.pipelineRunDate)
    pipeline_dhis = pl.runDHIS(settings_dhis, settings_pipeline)
    pl.storeResultsDB()
    os.remove("ODKFiles/odkBCExportNew.csv")
    os.remove("OpenVAFiles/openVA_input.csv")
    # Second ODK run should flag every record as a duplicate.
    odk_bc2 = pl.runODK(settings_odk, settings_pipeline)
    xfer = TransferDB(dbFileName=db_file, dbDirectory=db_dir,
                      dbKey=db_key, plRunDate=pl.pipelineRunDate)
    conn = xfer.connectDB()
    cursor = conn.cursor()
    cursor.execute("SELECT eventDesc FROM EventLog;")
    rows = cursor.fetchall()
    duplicates = [r[0] for r in rows if "Duplicate" in r[0]]
    self.assertEqual(len(duplicates), n_va)
    # Clean up all artifacts produced by this test.
    shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
    shutil.rmtree("DHIS/blobs/")
    os.remove("OpenVAFiles/newStorage.csv")
    os.remove("OpenVAFiles/recordStorage.csv")
    os.remove("OpenVAFiles/entityAttributeValue.csv")
def runPipeline(database_file_name, database_directory, database_key,
                export_to_DHIS=True):
    """Runs through all steps of the OpenVA Pipeline

    This function is a wrapper for the Pipeline class, which runs through
    all steps of the OpenVA Pipeline -- (1) connect to Transfer Database
    (to retrieve configuration settings); (2) connect to ODK Aggregate to
    download a CSV file with VA records; (3) run openVA (or SmartVA) to
    assign cause of death; and (4) store CoD results and VA data in the
    Transfer Database as well as a DHIS2 VA Program (if requested).

    :param database_file_name: File name for the Transfer Database.
    :param database_directory: Path of the Transfer Database.
    :param database_key: Encryption key for the Transfer Database
    :param export_to_DHIS: Indicator for posting VA records to a DHIS2 server.
    :type export_to_DHIS: (Boolean)
    """
    pl = Pipeline(dbFileName=database_file_name,
                  dbDirectory=database_directory,
                  dbKey=database_key,
                  useDHIS=export_to_DHIS)
    try:
        settings = pl.config()
    except PipelineConfigurationError as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)
    settingsPipeline = settings["pipeline"]
    settingsODK = settings["odk"]
    settingsOpenVA = settings["openVA"]
    settingsDHIS = settings["dhis"]
    try:
        odkBC = pl.runODK(settingsODK, settingsPipeline)
        pl.logEvent("Briefcase Export Completed Successfully", "Event")
    except ODKError as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)
    try:
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pl.logEvent("OpenVA Analysis Completed Successfully", "Event")
    except (OpenVAError, SmartVAError) as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)
    # Nothing new to process -- a clean (successful) exit.
    if rOut["zeroRecords"]:
        pl.logEvent("No new VA records from ODK (now exiting)", "Event")
        sys.exit(0)
    if export_to_DHIS:
        try:
            pipelineDHIS = pl.runDHIS(settingsDHIS, settingsPipeline)
            pl.logEvent("Posted Events to DHIS2 Successfully", "Event")
        except DHISError as e:
            pl.logEvent(str(e), "Error")
            sys.exit(1)
    try:
        pl.storeResultsDB()
        pl.logEvent("Stored Records to Xfer Database Successfully", "Event")
    except (PipelineError, DatabaseConnectionError,
            PipelineConfigurationError) as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)
    try:
        pl.closePipeline()
        pl.logEvent("Successfully Completed Run of Pipeline", "Event")
    # The original caught (DatabaseConnectionError, DatabaseConnectionError)
    # -- a duplicated tuple entry, collapsed to the single exception type.
    except DatabaseConnectionError as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)