def test_runOpenVA_5(self):
        """Check that runOpenVA() doesn't create new file if returns zeroRecords == True."""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/zeroRecords_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/zeroRecords_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")

        plZero = Pipeline(self.dbFileName,
                          self.dbDirectory,
                          self.dbKey,
                          self.useDHIS)
        settings = plZero.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        rOut = plZero.runOpenVA(settingsOpenVA,
                                settingsPipeline,
                                settingsODK.odkID,
                                plZero.pipelineRunDate)

        self.assertFalse(
            os.path.isfile(self.dirOpenVA + "/openVA_input.csv")
        )
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")
    def test_runODK_2(self):
        """Test runODK method downloads file."""

        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/previous_bc_export.csv", "ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/another_bc_export.csv", "ODKFiles/odkBCExportNew.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        self.assertTrue(os.path.isfile("ODKFiles/odkBCExportPrev.csv"))
        os.remove("ODKFiles/odkBCExportPrev.csv")
    def test_runDHIS_2_postVA(self):
        """Post VA records to DHIS2."""
        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory,
                      dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pipelineDHIS = pl.runDHIS(settingsDHIS,
                                  settingsPipeline)
        postLog = pipelineDHIS["postLog"]
        checkLog = 'importSummaries' in postLog["response"].keys()
        self.assertTrue(checkLog)
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
        shutil.rmtree("DHIS/blobs/")
    def test_runOpenVA_1(self):
        """Check that runOpenVA() brings in new file."""
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")

        pl = Pipeline(self.dbFileName, self.dbDirectory, self.dbKey, self.useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        self.assertTrue(
            os.path.isfile("OpenVAFiles/openVA_input.csv")
        )
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
    def test_runDHIS_3_verifyPost(self):
        """Verify VA records got posted to DHIS2."""
        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory,
                      dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pipelineDHIS = pl.runDHIS(settingsDHIS,
                                  settingsPipeline)
        dfNewStorage = pd.read_csv("OpenVAFiles/newStorage.csv")
        nPushed = sum(dfNewStorage["pipelineOutcome"] == "Pushed to DHIS2")
        self.assertEqual(nPushed, pipelineDHIS["nPostedRecords"])
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
        shutil.rmtree("DHIS/blobs")
    def test_runOpenVA_4(self):
        """Check that runOpenVA() returns zeroRecords = FALSE"""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")

        plZero = Pipeline(self.dbFileName,
                          self.dbDirectory,
                          self.dbKey,
                          self.useDHIS)
        settings = plZero.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        rOut = plZero.runOpenVA(settingsOpenVA,
                                settingsPipeline,
                                settingsODK.odkID,
                                plZero.pipelineRunDate)
        self.assertFalse(rOut["zeroRecords"])
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")
        os.remove(self.dirOpenVA + "/openVA_input.csv")
        shutil.rmtree("OpenVAFiles/" + plZero.pipelineRunDate)
Example #7
    @classmethod
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        cls.settingsPipeline = settings['pipeline']
        cls.settingsODK = settings['odk']
        cls.settingsOpenVA = settings['openVA']
        cls.settingsDHIS = settings['dhis']
    def test_runOpenVA_SmartVA_1(self):
        """Check that runOpenVA() executes smartva cli"""
        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/odkExport_phmrc-1.csv",
                    "ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/odkExport_phmrc-2.csv",
                    "ODKFiles/odkBCExportNew.csv")

        dbFileName = "copy_smartVA_Pipeline.db"
        dbKey = "enilepiP"
        dbDirectory = "."
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")

        pl = Pipeline(dbFileName,
                      dbDirectory,
                      dbKey,
                      True)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)

        svaOut = os.path.join(
            "OpenVAFiles",
            pl.pipelineRunDate,
            "1-individual-cause-of-death/individual-cause-of-death.csv"
        )

        self.assertTrue(os.path.isfile(svaOut))
        shutil.rmtree(
            os.path.join("OpenVAFiles", pl.pipelineRunDate),
            ignore_errors = True
        )
        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
    def test_storeVA(self):
        """Check that depositResults() stores VA records in Transfer DB."""
        shutil.copy("OpenVAFiles/sample_newStorage.csv",
                    "OpenVAFiles/newStorage.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("DELETE FROM VA_Storage;")
        conn.commit()
        conn.close()

        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        pl.storeResultsDB()
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        sql = "SELECT id FROM VA_Storage"
        c.execute(sql)
        vaIDs = c.fetchall()
        conn.close()
        vaIDsList = [j for i in vaIDs for j in i]
        s1 = set(vaIDsList)
        dfNewStorage = pd.read_csv("OpenVAFiles/newStorage.csv")
        dfNewStorageID = dfNewStorage["odkMetaInstanceID"]
        s2 = set(dfNewStorageID)
        self.assertTrue(s2.issubset(s1))
    def test_runODK_4(self):
        """Check successful run with valid parameters."""

        shutil.rmtree("ODKFiles/ODK Briefcase Storage/", ignore_errors = True)

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        self.assertEqual(0, odkBC.returncode)
Example #11
    @classmethod
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        if os.path.isfile('OpenVAFiles/newStorage.csv'):
            os.remove('OpenVAFiles/newStorage.csv')
        shutil.copy('OpenVAFiles/sample_newStorage.csv',
                    'OpenVAFiles/newStorage.csv')
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime('%Y-%m-%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName='Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute('DELETE FROM VA_Storage;')
        conn.commit()
        conn.close()
        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']

        pl.storeResultsDB()

        xferDB = TransferDB(dbFileName='Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        sql = 'SELECT id FROM VA_Storage'
        c.execute(sql)
        vaIDs = c.fetchall()
        conn.close()
        vaIDsList = [j for i in vaIDs for j in i]
        cls.s1 = set(vaIDsList)
        dfNewStorage = read_csv('OpenVAFiles/newStorage.csv')
        dfNewStorageID = dfNewStorage['odkMetaInstanceID']
        cls.s2 = set(dfNewStorageID)
Example #12
    @classmethod
    def setUpClass(cls):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        cls.odkBC = pl.runODK(settingsODK, settingsPipeline)
    def test_runODK_5(self):
        """Check for exported CSV file."""

        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        self.assertTrue(os.path.isfile("ODKFiles/odkBCExportNew.csv"))
Example #14
    @classmethod
    def setUpClass(cls):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/zeroRecords_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/zeroRecords_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('OpenVAFiles/openVA_input.csv'):
            os.remove('OpenVAFiles/openVA_input.csv')

        plZero = Pipeline('copy_Pipeline.db', '.', 'enilepiP', True)
        settings = plZero.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        cls.rOut = plZero.runOpenVA(settingsOpenVA, settingsPipeline,
                                    settingsODK.odkID, plZero.pipelineRunDate)
    def test_runODK_3(self):
        """Check mergeToPrevExport() includes all VA records from ODK BC export files."""

        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/previous_bc_export.csv", "ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/another_bc_export.csv", "ODKFiles/odkBCExportNew.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)

        hasAll = True
        with open("ODKFiles/odkBCExportPrev.csv") as fCombined:
            fCombinedLines = fCombined.readlines()
        with open("ODKFiles/previous_bc_export.csv") as fPrevious:
            fPreviousLines = fPrevious.readlines()
        with open("ODKFiles/another_bc_export.csv") as fAnother:
            fAnotherLines = fAnother.readlines()
        for line in fPreviousLines:
            if line not in fCombinedLines:
                hasAll = False
        for line in fAnotherLines:
            if line not in fCombinedLines:
                hasAll = False
        self.assertTrue(hasAll)
        os.remove("ODKFiles/odkBCExportPrev.csv")
Example #16
    @classmethod
    def setUpClass(cls):

        if os.path.isfile('OpenVAFiles/openVA_input.csv'):
            os.remove('OpenVAFiles/openVA_input.csv')
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_prev_who_v151.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_new_who_v151.csv',
                    'ODKFiles/odkBCExportNew.csv')
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        cls.rOut = pl.runOpenVA(settingsOpenVA, settingsPipeline,
                                settingsODK.odkID, pl.pipelineRunDate)
    def test_runOpenVA_2(self):
        """Check that runOpenVA() includes all records."""
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        pl = Pipeline(self.dbFileName, self.dbDirectory, self.dbKey, self.useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        hasAll = True
        with open("OpenVAFiles/openVA_input.csv") as fCombined:
            fCombinedLines = fCombined.readlines()
        with open("ODKFiles/previous_bc_export.csv") as fPrevious:
            fPreviousLines = fPrevious.readlines()
        with open("ODKFiles/another_bc_export.csv") as fAnother:
            fAnotherLines = fAnother.readlines()
        for line in fPreviousLines:
            if line not in fCombinedLines:
                hasAll = False
        for line in fAnotherLines:
            if line not in fCombinedLines:
                hasAll = False
        self.assertTrue(hasAll)
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
    def test_runDHIS_1_vaProgramUID(self):
        """Verify VA program is installed."""
        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory,
                      dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pipelineDHIS = pl.runDHIS(settingsDHIS,
                                  settingsPipeline)
        self.assertEqual(pipelineDHIS["vaProgramUID"], "sv91bCroFFx")
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
        shutil.rmtree("DHIS/blobs/")
Example #19
    @classmethod
    def setUpClass(cls):

        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        if os.path.isfile('OpenVAFiles/entityAttributeValue.csv'):
            os.remove('OpenVAFiles/entityAttributeValue.csv')
        if os.path.isfile('OpenVAFiles/recordStorage.csv'):
            os.remove('OpenVAFiles/recordStorage.csv')
        shutil.copy('OpenVAFiles/sampleEAV.csv',
                    'OpenVAFiles/entityAttributeValue.csv')
        shutil.copy('OpenVAFiles/sample_recordStorage.csv',
                    'OpenVAFiles/recordStorage.csv')
        shutil.copy('OpenVAFiles/sample_newStorage.csv',
                    'OpenVAFiles/newStorage.csv')
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        plRunDate = pl.pipelineRunDate
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        cls.pipelineDHIS = pl.runDHIS(settingsDHIS, settingsPipeline)
Example #20
def runPipeline(database_file_name,
                database_directory,
                database_key,
                export_to_DHIS = True):
    """Runs through all steps of the OpenVA Pipeline

    This function is a wrapper for the Pipeline class, which
    runs through all steps of the OpenVA Pipeline -- (1) connect to
    Transfer Database (to retrieve configuration settings); (2) connect to
    ODK Aggregate to download a CSV file with VA records; (3) run openVA
    (or SmartVA) to assign cause of death; and (4) store CoD results and
    VA data in the Transfer Database as well as a DHIS2 VA Program (if
    requested).

    :param database_file_name: File name for the Transfer Database.
    :param database_directory: Path of the Transfer Database.
    :param database_key: Encryption key for the Transfer Database.
    :param export_to_DHIS: Indicator for posting VA records to a DHIS2 server.
    :type export_to_DHIS: bool
    """

    pl = Pipeline(dbFileName = database_file_name,
                  dbDirectory = database_directory,
                  dbKey = database_key,
                  useDHIS = export_to_DHIS)
    try:
        settings = pl.config()
    except PipelineConfigurationError as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)

    settingsPipeline = settings["pipeline"]
    settingsODK = settings["odk"]
    settingsOpenVA = settings["openVA"]
    settingsDHIS = settings["dhis"]

    try:
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        pl.logEvent("Briefcase Export Completed Successfully", "Event")
    except ODKError as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)

    try:
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pl.logEvent("OpenVA Analysis Completed Successfully", "Event")
    except (OpenVAError, SmartVAError) as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)

    if (rOut["zeroRecords"] == True):
        pl.logEvent("No new VA records from ODK (now exiting)", "Event")
        sys.exit(0)

    if export_to_DHIS:
        try:
            pipelineDHIS = pl.runDHIS(settingsDHIS,
                                      settingsPipeline)
            pl.logEvent("Posted Events to DHIS2 Successfully", "Event")
        except DHISError as e:
            pl.logEvent(str(e), "Error")
            sys.exit(1)

    try:
        pl.storeResultsDB()
        pl.logEvent("Stored Records to Xfer Database Successfully", "Event")
    except (PipelineError, DatabaseConnectionError,
            PipelineConfigurationError) as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)

    try:
        pl.closePipeline()
        pl.logEvent("Successfully Completed Run of Pipeline", "Event")
    except DatabaseConnectionError as e:
        pl.logEvent(str(e), "Error")
        sys.exit(1)
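
A minimal usage sketch of runPipeline(); the file name, directory, and key below are the same placeholder values used in the test examples above, not required settings, and the Transfer Database is assumed to already exist (e.g. created with createTransferDB).

if __name__ == "__main__":
    # Run every pipeline step against a local Transfer Database and post the
    # results to DHIS2; swap in your own database settings as needed.
    runPipeline(database_file_name="Pipeline.db",
                database_directory=".",
                database_key="enilepiP",
                export_to_DHIS=True)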
Example #21
class Check_runODK_with_exports(unittest.TestCase):
    """Check runODK method with existing ODK exports:"""
    @classmethod
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

    def setUp(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/previous_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/another_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')
        self.old_mtimePrev = os.path.getmtime('ODKFiles/odkBCExportPrev.csv')
        self.old_mtimeNew = os.path.getmtime('ODKFiles/odkBCExportNew.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        self.dbFileName = 'Pipeline.db'
        self.dbDirectory = '.'
        self.dbKey = 'enilepiP'
        self.useDHIS = True
        self.pl = Pipeline(self.dbFileName, self.dbDirectory, self.dbKey,
                           self.useDHIS)
        settings = self.pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        self.odkBC = self.pl.runODK(settingsODK, settingsPipeline)
        self.new_mtimePrev = os.path.getmtime('ODKFiles/odkBCExportPrev.csv')
        self.new_mtimeNew = os.path.getmtime('ODKFiles/odkBCExportNew.csv')

    def tearDown(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')

    def test_runODK_returncode_with_previous_exports(self):
        """Check returncode with valid parameters:"""

        self.assertEqual(0, self.odkBC.returncode)

    def test_runODK_exportPrev_with_previous_exports(self):
        """Check modification time on odkBCExportPrev with previous exports:"""

        self.assertTrue(self.new_mtimePrev > self.old_mtimePrev)

    def test_runODK_exportNew_with_previous_exports(self):
        """Check modification time on odkBCExportNew:"""

        self.assertTrue(self.new_mtimeNew > self.old_mtimeNew)

    def test_runODK_mergeToPrevExport_with_previous_exports(self):
        """Check mergeToPrevExport() keeps all records from BC export files:"""

        hasAll = True
        with open('ODKFiles/odkBCExportPrev.csv') as fCombined:
            fCombinedLines = fCombined.readlines()
        with open('ODKFiles/previous_bc_export.csv') as fPrevious:
            fPreviousLines = fPrevious.readlines()
        with open('ODKFiles/another_bc_export.csv') as fAnother:
            fAnotherLines = fAnother.readlines()
        for line in fPreviousLines:
            if line not in fCombinedLines:
                hasAll = False
        for line in fAnotherLines:
            if line not in fCombinedLines:
                hasAll = False
        self.assertTrue(hasAll)

    @classmethod
    def tearDownClass(cls):

        os.remove('Pipeline.db')
    def test_runODK_6(self):
        """Check checkDuplicates() method."""

        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
        if os.path.isfile("OpenVAFiles/openVA_input.csv"):
            os.remove("OpenVAFiles/openVA_input.csv")

        dbFileName = "copy_Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("DELETE FROM EventLog;")
        conn.commit()
        c.execute("DELETE FROM VA_Storage;")
        conn.commit()
        conn.close()
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        vaRecords = pd.read_csv("ODKFiles/odkBCExportNew.csv")
        nVA = vaRecords.shape[0]
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pipelineDHIS = pl.runDHIS(settingsDHIS,
                                  settingsPipeline)
        pl.storeResultsDB()
        os.remove("ODKFiles/odkBCExportNew.csv")
        os.remove("OpenVAFiles/openVA_input.csv")
        odkBC2 = pl.runODK(settingsODK,
                           settingsPipeline)
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pl.pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("SELECT eventDesc FROM EventLog;")
        query = c.fetchall()
        nDuplicates = [i[0] for i in query if "Duplicate" in i[0]]
        self.assertEqual(len(nDuplicates), nVA)
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
        shutil.rmtree("DHIS/blobs/")
        os.remove("OpenVAFiles/newStorage.csv")
        os.remove("OpenVAFiles/recordStorage.csv")
        os.remove("OpenVAFiles/entityAttributeValue.csv")
Example #23
class Check_storeResultsDB(unittest.TestCase):
    """Check storeResultsDB method marks duplicate records:"""
    @classmethod
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

    def setUp(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        self.pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        self.settings = self.pl.config()
        self.settingsPipeline = self.settings['pipeline']
        self.settingsODK = self.settings['odk']
        self.settingsOpenVA = self.settings['openVA']
        self.settingsDHIS = self.settings['dhis']

        self.xferDB = TransferDB(dbFileName='Pipeline.db',
                                 dbDirectory='.',
                                 dbKey='enilepiP',
                                 plRunDate=True)
        self.conn = self.xferDB.connectDB()
        self.c = self.conn.cursor()
        self.c.execute('DELETE FROM EventLog;')
        self.conn.commit()
        self.c.execute('DELETE FROM VA_Storage;')
        self.conn.commit()
        self.odkBC = self.pl.runODK(self.settingsODK, self.settingsPipeline)

    def test_runODK_checkDuplicates(self):
        """Check checkDuplicates() method:"""

        vaRecords = read_csv('ODKFiles/odkBCExportNew.csv')
        nVA = vaRecords.shape[0]
        rOut = self.pl.runOpenVA(self.settingsOpenVA, self.settingsPipeline,
                                 self.settingsODK.odkID,
                                 self.pl.pipelineRunDate)
        pipelineDHIS = self.pl.runDHIS(self.settingsDHIS,
                                       self.settingsPipeline)
        self.pl.storeResultsDB()
        os.remove('ODKFiles/odkBCExportNew.csv')
        os.remove('OpenVAFiles/pycrossva_input.csv')
        os.remove('OpenVAFiles/openVA_input.csv')
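        # A second ODK run re-downloads the same records, which have already
        # been stored, so each one should now be logged as a duplicate.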
        odkBC2 = self.pl.runODK(self.settingsODK, self.settingsPipeline)
        self.c.execute('SELECT eventDesc FROM EventLog;')
        query = self.c.fetchall()
        nDuplicates = [i[0] for i in query if 'duplicate' in i[0]]
        self.assertEqual(len(nDuplicates), nVA)

    def tearDown(self):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        self.conn.close()

    @classmethod
    def tearDownClass(cls):

        os.remove('Pipeline.db')