    def test_cleanPipeline_odkLastRun(self):
        """Test update of ODK_Conf.odkLastRun."""

        os.makedirs("DHIS/blobs/", exist_ok = True)
        dbFileName = "copy_Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory,
                      dbKey, useDHIS)
        pl.closePipeline()

        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("SELECT odkLastRun FROM ODK_Conf;")
        sqlQuery = c.fetchone()
        results = [i for i in sqlQuery]
        self.assertEqual(results[0], pipelineRunDate)
        c.execute("UPDATE ODK_Conf SET odkLastRun = '1900-01-01_00:00:01';")
        conn.commit()
        conn.close()
    def test_runODK_2(self):
        """Test runODK method downloads file."""

        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/previous_bc_export.csv", "ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/another_bc_export.csv", "ODKFiles/odkBCExportNew.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        self.assertTrue(os.path.isfile("ODKFiles/odkBCExportPrev.csv"))
        os.remove("ODKFiles/odkBCExportPrev.csv")
    def test_runOpenVA_5(self):
        """Check that runOpenVA() doesn't create new file if returns zeroRecords == True."""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/zeroRecords_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/zeroRecords_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")

        plZero = Pipeline(self.dbFileName,
                          self.dbDirectory,
                          self.dbKey,
                          self.useDHIS)
        settings = plZero.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        rOut = plZero.runOpenVA(settingsOpenVA,
                                settingsPipeline,
                                settingsODK.odkID,
                                plZero.pipelineRunDate)

        self.assertFalse(
            os.path.isfile(self.dirOpenVA + "/openVA_input.csv")
        )
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")
Example 4
    def setUp(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/previous_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/another_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')
        self.old_mtimePrev = os.path.getmtime('ODKFiles/odkBCExportPrev.csv')
        self.old_mtimeNew = os.path.getmtime('ODKFiles/odkBCExportNew.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        self.dbFileName = 'Pipeline.db'
        self.dbDirectory = '.'
        self.dbKey = 'enilepiP'
        self.useDHIS = True
        self.pl = Pipeline(self.dbFileName, self.dbDirectory, self.dbKey,
                           self.useDHIS)
        settings = self.pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        self.odkBC = self.pl.runODK(settingsODK, settingsPipeline)
        self.new_mtimePrev = os.path.getmtime('ODKFiles/odkBCExportPrev.csv')
        self.new_mtimeNew = os.path.getmtime('ODKFiles/odkBCExportNew.csv')
    def test_runOpenVA_4(self):
        """Check that runOpenVA() returns zeroRecords = FALSE"""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")

        plZero = Pipeline(self.dbFileName,
                          self.dbDirectory,
                          self.dbKey,
                          self.useDHIS)
        settings = plZero.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        rOut = plZero.runOpenVA(settingsOpenVA,
                                settingsPipeline,
                                settingsODK.odkID,
                                plZero.pipelineRunDate)
        self.assertFalse(rOut["zeroRecords"])
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")
        os.remove(self.dirOpenVA + "/openVA_input.csv")
        shutil.rmtree("OpenVAFiles/" + plZero.pipelineRunDate)
    def test_runOpenVA_1(self):
        """Check that runOpenVA() brings in new file."""
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")

        pl = Pipeline(self.dbFileName, self.dbDirectory, self.dbKey, self.useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        self.assertTrue(
            os.path.isfile("OpenVAFiles/openVA_input.csv")
        )
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
Example 7
    def setUp(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        self.pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        self.settings = self.pl.config()
        self.settingsPipeline = self.settings['pipeline']
        self.settingsODK = self.settings['odk']
        self.settingsOpenVA = self.settings['openVA']
        self.settingsDHIS = self.settings['dhis']

        self.xferDB = TransferDB(dbFileName='Pipeline.db',
                                 dbDirectory='.',
                                 dbKey='enilepiP',
                                 plRunDate=True)
        self.conn = self.xferDB.connectDB()
        self.c = self.conn.cursor()
        self.c.execute('DELETE FROM EventLog;')
        self.conn.commit()
        self.c.execute('DELETE FROM VA_Storage;')
        self.conn.commit()
        self.odkBC = self.pl.runODK(self.settingsODK, self.settingsPipeline)
Example 8
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        cls.settingsPipeline = settings['pipeline']
        cls.settingsODK = settings['odk']
        cls.settingsOpenVA = settings['openVA']
        cls.settingsDHIS = settings['dhis']
Example 9
    def setUpClass(cls):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_phmrc-1.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_phmrc-2.csv',
                    'ODKFiles/odkBCExportNew.csv')
        if not os.path.isfile('smartva'):
            downloadSmartVA()

        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime('%Y-%m-%d_%H:%M:%S')
        cls.pl = Pipeline('copy_smartVA_Pipeline.db', '.', 'enilepiP', True)
        settings = cls.pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']

        cls.rOut = cls.pl.runOpenVA(settingsOpenVA, settingsPipeline,
                                    settingsODK.odkID, cls.pl.pipelineRunDate)
        cls.svaOut = os.path.join(
            'OpenVAFiles', cls.pl.pipelineRunDate,
            '1-individual-cause-of-death/individual-cause-of-death.csv')
    def test_runOpenVA_SmartVA_1(self):
        """Check that runOpenVA() executes smartva cli"""
        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/odkExport_phmrc-1.csv",
                    "ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/odkExport_phmrc-2.csv",
                    "ODKFiles/odkBCExportNew.csv")

        dbFileName = "copy_smartVA_Pipeline.db"
        dbKey = "enilepiP"
        dbDirectory = "."
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")

        pl = Pipeline(dbFileName,
                      dbDirectory,
                      dbKey,
                      True)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)

        svaOut = os.path.join(
            "OpenVAFiles",
            pl.pipelineRunDate,
            "1-individual-cause-of-death/individual-cause-of-death.csv"
        )

        self.assertTrue(os.path.isfile(svaOut))
        shutil.rmtree(
            os.path.join("OpenVAFiles", pl.pipelineRunDate),
            ignore_errors = True
        )
        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
    def test_storeVA(self):
        """Check that depositResults() stores VA records in Transfer DB."""
        shutil.copy("OpenVAFiles/sample_newStorage.csv",
                    "OpenVAFiles/newStorage.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("DELETE FROM VA_Storage;")
        conn.commit()
        conn.close()

        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        pl.storeResultsDB()
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        sql = "SELECT id FROM VA_Storage"
        c.execute(sql)
        vaIDs = c.fetchall()
        conn.close()
        vaIDsList = [j for i in vaIDs for j in i]
        s1 = set(vaIDsList)
        dfNewStorage = pd.read_csv("OpenVAFiles/newStorage.csv")
        dfNewStorageID = dfNewStorage["odkMetaInstanceID"]
        s2 = set(dfNewStorageID)
        self.assertTrue(s2.issubset(s1))
Example 12
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        if os.path.isfile('OpenVAFiles/newStorage.csv'):
            os.remove('OpenVAFiles/newStorage.csv')
        shutil.copy('OpenVAFiles/sample_newStorage.csv',
                    'OpenVAFiles/newStorage.csv')
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime('%Y-%m-%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName='Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute('DELETE FROM VA_Storage;')
        conn.commit()
        conn.close()
        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']

        pl.storeResultsDB()

        xferDB = TransferDB(dbFileName='Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        sql = 'SELECT id FROM VA_Storage'
        c.execute(sql)
        vaIDs = c.fetchall()
        conn.close()
        vaIDsList = [j for i in vaIDs for j in i]
        cls.s1 = set(vaIDsList)
        dfNewStorage = read_csv('OpenVAFiles/newStorage.csv')
        dfNewStorageID = dfNewStorage['odkMetaInstanceID']
        cls.s2 = set(dfNewStorageID)
    def test_runODK_4(self):
        """Check successful run with valid parameters."""

        shutil.rmtree("ODKFiles/ODK Briefcase Storage/", ignore_errors = True)

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        self.assertEqual(0, odkBC.returncode)
Example 14
    def setUpClass(cls):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        cls.odkBC = pl.runODK(settingsODK, settingsPipeline)
    def test_runODK_5(self):
        """Check for exported CSV file."""

        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        self.assertTrue(os.path.isfile("ODKFiles/odkBCExportNew.csv"))
Example 16
    def setUpClass(cls):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/zeroRecords_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/zeroRecords_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('OpenVAFiles/openVA_input.csv'):
            os.remove('OpenVAFiles/openVA_input.csv')

        plZero = Pipeline('copy_Pipeline.db', '.', 'enilepiP', True)
        settings = plZero.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        cls.rOut = plZero.runOpenVA(settingsOpenVA, settingsPipeline,
                                    settingsODK.odkID, plZero.pipelineRunDate)
    def test_runDHIS_3_verifyPost(self):
        """Verify VA records got posted to DHIS2."""
        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory,
                      dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pipelineDHIS = pl.runDHIS(settingsDHIS,
                                  settingsPipeline)
        dfNewStorage = pd.read_csv("OpenVAFiles/newStorage.csv")
        nPushed = sum(dfNewStorage["pipelineOutcome"] == "Pushed to DHIS2")
        self.assertEqual(nPushed, pipelineDHIS["nPostedRecords"])
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
        shutil.rmtree("DHIS/blobs")
    def test_runDHIS_2_postVA(self):
        """Post VA records to DHIS2."""
        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory,
                      dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pipelineDHIS = pl.runDHIS(settingsDHIS,
                                  settingsPipeline)
        postLog = pipelineDHIS["postLog"]
        checkLog = 'importSummaries' in postLog["response"].keys()
        self.assertTrue(checkLog)
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
        shutil.rmtree("DHIS/blobs/")
    def test_runODK_3(self):
        """Check mergeToPrevExport() includes all VA records from ODK BC export files."""

        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/previous_bc_export.csv", "ODKFiles/odkBCExportPrev.csv")
        shutil.copy("ODKFiles/another_bc_export.csv", "ODKFiles/odkBCExportNew.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)

        hasAll = True
        with open("ODKFiles/odkBCExportPrev.csv") as fCombined:
            fCombinedLines = fCombined.readlines()
        with open("ODKFiles/previous_bc_export.csv") as fPrevious:
            fPreviousLines = fPrevious.readlines()
        with open("ODKFiles/another_bc_export.csv") as fAnother:
            fAnotherLines = fAnother.readlines()
        for line in fPreviousLines:
            if line not in fCombinedLines:
                hasAll = False
        for line in fAnotherLines:
            if line not in fCombinedLines:
                hasAll = False
        self.assertTrue(hasAll)
        os.remove("ODKFiles/odkBCExportPrev.csv")
Example 20
    def setUpClass(cls):

        if os.path.isfile('OpenVAFiles/openVA_input.csv'):
            os.remove('OpenVAFiles/openVA_input.csv')
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_prev_who_v151.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_new_who_v151.csv',
                    'ODKFiles/odkBCExportNew.csv')
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        cls.rOut = pl.runOpenVA(settingsOpenVA, settingsPipeline,
                                settingsODK.odkID, pl.pipelineRunDate)
    def test_runOpenVA_2(self):
        """Check that runOpenVA() includes all records."""
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        pl = Pipeline(self.dbFileName, self.dbDirectory, self.dbKey, self.useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        hasAll = True
        with open("OpenVAFiles/openVA_input.csv") as fCombined:
            fCombinedLines = fCombined.readlines()
        with open("ODKFiles/previous_bc_export.csv") as fPrevious:
            fPreviousLines = fPrevious.readlines()
        with open("ODKFiles/another_bc_export.csv") as fAnother:
            fAnotherLines = fAnother.readlines()
        for line in fPreviousLines:
            if line not in fCombinedLines:
                hasAll = False
        for line in fAnotherLines:
            if line not in fCombinedLines:
                hasAll = False
        self.assertTrue(hasAll)
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
Example 22
    def setUpClass(cls):

        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        if os.path.isfile('OpenVAFiles/entityAttributeValue.csv'):
            os.remove('OpenVAFiles/entityAttributeValue.csv')
        if os.path.isfile('OpenVAFiles/recordStorage.csv'):
            os.remove('OpenVAFiles/recordStorage.csv')
        shutil.copy('OpenVAFiles/sampleEAV.csv',
                    'OpenVAFiles/entityAttributeValue.csv')
        shutil.copy('OpenVAFiles/sample_recordStorage.csv',
                    'OpenVAFiles/recordStorage.csv')
        shutil.copy('OpenVAFiles/sample_newStorage.csv',
                    'OpenVAFiles/newStorage.csv')
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        plRunDate = pl.pipelineRunDate
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        cls.pipelineDHIS = pl.runDHIS(settingsDHIS, settingsPipeline)
Example 23
    def setUpClass(cls):

        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime('%Y-%m-%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName='copy_Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()

        c = conn.cursor()
        sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
        par = (
            'InSilicoVA',
            'InSilicoVA|1.1.4|InterVA|5|2016 WHO Verbal Autopsy Form|v1_4_1')
        c.execute(sql, par)
        sql = 'UPDATE InSilicoVA_Conf SET data_type = ?'
        par = ('WHO2016', )
        c.execute(sql, par)
        conn.commit()
        conn.close()
        cls.pl = Pipeline('copy_Pipeline.db', '.', 'enilepiP', True)
        settings = cls.pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/previous_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/another_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')

        if os.path.isfile('OpenVAFiles/recordStorage.csv'):
            os.remove('OpenVAFiles/recordStorage.csv')
        if os.path.isfile('OpenVAFiles/entityAttributeValue.csv'):
            os.remove('OpenVAFiles/entityAttributeValue.csv')

        cls.rOut = cls.pl.runOpenVA(settingsOpenVA, settingsPipeline,
                                    settingsODK.odkID, cls.pl.pipelineRunDate)
Example 24
    def setUpClass(cls):

        if not os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            shutil.copy('ODKFiles/previous_bc_export.csv',
                        'ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            shutil.copy('ODKFiles/another_bc_export.csv',
                        'ODKFiles/odkBCExportNew.csv')

        if not os.path.isfile('OpenVAFiles/openVA_input.csv'):
            shutil.copy('OpenVAFiles/sample_openVA_input.csv',
                        'OpenVAFiles/openVA_input.csv')
        if not os.path.isfile('OpenVAFiles/entityAttributeValue.csv'):
            shutil.copy('OpenVAFiles/sampleEAV.csv',
                        'OpenVAFiles/entityAttributeValue.csv')
        if not os.path.isfile('OpenVAFiles/recordStorage.csv'):
            shutil.copy('OpenVAFiles/sample_recordStorage.csv',
                        'OpenVAFiles/recordStorage.csv')
        if not os.path.isfile('OpenVAFiles/newStorage.csv'):
            shutil.copy('OpenVAFiles/sample_newStorage.csv',
                        'OpenVAFiles/newStorage.csv')

        os.makedirs('DHIS/blobs/', exist_ok=True)
        shutil.copy('OpenVAFiles/sample_newStorage.csv',
                    'DHIS/blobs/001-002-003.db')

        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime('%Y-%m-%d_%H:%M:%S')
        cls.pl = Pipeline('copy_Pipeline.db', '.', 'enilepiP', True)
        cls.pl.closePipeline()

        xferDB = TransferDB(dbFileName='copy_Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        cls.conn = xferDB.connectDB()
        cls.c = cls.conn.cursor()
    def test_runDHIS_1_vaProgramUID(self):
        """Verify VA program is installed."""
        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory,
                      dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pipelineDHIS = pl.runDHIS(settingsDHIS,
                                  settingsPipeline)
        self.assertEqual(pipelineDHIS["vaProgramUID"], "sv91bCroFFx")
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
        shutil.rmtree("DHIS/blobs/")
Example 26
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        cls.pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
class Check_Pipeline_runOpenVA_InterVA(unittest.TestCase):
    """Check runOpenVA method runs InterVA"""

    dbFileName = "copy_Pipeline.db"
    dbKey = "enilepiP"
    dbDirectory = "."
    nowDate = datetime.datetime.now()
    pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")

    xferDB = TransferDB(dbFileName = "copy_Pipeline.db",
                        dbDirectory = dbDirectory,
                        dbKey = dbKey,
                        plRunDate = pipelineRunDate)
    conn = xferDB.connectDB()

    c = conn.cursor()
    sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
    par = ("InterVA", "InterVA4|4.04|InterVA|5|2016 WHO Verbal Autopsy Form|v1_4_1")
    c.execute(sql, par)
    conn.commit()
    conn.close()
    pl = Pipeline(dbFileName,
                  dbDirectory,
                  dbKey,
                  True)
    settings = pl.config()
    settingsPipeline = settings["pipeline"]
    settingsODK = settings["odk"]
    settingsOpenVA = settings["openVA"]
    settingsDHIS = settings["dhis"]

    dirOpenVA = os.path.join(settingsPipeline.workingDirectory, "OpenVAFiles")
    dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
    shutil.rmtree(
        os.path.join(dirOpenVA, pl.pipelineRunDate),
        ignore_errors = True
    )
    if os.path.isfile("OpenVAFiles/recordStorage.csv"):
        os.remove("OpenVAFiles/recordStorage.csv")

    rOut = pl.runOpenVA(settingsOpenVA,
                        settingsPipeline,
                        settingsODK.odkID,
                        pl.pipelineRunDate)

    def test_runOpenVA_InterVA_1(self):
        """Check that runOpenVA() creates an R script for InterVA."""
        rScriptFile = os.path.join(self.dirOpenVA,
                                   self.pl.pipelineRunDate,
                                   "Rscript_" + self.pl.pipelineRunDate + ".R")
        self.assertTrue(os.path.isfile(rScriptFile))

    def test_runOpenVA_InterVA_2(self):
        """Check that runOpenVA() runs R script for InterVA."""
        rScriptFile = os.path.join(self.dirOpenVA,
                                   self.pl.pipelineRunDate,
                                   "Rscript_" + self.pl.pipelineRunDate + ".Rout")
        self.assertTrue(os.path.isfile(rScriptFile))

    def test_runOpenVA_InterVA_3(self):
        """Check that runOpenVA() creates resuls file for InterVA script."""
        rScriptFile = os.path.join(self.dirOpenVA,
                                   self.pl.pipelineRunDate,
                                   "Rscript_" + self.pl.pipelineRunDate + ".R")
        self.assertTrue(os.path.isfile(rScriptFile))
        shutil.rmtree("OpenVAFiles/" + self.pl.pipelineRunDate)
    def test_cleanPipeline_rmFiles(self):
        """Test file removal."""
        if not os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            shutil.copy("ODKFiles/previous_bc_export.csv",
                        "ODKFiles/odkBCExportPrev.csv")
        if not os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            shutil.copy("ODKFiles/another_bc_export.csv",
                        "ODKFiles/odkBCExportNew.csv")

        if not os.path.isfile("OpenVAFiles/openVA_input.csv"):
            shutil.copy("OpenVAFiles/sample_openVA_input.csv",
                        "OpenVAFiles/openVA_input.csv")
        if not os.path.isfile("OpenVAFiles/entityAttributeValue.csv"):
            shutil.copy("OpenVAFiles/sampleEAV.csv",
                        "OpenVAFiles/entityAttributeValue.csv")
        if not os.path.isfile("OpenVAFiles/recordStorage.csv"):
            shutil.copy("OpenVAFiles/sample_recordStorage.csv",
                        "OpenVAFiles/recordStorage.csv")
        if not os.path.isfile("OpenVAFiles/newStorage.csv"):
            shutil.copy("OpenVAFiles/sample_newStorage.csv",
                        "OpenVAFiles/newStorage.csv")

        os.makedirs("DHIS/blobs/", exist_ok = True)
        shutil.copy("OpenVAFiles/sample_newStorage.csv",
                    "DHIS/blobs/001-002-003.db")

        dbFileName = "copy_Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        pl.closePipeline()
        fileExist = False
        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            fileExist = True
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            fileExist = True
        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            fileExist = True
        if os.path.isfile("OpenVAFiles/openVA_input.csv"):
            fileExist = True
        if os.path.isfile("OpenVAFiles/entityAttributeValue.csv"):
            fileExist = True
        if os.path.isfile("OpenVAFiles/recordStorage.csv"):
            fileExist = True
        if os.path.isfile("OpenVAFiles/newStorage.csv"):
            fileExist = True
        if os.path.isfile("DHIS/blobs/001-002-003.db"):
            fileExist = True
        self.assertFalse(fileExist)
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("UPDATE ODK_Conf SET odkLastRun = '1900-01-01_00:00:01';")
        conn.commit()
        conn.close()
Example 29
class Check_runODK_with_exports(unittest.TestCase):
    """Check runODK method with existing ODK exports:"""
    @classmethod
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

    def setUp(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/previous_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/another_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')
        self.old_mtimePrev = os.path.getmtime('ODKFiles/odkBCExportPrev.csv')
        self.old_mtimeNew = os.path.getmtime('ODKFiles/odkBCExportNew.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        self.dbFileName = 'Pipeline.db'
        self.dbDirectory = '.'
        self.dbKey = 'enilepiP'
        self.useDHIS = True
        self.pl = Pipeline(self.dbFileName, self.dbDirectory, self.dbKey,
                           self.useDHIS)
        settings = self.pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']
        self.odkBC = self.pl.runODK(settingsODK, settingsPipeline)
        self.new_mtimePrev = os.path.getmtime('ODKFiles/odkBCExportPrev.csv')
        self.new_mtimeNew = os.path.getmtime('ODKFiles/odkBCExportNew.csv')

    def tearDown(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')

    def test_runODK_returncode_with_previous_exports(self):
        """Check returncode with valid parameters:"""

        self.assertEqual(0, self.odkBC.returncode)

    def test_runODK_exportPrev_with_previous_exports(self):
        """Check modification time on odkBCExportPrev with previous exports:"""

        self.assertTrue(self.new_mtimePrev > self.old_mtimePrev)

    def test_runODK_exportNew_with_previous_exports(self):
        """Check modification time on odkBCExportNew:"""

        self.assertTrue(self.new_mtimeNew > self.old_mtimeNew)

    def test_runODK_mergeToPrevExport_with_previous_exports(self):
        """Check mergeToPrevExport() keeps all records from BC export files:"""

        hasAll = True
        with open('ODKFiles/odkBCExportPrev.csv') as fCombined:
            fCombinedLines = fCombined.readlines()
        with open('ODKFiles/previous_bc_export.csv') as fPrevious:
            fPreviousLines = fPrevious.readlines()
        with open('ODKFiles/another_bc_export.csv') as fAnother:
            fAnotherLines = fAnother.readlines()
        for line in fPreviousLines:
            if line not in fCombinedLines:
                hasAll = False
        for line in fAnotherLines:
            if line not in fCombinedLines:
                hasAll = False
        self.assertTrue(hasAll)

    @classmethod
    def tearDownClass(cls):

        os.remove('Pipeline.db')
Example 30
class Check_storeResultsDB(unittest.TestCase):
    """Check storeResultsDB method marks duplicate records:"""
    @classmethod
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

    def setUp(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        self.pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        self.settings = self.pl.config()
        self.settingsPipeline = self.settings['pipeline']
        self.settingsODK = self.settings['odk']
        self.settingsOpenVA = self.settings['openVA']
        self.settingsDHIS = self.settings['dhis']

        self.xferDB = TransferDB(dbFileName='Pipeline.db',
                                 dbDirectory='.',
                                 dbKey='enilepiP',
                                 plRunDate=True)
        self.conn = self.xferDB.connectDB()
        self.c = self.conn.cursor()
        self.c.execute('DELETE FROM EventLog;')
        self.conn.commit()
        self.c.execute('DELETE FROM VA_Storage;')
        self.conn.commit()
        self.odkBC = self.pl.runODK(self.settingsODK, self.settingsPipeline)

    def test_runODK_checkDuplicates(self):
        """Check checkDuplicates() method:"""

        vaRecords = read_csv('ODKFiles/odkBCExportNew.csv')
        nVA = vaRecords.shape[0]
        rOut = self.pl.runOpenVA(self.settingsOpenVA, self.settingsPipeline,
                                 self.settingsODK.odkID,
                                 self.pl.pipelineRunDate)
        pipelineDHIS = self.pl.runDHIS(self.settingsDHIS,
                                       self.settingsPipeline)
        self.pl.storeResultsDB()
        os.remove('ODKFiles/odkBCExportNew.csv')
        os.remove('OpenVAFiles/pycrossva_input.csv')
        os.remove('OpenVAFiles/openVA_input.csv')
        odkBC2 = self.pl.runODK(self.settingsODK, self.settingsPipeline)
        self.c.execute('SELECT eventDesc FROM EventLog;')
        query = self.c.fetchall()
        nDuplicates = [i[0] for i in query if 'duplicate' in i[0]]
        self.assertEqual(len(nDuplicates), nVA)

    def tearDown(self):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        self.conn.close()

    @classmethod
    def tearDownClass(cls):

        os.remove('Pipeline.db')