Code Example #1
    def test_cleanPipeline_odkLastRun(self):
        """Test update of ODK_Conf.odkLastRun."""

        os.makedirs("DHIS/blobs/", exist_ok = True)
        dbFileName = "copy_Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        pl = Pipeline(dbFileName, dbDirectory,
                      dbKey, useDHIS)
        pl.closePipeline()

        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
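        # closePipeline() should have stamped ODK_Conf.odkLastRun with the run
        # date; read it back, check it, then reset it to a known default.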
        c.execute("SELECT odkLastRun FROM ODK_Conf;")
        sqlQuery = c.fetchone()
        results = [i for i in sqlQuery]
        self.assertEqual(results[0], pipelineRunDate)
        c.execute("UPDATE ODK_Conf SET odkLastRun = '1900-01-01_00:00:01';")
        conn.commit()
        conn.close()
Code Example #2
    def setUp(self):
        
        staticRunDate = datetime.datetime(
            2018, 9, 1, 9, 0, 0).strftime('%Y_%m_%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName='copy_Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=staticRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
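        # Point the Transfer DB at InterVA (version 4) before building the
        # OpenVA object and writing its R script.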
        algorithm = 'InterVA'
        sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
        par = ('InterVA', 'InterVA4|4.04|Custom|1|2016 WHO Verbal Autopsy Form|v1_4_1')
        c.execute(sql, par)
        sql = 'UPDATE InterVA_Conf SET version = ?'
        par = ('4',)
        c.execute(sql, par)
        settingsPipeline = xferDB.configPipeline(conn)
        settingsAlgorithm = xferDB.configOpenVA(conn,
                                                algorithm,
                                                settingsPipeline.workingDirectory)

        self.rOpenVA = OpenVA(vaArgs=settingsAlgorithm,
                              pipelineArgs=settingsPipeline,
                              odkID=None,
                              runDate=staticRunDate)
        zeroRecords = self.rOpenVA.copyVA()
        self.rOpenVA.rScript()
        conn.rollback()
        conn.close()
Code Example #3
    def setUp(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        self.pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        self.settings = self.pl.config()
        self.settingsPipeline = self.settings['pipeline']
        self.settingsODK = self.settings['odk']
        self.settingsOpenVA = self.settings['openVA']
        self.settingsDHIS = self.settings['dhis']

        self.xferDB = TransferDB(dbFileName='Pipeline.db',
                                 dbDirectory='.',
                                 dbKey='enilepiP',
                                 plRunDate=True)
        self.conn = self.xferDB.connectDB()
        self.c = self.conn.cursor()
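        # Start each test from empty EventLog and VA_Storage tables before
        # running the ODK step.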
        self.c.execute('DELETE FROM EventLog;')
        self.conn.commit()
        self.c.execute('DELETE FROM VA_Storage;')
        self.conn.commit()
        self.odkBC = self.pl.runODK(self.settingsODK, self.settingsPipeline)
Code Example #4
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        if os.path.isfile('who_cod.R'):
            os.remove('who_cod.R')
        # if os.path.isfile('tariff_cod.py'):
        #     os.remove('tariff_cod.py')

        pipelineRunDate = datetime.datetime(2018, 9, 1, 9, 0,
                                            0).strftime('%Y_%m_%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName='Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()
        cls.cod_who = xferDB.configDHIS(conn, 'InSilicoVA')
        cls.cod_tariff = xferDB.configDHIS(conn, 'SmartVA')
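        # Write a small R script that exports the InterVA5 cause list
        # (causetextV5) to who_cod.csv and run it in batch mode.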

        with open("who_cod.R", "w", newline="") as f:
            f.write("data(causetextV5, package='InterVA5')\n")
            f.write(
                "write.csv(causetextV5, file='who_cod.csv', row.names=FALSE)\n"
            )
        rArgs = ["R", "CMD", "BATCH", "--vanilla", "who_cod.R"]
        subprocess.run(args=rArgs,
                       stdin=subprocess.PIPE,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE,
                       check=True)
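        # Load the exported CSV and keep only the rows whose index contains
        # "b_" (the cause-of-death codes).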
        who = read_csv("who_cod.csv", index_col=0)
        index_who_causes = [i for i in who.index if "b_" in i]
        cls.who_causes = who.loc[index_who_causes, who.columns[0]].tolist()
Code Example #5
    def setUpClass(cls):

        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime('%Y-%m-%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName='copy_Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()

        c = conn.cursor()
        sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
        par = (
            'InSilicoVA',
            'InSilicoVA|1.1.4|InterVA|5|2016 WHO Verbal Autopsy Form|v1_4_1')
        c.execute(sql, par)
        sql = 'UPDATE InSilicoVA_Conf SET data_type = ?'
        par = ('WHO2016', )
        c.execute(sql, par)
        conn.commit()
        conn.close()
        cls.pl = Pipeline('copy_Pipeline.db', '.', 'enilepiP', True)
        settings = cls.pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
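        # Stage the sample Briefcase exports as the previous and new ODK export files.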
        shutil.copy('ODKFiles/previous_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/another_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')

        if os.path.isfile('OpenVAFiles/recordStorage.csv'):
            os.remove('OpenVAFiles/recordStorage.csv')
        if os.path.isfile('OpenVAFiles/entityAttributeValue.csv'):
            os.remove('OpenVAFiles/entityAttributeValue.csv')

        cls.rOut = cls.pl.runOpenVA(settingsOpenVA, settingsPipeline,
                                    settingsODK.odkID, cls.pl.pipelineRunDate)
Code Example #6
    def setUp(self):
        
        staticRunDate = datetime.datetime(
            2018, 9, 1, 9, 0, 0).strftime('%Y_%m_%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName='copy_Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=staticRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        algorithm = 'SmartVA'
        sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
        par = ('SmartVA', 'SmartVA|2.0.0_a8|PHMRCShort|1|PHMRCShort|1')
        c.execute(sql, par)
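        # Build the SmartVA settings by hand with placeholder "Unknown"/"Wrong"
        # values instead of reading them from the Transfer DB.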
        ntSmartVA = collections.namedtuple("ntSmartVA",
                                           ["SmartVA_country",
                                            "SmartVA_hiv",
                                            "SmartVA_malaria",
                                            "SmartVA_hce",
                                            "SmartVA_freetext",
                                            "SmartVA_figures",
                                            "SmartVA_language"])
        settingsAlgorithm = ntSmartVA("Unknown",
                                      "Wrong",
                                      "Wrong",
                                      "Wrong",
                                      "Wrong",
                                      "Wrong",
                                      "Wrong")
        settingsPipeline = xferDB.configPipeline(conn)

        self.rOpenVA = OpenVA(vaArgs=settingsAlgorithm,
                              pipelineArgs=settingsPipeline,
                              odkID=None,
                              runDate=staticRunDate)
        zeroRecords = self.rOpenVA.copyVA()
        conn.rollback()
        conn.close()
Code Example #7
class Check_DHIS(unittest.TestCase):
    """Check the everything works as it should."""

    shutil.rmtree("DHIS2/blobs/", ignore_errors=True)
    shutil.copy("OpenVAFiles/sampleEAV.csv",
                "OpenVAFiles/entityAttributeValue.csv")
    shutil.copy("OpenVAFiles/sample_recordStorage.csv",
                "OpenVAFiles/recordStorage.csv")
    # Define valid parameters for SwissTPH DHIS2 Server.
    dirOpenVA = "OpenVAFiles"
    dhisURL = "https://va30se.swisstph-mis.ch"
    # dhisURL = "https://va25.swisstph-mis.ch"
    dhisUser = "******"
    dhisPassword = "******"
    dhisOrgUnit = "SCVeBskgiK6"

    # parameters for connecting to DB (assuming DB is in tests folder)
    dbFileName = "Pipeline.db"
    dbKey = "enilepiP"
    wrong_dbKey = "wrongKey"
    # dbDirectory = os.path.abspath(os.path.dirname(__file__))
    dbDirectory = "."
    pipelineRunDate = datetime.datetime.now()

    xferDB = TransferDB(dbFileName=dbFileName,
                        dbDirectory=dbDirectory,
                        dbKey=dbKey,
                        plRunDate=pipelineRunDate)
    conn = xferDB.connectDB()
    settingsDHIS = xferDB.configDHIS(conn, "InSilicoVA")

    pipelineDHIS = dhis.DHIS(settingsDHIS, ".")
    apiDHIS = pipelineDHIS.connect()
    postLog = pipelineDHIS.postVA(apiDHIS)

    def test_DHIS_1_vaProgramUID(self):
        """Verify VA program is installed."""
        self.assertEqual(self.pipelineDHIS.vaProgramUID, "sv91bCroFFx")

    def test_DHIS_2_postVA(self):
        """Post VA records to DHIS2."""
        checkLog = 'importSummaries' in self.postLog['response'].keys()
        self.assertTrue(checkLog)

    def test_DHIS_3_verifyPost(self):
        """Verify VA records got posted to DHIS2."""
        self.pipelineDHIS.verifyPost(self.postLog, self.apiDHIS)
        dfNewStorage = pd.read_csv("OpenVAFiles/newStorage.csv")
        nPushed = sum(dfNewStorage['pipelineOutcome'] == "Pushed to DHIS2")
        self.assertEqual(nPushed, self.pipelineDHIS.nPostedRecords)
Code Example #8
    def setUpClass(cls):

        if not os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            shutil.copy('ODKFiles/previous_bc_export.csv',
                        'ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            shutil.copy('ODKFiles/another_bc_export.csv',
                        'ODKFiles/odkBCExportNew.csv')

        if not os.path.isfile('OpenVAFiles/openVA_input.csv'):
            shutil.copy('OpenVAFiles/sample_openVA_input.csv',
                        'OpenVAFiles/openVA_input.csv')
        if not os.path.isfile('OpenVAFiles/entityAttributeValue.csv'):
            shutil.copy('OpenVAFiles/sampleEAV.csv',
                        'OpenVAFiles/entityAttributeValue.csv')
        if not os.path.isfile('OpenVAFiles/recordStorage.csv'):
            shutil.copy('OpenVAFiles/sample_recordStorage.csv',
                        'OpenVAFiles/recordStorage.csv')
        if not os.path.isfile('OpenVAFiles/newStorage.csv'):
            shutil.copy('OpenVAFiles/sample_newStorage.csv',
                        'OpenVAFiles/newStorage.csv')

        os.makedirs('DHIS/blobs/', exist_ok=True)
        shutil.copy('OpenVAFiles/sample_newStorage.csv',
                    'DHIS/blobs/001-002-003.db')
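        # Drop a dummy blob into DHIS/blobs/ so the directory is non-empty before
        # the pipeline is closed below.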

        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime('%Y-%m-%d_%H:%M:%S')
        cls.pl = Pipeline('copy_Pipeline.db', '.', 'enilepiP', True)
        cls.pl.closePipeline()

        xferDB = TransferDB(dbFileName='copy_Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        cls.conn = xferDB.connectDB()
        cls.c = cls.conn.cursor()
Code Example #9
    def setUpClass(cls):

        shutil.rmtree('DHIS/blobs/', ignore_errors = True)
        shutil.copy('OpenVAFiles/sampleEAV.csv',
                    'OpenVAFiles/entityAttributeValue.csv')
        shutil.copy('OpenVAFiles/sample_recordStorage.csv',
                    'OpenVAFiles/recordStorage.csv')
        # Define valid parameters for SwissTPH DHIS2 Server.
        dirOpenVA = 'OpenVAFiles'
        dhisURL = 'https://va30se.swisstph-mis.ch'
        # dhisURL = 'https://va25.swisstph-mis.ch'
        dhisUser = '******'
        dhisPassword = '******'
        dhisOrgUnit = 'SCVeBskgiK6'

        # parameters for connecting to DB (assuming DB is in tests folder)
        dbFileName = 'Pipeline.db'
        dbKey = 'enilepiP'
        wrong_dbKey = 'wrongKey'
        # dbDirectory = os.path.abspath(os.path.dirname(__file__))
        dbDirectory = '.'
        if not os.path.isfile('Pipeline.db'):
            createTransferDB(dbFileName, dbDirectory, dbKey)
        pipelineRunDate = datetime.datetime.now()

        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        settingsDHIS = xferDB.configDHIS(conn, 'InSilicoVA')
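        # Post the staged records to DHIS2 and verify the import before the tests run.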

        cls.pipelineDHIS = dhis.DHIS(settingsDHIS, '.')
        apiDHIS = cls.pipelineDHIS.connect()
        cls.postLog = cls.pipelineDHIS.postVA(apiDHIS)
        cls.pipelineDHIS.verifyPost(cls.postLog, apiDHIS)
Code Example #10
    def setUpClass(cls):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
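        # Stage PHMRC-format ODK exports and make sure the smartva CLI and the
        # Transfer DB are available before configuring the SmartVA run.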
        shutil.copy('ODKFiles/odkExport_phmrc-1.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_phmrc-2.csv',
                    'ODKFiles/odkBCExportNew.csv')
        if not os.path.isfile('smartva'):
            downloadSmartVA()
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

        # pipelineRunDate = datetime.datetime.now()
        pipelineRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                            strftime('%Y_%m_%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName = 'copy_Pipeline.db',
                            dbDirectory = '.',
                            dbKey = 'enilepiP',
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
        par = ('SmartVA', 'SmartVA|2.0.0_a8|PHMRCShort|1|PHMRCShort|1')
        c.execute(sql, par)
        settingsPipeline = xferDB.configPipeline(conn)
        settingsODK = xferDB.configODK(conn)
        settingsSmartVA = xferDB.configOpenVA(conn,
                                              'SmartVA',
                                              settingsPipeline.workingDirectory)
        conn.rollback()
        conn.close()
        cls.staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                            strftime('%Y_%m_%d_%H:%M:%S')
        shutil.rmtree(
            os.path.join('OpenVAFiles', cls.staticRunDate),
            ignore_errors = True
        )
        cliSmartVA = OpenVA(vaArgs = settingsSmartVA,
                            pipelineArgs = settingsPipeline,
                            odkID = settingsODK.odkID,
                            runDate = cls.staticRunDate)
        zeroRecords = cliSmartVA.copyVA()
        cls.completed = cliSmartVA.getCOD()
        cls.svaOut = os.path.join(
            'OpenVAFiles',
            cls.staticRunDate,
            '1-individual-cause-of-death/individual-cause-of-death.csv'
        )
Code Example #11
    def setUpClass(cls):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
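        # Stage fresh Briefcase exports and rebuild a dedicated Transfer DB for
        # the InSilicoVA checks.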
        shutil.copy('ODKFiles/previous_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/another_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('Check_InSilicoVA_Pipeline.db'):
            os.remove('Check_InSilicoVA_Pipeline.db')
        createTransferDB('Check_InSilicoVA_Pipeline.db', '.', 'enilepiP')

        # pipelineRunDate = datetime.datetime.now()
        pipelineRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                            strftime('%Y_%m_%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName = 'Check_InSilicoVA_Pipeline.db',
                            dbDirectory = '.',
                            dbKey = 'enilepiP',
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
        par = ('InSilicoVA', 'InSilicoVA-2016|1.0.0|InterVA|5|2016 WHO Verbal Autopsy Form|v1_4_1')
        c.execute(sql, par)
        sql = 'UPDATE InSilicoVA_Conf SET data_type = ?'
        par = ('WHO2016',)
        c.execute(sql, par)
        settingsPipeline = xferDB.configPipeline(conn)
        settingsODK = xferDB.configODK(conn)
        settingsInSilicoVA = xferDB.configOpenVA(conn,
                                                 'InSilicoVA',
                                                 settingsPipeline.workingDirectory)
        # conn.rollback()
        conn.close()
        cls.staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                            strftime('%Y_%m_%d_%H:%M:%S')
 
        cls.rScript = os.path.join('OpenVAFiles', cls.staticRunDate,
                                   'Rscript_' + cls.staticRunDate + '.R')
        cls.rOutFile = os.path.join('OpenVAFiles', cls.staticRunDate,
                                    'Rscript_' + cls.staticRunDate + '.Rout')
        rOpenVA = OpenVA(vaArgs = settingsInSilicoVA,
                         pipelineArgs = settingsPipeline,
                         odkID = settingsODK.odkID,
                         runDate = cls.staticRunDate)
        zeroRecords = rOpenVA.copyVA()
        rOpenVA.rScript()
        cls.completed = rOpenVA.getCOD()
Code Example #12
    def test_storeVA(self):
        """Check that depositResults() stores VA records in Transfer DB."""
        shutil.copy("OpenVAFiles/sample_newStorage.csv",
                    "OpenVAFiles/newStorage.csv")

        dbFileName = "Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("DELETE FROM VA_Storage;")
        conn.commit()
        conn.close()
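        # Run the pipeline's storeResultsDB() and then confirm that every record
        # in newStorage.csv was written to the VA_Storage table.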

        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]

        pl.storeResultsDB()
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        sql = "SELECT id FROM VA_Storage"
        c.execute(sql)
        vaIDs = c.fetchall()
        conn.close()
        vaIDsList = [j for i in vaIDs for j in i]
        s1 = set(vaIDsList)
        dfNewStorage = pd.read_csv("OpenVAFiles/newStorage.csv")
        dfNewStorageID = dfNewStorage["odkMetaInstanceID"]
        s2 = set(dfNewStorageID)
        self.assertTrue(s2.issubset(s1))
Code Example #13
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')
        if os.path.isfile('OpenVAFiles/newStorage.csv'):
            os.remove('OpenVAFiles/newStorage.csv')
        shutil.copy('OpenVAFiles/sample_newStorage.csv',
                    'OpenVAFiles/newStorage.csv')
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime('%Y-%m-%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName='Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute('DELETE FROM VA_Storage;')
        conn.commit()
        conn.close()
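        # Store the results, then collect the IDs from VA_Storage and from
        # newStorage.csv for the comparisons made in the tests.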
        pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        settings = pl.config()
        settingsPipeline = settings['pipeline']
        settingsODK = settings['odk']
        settingsOpenVA = settings['openVA']
        settingsDHIS = settings['dhis']

        pl.storeResultsDB()

        xferDB = TransferDB(dbFileName='Pipeline.db',
                            dbDirectory='.',
                            dbKey='enilepiP',
                            plRunDate=pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        sql = 'SELECT id FROM VA_Storage'
        c.execute(sql)
        vaIDs = c.fetchall()
        conn.close()
        vaIDsList = [j for i in vaIDs for j in i]
        cls.s1 = set(vaIDsList)
        dfNewStorage = read_csv('OpenVAFiles/newStorage.csv')
        dfNewStorageID = dfNewStorage['odkMetaInstanceID']
        cls.s2 = set(dfNewStorageID)
Code Example #14
    def setUpClass(cls):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/zeroRecords_bc_export.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/zeroRecords_bc_export.csv',
                    'ODKFiles/odkBCExportNew.csv')
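        # Both staged exports contain zero records, so copyVA() is expected to
        # report zeroRecords == True.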
        if os.path.isfile('OpenVAFiles/openVA_input.csv'):
            os.remove('OpenVAFiles/openVA_input.csv')
        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

        # pipelineRunDate = datetime.datetime.now()
        pipelineRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                            strftime('%Y_%m_%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName = 'Pipeline.db',
                            dbDirectory = '.',
                            dbKey = 'enilepiP',
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        settingsPipeline = xferDB.configPipeline(conn)
        settingsODK = xferDB.configODK(conn)
        settingsInterVA = xferDB.configOpenVA(conn,
                                              'InterVA',
                                              settingsPipeline.workingDirectory)
        cls.staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                            strftime('%Y_%m_%d_%H:%M:%S')

        shutil.rmtree(
            os.path.join('OpenVAFiles', cls.staticRunDate),
            ignore_errors = True
        )

        rOpenVA = OpenVA(vaArgs = settingsInterVA,
                         pipelineArgs = settingsPipeline,
                         odkID = settingsODK.odkID,
                         runDate = cls.staticRunDate)
        cls.zeroRecords = rOpenVA.copyVA()
Code Example #15
class Check_3_getCOD(unittest.TestCase):

    dbFileName = "Pipeline.db"
    dbKey = "enilepiP"
    # dbDirectory = os.path.abspath(os.path.dirname(__file__))
    dbDirectory = "."
    pipelineRunDate = datetime.datetime.now()
    dirODK = "ODKFiles"
    dirOpenVA = "OpenVAFiles"
    xferDB = TransferDB(dbFileName=dbFileName,
                        dbDirectory=dbDirectory,
                        dbKey=dbKey,
                        plRunDate=pipelineRunDate)
    conn = xferDB.connectDB()

    settingsPipeline = xferDB.configPipeline(conn)
    settingsODK = xferDB.configODK(conn)
    settingsInterVA = xferDB.configOpenVA(conn, "InterVA",
                                          settingsPipeline.workingDirectory)
    staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                    strftime("%Y_%m_%d_%H:%M:%S")

    def test_3_getCOD_insilico(self):
        """Check that getCOD() executes R script for insilico"""
        c = self.conn.cursor()
        sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
        par = ("InSilicoVA",
               "InSilicoVA|1.1.4|Custom|1|2016 WHO Verbal Autopsy Form|v1_4_1")
        c.execute(sql, par)
        settingsPipeline = self.xferDB.configPipeline(self.conn)
        settingsODK = self.xferDB.configODK(self.conn)
        settingsInSilicoVA = self.xferDB.configOpenVA(
            self.conn, "InSilicoVA", settingsPipeline.workingDirectory)
        self.conn.rollback()
        dirOpenVA = os.path.join(settingsPipeline.workingDirectory,
                                 "OpenVAFiles")
        dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
        staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                        strftime("%Y_%m_%d_%H:%M:%S")
        rOutFile = os.path.join(dirOpenVA, staticRunDate,
                                "Rscript_" + staticRunDate + ".Rout")
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
        rOpenVA = OpenVA(vaArgs=settingsInSilicoVA,
                         pipelineArgs=settingsPipeline,
                         odkID=settingsODK.odkID,
                         runDate=staticRunDate)

        if os.path.isfile(dirODK + "/odkBCExportNew.csv"):
            os.remove(dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(dirODK + "/odkBCExportPrev.csv"):
            os.remove(dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/previous_bc_export.csv",
                    dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/another_bc_export.csv",
                    dirODK + "/odkBCExportNew.csv")

        zeroRecords = rOpenVA.copyVA()
        rOpenVA.rScript()
        rOpenVA.getCOD()

        self.assertTrue(os.path.isfile(rOutFile))
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)

    def test_3_getCOD_insilico_exception(self):
        """getCOD() raises exception with faulty R script for InSilicoVA."""
        c = self.conn.cursor()
        sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
        par = ("InSilicoVA",
               "InSilicoVA|1.1.4|Custom|1|2016 WHO Verbal Autopsy Form|v1_4_1")
        c.execute(sql, par)
        settingsPipeline = self.xferDB.configPipeline(self.conn)
        settingsODK = self.xferDB.configODK(self.conn)
        settingsInSilicoVA = self.xferDB.configOpenVA(
            self.conn, "InSilicoVA", settingsPipeline.workingDirectory)
        self.conn.rollback()
        dirOpenVA = os.path.join(settingsPipeline.workingDirectory,
                                 "OpenVAFiles")
        dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
        staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                        strftime("%Y_%m_%d_%H:%M:%S")
        rOutFile = os.path.join(dirOpenVA, staticRunDate,
                                "Rscript_" + staticRunDate + ".Rout")
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
        rOpenVA = OpenVA(vaArgs=settingsInSilicoVA,
                         pipelineArgs=settingsPipeline,
                         odkID="this should raise an exception",
                         runDate=staticRunDate)

        if os.path.isfile(dirODK + "/odkBCExportNew.csv"):
            os.remove(dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(dirODK + "/odkBCExportPrev.csv"):
            os.remove(dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/previous_bc_export.csv",
                    dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/another_bc_export.csv",
                    dirODK + "/odkBCExportNew.csv")

        zeroRecords = rOpenVA.copyVA()
        rOpenVA.rScript()

        self.assertRaises(OpenVAError, rOpenVA.getCOD)
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)

    def test_3_getCOD_interva(self):
        """Check that getCOD() executes R script for interva"""
        c = self.conn.cursor()
        sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
        par = ("InterVA",
               "InterVA4|4.04|Custom|1|2016 WHO Verbal Autopsy Form|v1_4_1")
        c.execute(sql, par)
        settingsPipeline = self.xferDB.configPipeline(self.conn)
        settingsODK = self.xferDB.configODK(self.conn)
        settingsInterVA = self.xferDB.configOpenVA(
            self.conn, "InterVA", settingsPipeline.workingDirectory)
        self.conn.rollback()
        dirOpenVA = os.path.join(settingsPipeline.workingDirectory,
                                 "OpenVAFiles")
        dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
        staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                        strftime("%Y_%m_%d_%H:%M:%S")
        rOutFile = os.path.join(dirOpenVA, staticRunDate,
                                "Rscript_" + staticRunDate + ".Rout")
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
        rOpenVA = OpenVA(vaArgs=settingsInterVA,
                         pipelineArgs=settingsPipeline,
                         odkID=settingsODK.odkID,
                         runDate=staticRunDate)

        if os.path.isfile(dirODK + "/odkBCExportNew.csv"):
            os.remove(dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(dirODK + "/odkBCExportPrev.csv"):
            os.remove(dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/previous_bc_export.csv",
                    dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/another_bc_export.csv",
                    dirODK + "/odkBCExportNew.csv")

        zeroRecords = rOpenVA.copyVA()
        rOpenVA.rScript()
        rOpenVA.getCOD()

        self.assertTrue(os.path.isfile(rOutFile))
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)

    def test_3_getCOD_interva_exception(self):
        """getCOD() should raise an exception with problematic interva R script."""
        c = self.conn.cursor()
        sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
        par = ("InterVA",
               "InterVA4|4.04|Custom|1|2016 WHO Verbal Autopsy Form|v1_4_1")
        c.execute(sql, par)
        settingsPipeline = self.xferDB.configPipeline(self.conn)
        settingsODK = self.xferDB.configODK(self.conn)
        settingsInterVA = self.xferDB.configOpenVA(
            self.conn, "InterVA", settingsPipeline.workingDirectory)
        self.conn.rollback()
        dirOpenVA = os.path.join(settingsPipeline.workingDirectory,
                                 "OpenVAFiles")
        dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
        staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                        strftime("%Y_%m_%d_%H:%M:%S")
        rOutFile = os.path.join(dirOpenVA, staticRunDate,
                                "Rscript_" + staticRunDate + ".Rout")
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
        rOpenVA = OpenVA(vaArgs=settingsInterVA,
                         pipelineArgs=settingsPipeline,
                         odkID="this should raise an exception",
                         runDate=staticRunDate)

        if os.path.isfile(dirODK + "/odkBCExportNew.csv"):
            os.remove(dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(dirODK + "/odkBCExportPrev.csv"):
            os.remove(dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/previous_bc_export.csv",
                    dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/another_bc_export.csv",
                    dirODK + "/odkBCExportNew.csv")

        zeroRecords = rOpenVA.copyVA()
        rOpenVA.rScript()

        self.assertRaises(OpenVAError, rOpenVA.getCOD)
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)

    def test_3_getCOD_smartva(self):
        """Check that getCOD() executes smartva cli"""
        c = self.conn.cursor()
        sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
        par = ("SmartVA", "SmartVA|2.0.0_a8|PHMRCShort|1|PHMRCShort|1")
        c.execute(sql, par)
        settingsPipeline = self.xferDB.configPipeline(self.conn)
        settingsODK = self.xferDB.configODK(self.conn)
        settingsSmartVA = self.xferDB.configOpenVA(
            self.conn, "SmartVA", settingsPipeline.workingDirectory)
        self.conn.rollback()
        dirOpenVA = os.path.join(settingsPipeline.workingDirectory,
                                 "OpenVAFiles")
        dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
        staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                        strftime("%Y_%m_%d_%H:%M:%S")
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
        if os.path.isfile(dirODK + "/odkBCExportNew.csv"):
            os.remove(dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(dirODK + "/odkBCExportPrev.csv"):
            os.remove(dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/odkExport_phmrc-1.csv",
                    dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/odkExport_phmrc-2.csv",
                    dirODK + "/odkBCExportNew.csv")

        cliSmartVA = OpenVA(vaArgs=settingsSmartVA,
                            pipelineArgs=settingsPipeline,
                            odkID=settingsODK.odkID,
                            runDate=staticRunDate)

        zeroRecords = cliSmartVA.copyVA()
        completed = cliSmartVA.getCOD()
        svaOut = os.path.join(
            dirOpenVA, staticRunDate,
            "1-individual-cause-of-death/individual-cause-of-death.csv")

        self.assertTrue(os.path.isfile(svaOut))
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)

    def test_3_getCOD_smartva_exception(self):
        """getCOD() should raise an exception with faulty args for smartva cli"""
        c = self.conn.cursor()
        sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
        par = ("SmartVA", "SmartVA|2.0.0_a8|PHMRCShort|1|PHMRCShort|1")
        c.execute(sql, par)
        settingsPipeline = self.xferDB.configPipeline(self.conn)
        settingsODK = self.xferDB.configODK(self.conn)
        self.conn.rollback()
        ntSmartVA = collections.namedtuple("ntSmartVA", [
            "SmartVA_country", "SmartVA_hiv", "SmartVA_malaria", "SmartVA_hce",
            "SmartVA_freetext", "SmartVA_figures", "SmartVA_language"
        ])
        settingsSmartVA = ntSmartVA("Unknown", "Wrong", "Wrong", "Wrong",
                                    "Wrong", "Wrong", "Wrong")
        dirOpenVA = os.path.join(settingsPipeline.workingDirectory,
                                 "OpenVAFiles")
        dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
        staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                        strftime("%Y_%m_%d_%H:%M:%S")
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
        if os.path.isfile(dirODK + "/odkBCExportNew.csv"):
            os.remove(dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(dirODK + "/odkBCExportPrev.csv"):
            os.remove(dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/previous_bc_export.csv",
                    dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/another_bc_export.csv",
                    dirODK + "/odkBCExportNew.csv")

        cliSmartVA = OpenVA(vaArgs=settingsSmartVA,
                            pipelineArgs=settingsPipeline,
                            odkID=settingsODK.odkID,
                            runDate=staticRunDate)

        zeroRecords = cliSmartVA.copyVA()

        self.assertRaises(SmartVAError, cliSmartVA.getCOD)
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
Code Example #16
class Check_1_copyVA(unittest.TestCase):

    dbFileName = "Pipeline.db"
    dbKey = "enilepiP"
    # dbDirectory = os.path.abspath(os.path.dirname(__file__))
    dbDirectory = "."
    dirODK = "ODKFiles"
    dirOpenVA = "OpenVAFiles"
    pipelineRunDate = datetime.datetime.now()
    xferDB = TransferDB(dbFileName=dbFileName,
                        dbDirectory=dbDirectory,
                        dbKey=dbKey,
                        plRunDate=pipelineRunDate)
    conn = xferDB.connectDB()
    settingsPipeline = xferDB.configPipeline(conn)
    settingsODK = xferDB.configODK(conn)
    settingsInterVA = xferDB.configOpenVA(conn, "InterVA",
                                          settingsPipeline.workingDirectory)
    staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
      strftime("%Y_%m_%d_%H:%M:%S")

    shutil.rmtree(os.path.join(dirOpenVA, staticRunDate), ignore_errors=True)

    rOpenVA = OpenVA(vaArgs=settingsInterVA,
                     pipelineArgs=settingsPipeline,
                     odkID=settingsODK.odkID,
                     runDate=staticRunDate)

    def test_1_copyVA_isFile(self):
        """Check that copyVA() brings in new file."""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")

        zeroRecords = self.rOpenVA.copyVA()

        self.assertTrue(os.path.isfile(self.dirOpenVA + "/openVA_input.csv"))
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")
        os.remove(self.dirOpenVA + "/openVA_input.csv")

    def test_1_copyVA_merge(self):
        """Check that copyVA() includes all records."""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")

        zeroRecords = self.rOpenVA.copyVA()

        hasAll = True
        with open("OpenVAFiles/openVA_input.csv") as fCombined:
            fCombinedLines = fCombined.readlines()
        with open("ODKFiles/previous_bc_export.csv") as fPrevious:
            fPreviousLines = fPrevious.readlines()
        with open("ODKFiles/another_bc_export.csv") as fAnother:
            fAnotherLines = fAnother.readlines()
        for line in fPreviousLines:
            if line not in fCombinedLines:
                hasAll = False
        for line in fAnotherLines:
            if line not in fCombinedLines:
                hasAll = False
        self.assertTrue(hasAll)
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")
        os.remove(self.dirOpenVA + "/openVA_input.csv")

    def test_1_copyVA_zeroRecords_1(self):
        """Check that copyVA() returns zeroRecords == True."""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/zeroRecords_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/zeroRecords_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")

        zeroRecords = self.rOpenVA.copyVA()

        self.assertTrue(zeroRecords)
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")

    def test_1_copyVA_zeroRecords_2(self):
        """Check that copyVA() does not produce file if zero records."""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/previous_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/another_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")

        zeroRecords = self.rOpenVA.copyVA()

        self.assertFalse(zeroRecords)
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")
        os.remove(self.dirOpenVA + "/openVA_input.csv")

    def test_1_copyVA_zeroRecords_3(self):
        """Check that copyVA() doesn't create new file if returns zeroRecords == True."""

        if os.path.isfile(self.dirODK + "/odkBCExportNew.csv"):
            os.remove(self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirODK + "/odkBCExportPrev.csv"):
            os.remove(self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/zeroRecords_bc_export.csv",
                    self.dirODK + "/odkBCExportPrev.csv")
        shutil.copy(self.dirODK + "/zeroRecords_bc_export.csv",
                    self.dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(self.dirOpenVA + "/openVA_input.csv"):
            os.remove(self.dirOpenVA + "/openVA_input.csv")

        zeroRecords = self.rOpenVA.copyVA()

        self.assertFalse(os.path.isfile(self.dirOpenVA + "/openVA_input.csv"))
        os.remove(self.dirODK + "/odkBCExportPrev.csv")
        os.remove(self.dirODK + "/odkBCExportNew.csv")
Code Example #17
class Check_2_rScript(unittest.TestCase):

    dbFileName = "Pipeline.db"
    dbKey = "enilepiP"
    dbDirectory = "."
    pipelineRunDate = datetime.datetime.now()

    xferDB = TransferDB(dbFileName="copy_Pipeline.db",
                        dbDirectory=dbDirectory,
                        dbKey=dbKey,
                        plRunDate=pipelineRunDate)
    conn = xferDB.connectDB()
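    # Each test below sets the algorithm in the Transfer DB and checks that
    # rScript() writes the expected R script file.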

    def test_2_rScript_insilico(self):
        """Check that rScript() creates an R script for InSilicoVA."""

        c = self.conn.cursor()
        sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
        par = ("InSilicoVA",
               "InSilicoVA|1.1.4|Custom|1|2016 WHO Verbal Autopsy Form|v1_4_1")
        c.execute(sql, par)
        settingsPipeline = self.xferDB.configPipeline(self.conn)
        settingsODK = self.xferDB.configODK(self.conn)
        settingsInSilicoVA = self.xferDB.configOpenVA(
            self.conn, "InSilicoVA", settingsPipeline.workingDirectory)
        self.conn.rollback()
        dirOpenVA = os.path.join(settingsPipeline.workingDirectory,
                                 "OpenVAFiles")
        dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
        staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                        strftime("%Y_%m_%d_%H:%M:%S")
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
        rScriptFile = os.path.join(dirOpenVA, staticRunDate,
                                   "Rscript_" + staticRunDate + ".R")
        rOpenVA = OpenVA(vaArgs=settingsInSilicoVA,
                         pipelineArgs=settingsPipeline,
                         odkID=settingsODK.odkID,
                         runDate=staticRunDate)

        if os.path.isfile(dirODK + "/odkBCExportNew.csv"):
            os.remove(dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(dirODK + "/odkBCExportPrev.csv"):
            os.remove(dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/previous_bc_export.csv",
                    dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/another_bc_export.csv",
                    dirODK + "/odkBCExportNew.csv")

        zeroRecords = rOpenVA.copyVA()
        rOpenVA.rScript()

        self.assertTrue(os.path.isfile(rScriptFile))
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)

    def test_2_rScript_interva(self):
        """Check that rScript() creates an R script for InterVA."""

        c = self.conn.cursor()
        sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
        par = ("InterVA",
               "InterVA4|4.04|InterVA|4|2016 WHO Verbal Autopsy Form|v1_4_1")
        c.execute(sql, par)
        settingsPipeline = self.xferDB.configPipeline(self.conn)
        settingsODK = self.xferDB.configODK(self.conn)
        settingsInterVA = self.xferDB.configOpenVA(
            self.conn, "InterVA", settingsPipeline.workingDirectory)
        self.conn.rollback()
        dirOpenVA = os.path.join(settingsPipeline.workingDirectory,
                                 "OpenVAFiles")
        dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
        staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                  strftime("%Y_%m_%d_%H:%M:%S")
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
        rScriptFile = os.path.join(dirOpenVA, staticRunDate,
                                   "Rscript_" + staticRunDate + ".R")
        rOpenVA = OpenVA(vaArgs=settingsInterVA,
                         pipelineArgs=settingsPipeline,
                         odkID=settingsODK.odkID,
                         runDate=staticRunDate)
        if os.path.isfile(dirODK + "/odkBCExportNew.csv"):
            os.remove(dirODK + "/odkBCExportNew.csv")
        if os.path.isfile(dirODK + "/odkBCExportPrev.csv"):
            os.remove(dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/previous_bc_export.csv",
                    dirODK + "/odkBCExportPrev.csv")
        shutil.copy(dirODK + "/another_bc_export.csv",
                    dirODK + "/odkBCExportNew.csv")
        zeroRecords = rOpenVA.copyVA()
        rOpenVA.rScript()
        self.assertTrue(os.path.isfile(rScriptFile))
        shutil.rmtree(os.path.join(dirOpenVA, staticRunDate),
                      ignore_errors=True)
Code Example #18
    def setUp(self):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_phmrc-1.csv',
                    'ODKFiles/odkBCExportPrev.csv')
        shutil.copy('ODKFiles/odkExport_phmrc-2.csv',
                    'ODKFiles/odkBCExportNew.csv')
        if not os.path.isfile('smartva'):
            downloadSmartVA()

        self.staticRunDate = datetime.datetime(2018, 9, 1, 9, 0, 0). \
                        strftime('%Y_%m_%d_%H:%M:%S')
        xferDB = TransferDB(dbFileName = 'copy_Pipeline.db',
                            dbDirectory = '.',
                            dbKey = 'enilepiP',
                            plRunDate = self.staticRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
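        # Pick the algorithm configuration matching whichever exception test is
        # currently running (based on the test id).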
        if self.id() == 'test_openVA.Check_Exceptions.test_insilico_exception':
            algorithm = 'InSilicoVA'
            sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
            par = ('InSilicoVA', 'InSilicoVA|1.1.4|Custom|1|2016 WHO Verbal Autopsy Form|v1_4_1')
            c.execute(sql, par)
            sql = 'UPDATE InSilicoVA_Conf SET data_type = ?'
            par = ('WHO2016',)
            c.execute(sql, par)
        elif self.id() == 'test_openVA.Check_Exceptions.test_interva_exception':
            algorithm = 'InterVA'
            sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
            par = ('InterVA', 'InterVA4|4.04|Custom|1|2016 WHO Verbal Autopsy Form|v1_4_1')
            c.execute(sql, par)
            sql = 'UPDATE InterVA_Conf SET version = ?'
            par = ('5',)
            c.execute(sql, par)
        else:
            algorithm = 'SmartVA'
            sql = 'UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?'
            par = ('SmartVA', 'SmartVA|2.0.0_a8|PHMRCShort|1|PHMRCShort|1')
            c.execute(sql, par)
        settingsPipeline = xferDB.configPipeline(conn)
        settingsODK = xferDB.configODK(conn)
        settingsAlgorithm = xferDB.configOpenVA(conn,
                                                algorithm,
                                                settingsPipeline.workingDirectory)
        if self.id() == 'test_openVA.Check_Exceptions.test_smartva_exception':
            ntSmartVA = collections.namedtuple("ntSmartVA",
                                               ["SmartVA_country",
                                                "SmartVA_hiv",
                                                "SmartVA_malaria",
                                                "SmartVA_hce",
                                                "SmartVA_freetext",
                                                "SmartVA_figures",
                                                "SmartVA_language"]
            )
            settingsAlgorithm = ntSmartVA("Unknown",
                                          "Wrong",
                                          "Wrong",
                                          "Wrong",
                                          "Wrong",
                                          "Wrong",
                                          "Wrong")

        conn.rollback()
        conn.close()
        self.rOpenVA = OpenVA(vaArgs = settingsAlgorithm,
                              pipelineArgs = settingsPipeline,
                              odkID = '',
                              runDate = self.staticRunDate)
        zeroRecords = self.rOpenVA.copyVA()
        self.rOpenVA.rScript()
Code Example #19
    def test_runODK_6(self):
        """Check checkDuplicates() method."""

        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            os.remove("ODKFiles/odkBCExportNew.csv")
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            os.remove("ODKFiles/odkBCExportPrev.csv")
        if os.path.isfile("OpenVAFiles/openVA_input.csv"):
            os.remove("OpenVAFiles/openVA_input.csv")

        dbFileName = "copy_Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("DELETE FROM EventLog;")
        conn.commit()
        c.execute("DELETE FROM VA_Storage;")
        conn.commit()
        conn.close()
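        # Run the full ODK -> OpenVA -> DHIS -> storage sequence once, then pull
        # a second ODK export; each record should then be logged as a duplicate.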
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        settings = pl.config()
        settingsPipeline = settings["pipeline"]
        settingsODK = settings["odk"]
        settingsOpenVA = settings["openVA"]
        settingsDHIS = settings["dhis"]
        odkBC = pl.runODK(settingsODK,
                          settingsPipeline)
        vaRecords = pd.read_csv("ODKFiles/odkBCExportNew.csv")
        nVA = vaRecords.shape[0]
        rOut = pl.runOpenVA(settingsOpenVA,
                            settingsPipeline,
                            settingsODK.odkID,
                            pl.pipelineRunDate)
        pipelineDHIS = pl.runDHIS(settingsDHIS,
                                  settingsPipeline)
        pl.storeResultsDB()
        os.remove("ODKFiles/odkBCExportNew.csv")
        os.remove("OpenVAFiles/openVA_input.csv")
        odkBC2 = pl.runODK(settingsODK,
                           settingsPipeline)
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pl.pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("SELECT eventDesc FROM EventLog;")
        query = c.fetchall()
        nDuplicates = [i[0] for i in query if "Duplicate" in i[0]]
        self.assertEqual(len(nDuplicates), nVA)
        shutil.rmtree("OpenVAFiles/" + pl.pipelineRunDate)
        shutil.rmtree("DHIS/blobs/")
        os.remove("OpenVAFiles/newStorage.csv")
        os.remove("OpenVAFiles/recordStorage.csv")
        os.remove("OpenVAFiles/entityAttributeValue.csv")
Code Example #20
class Check_Pipeline_runOpenVA_InterVA(unittest.TestCase):
    """Check runOpenVA method runs InterVA"""

    dbFileName = "copy_Pipeline.db"
    dbKey = "enilepiP"
    dbDirectory = "."
    nowDate = datetime.datetime.now()
    pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")

    xferDB = TransferDB(dbFileName = "copy_Pipeline.db",
                        dbDirectory = dbDirectory,
                        dbKey = dbKey,
                        plRunDate = pipelineRunDate)
    conn = xferDB.connectDB()

    c = conn.cursor()
    sql = "UPDATE Pipeline_Conf SET algorithm = ?, algorithmMetadataCode = ?"
    par = ("InterVA", "InterVA4|4.04|InterVA|5|2016 WHO Verbal Autopsy Form|v1_4_1")
    c.execute(sql, par)
    conn.commit()
    conn.close()
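    # Configure the pipeline for InterVA and run the OpenVA step once at class
    # definition time; the tests inspect the files it produces.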
    pl = Pipeline(dbFileName,
                  dbDirectory,
                  dbKey,
                  True)
    settings = pl.config()
    settingsPipeline = settings["pipeline"]
    settingsODK = settings["odk"]
    settingsOpenVA = settings["openVA"]
    settingsDHIS = settings["dhis"]

    dirOpenVA = os.path.join(settingsPipeline.workingDirectory, "OpenVAFiles")
    dirODK = os.path.join(settingsPipeline.workingDirectory, "ODKFiles")
    shutil.rmtree(
        os.path.join(dirOpenVA, pl.pipelineRunDate),
        ignore_errors = True
    )
    if os.path.isfile("OpenVAFiles/recordStorage.csv"):
        os.remove("OpenVAFiles/recordStorage.csv")

    rOut = pl.runOpenVA(settingsOpenVA,
                        settingsPipeline,
                        settingsODK.odkID,
                        pl.pipelineRunDate)

    def test_runOpenVA_InterVA_1(self):
        """Check that runOpenVA() creates an R script for InterVA."""
        rScriptFile = os.path.join(self.dirOpenVA,
                                   self.pl.pipelineRunDate,
                                   "Rscript_" + self.pl.pipelineRunDate + ".R")
        self.assertTrue(os.path.isfile(rScriptFile))

    def test_runOpenVA_InterVA_2(self):
        """Check that runOpenVA() runs R script for InterVA."""
        rScriptFile = os.path.join(self.dirOpenVA,
                                   self.pl.pipelineRunDate,
                                   "Rscript_" + self.pl.pipelineRunDate + ".Rout")
        self.assertTrue(os.path.isfile(rScriptFile))

    def test_runOpenVA_InterVA_3(self):
        """Check that runOpenVA() creates resuls file for InterVA script."""
        rScriptFile = os.path.join(self.dirOpenVA,
                                   self.pl.pipelineRunDate,
                                   "Rscript_" + self.pl.pipelineRunDate + ".R")
        self.assertTrue(os.path.isfile(rScriptFile))
        shutil.rmtree("OpenVAFiles/" + self.pl.pipelineRunDate)
Code Example #21
class Check_storeResultsDB(unittest.TestCase):
    """Check storeResultsDB method marks duplicate records:"""
    @classmethod
    def setUpClass(cls):

        if not os.path.isfile('Pipeline.db'):
            createTransferDB('Pipeline.db', '.', 'enilepiP')

    def setUp(self):

        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        if not os.path.isfile('ODK-Briefcase-v1.18.0.jar'):
            downloadBriefcase()
        self.pl = Pipeline('Pipeline.db', '.', 'enilepiP', True)
        self.settings = self.pl.config()
        self.settingsPipeline = self.settings['pipeline']
        self.settingsODK = self.settings['odk']
        self.settingsOpenVA = self.settings['openVA']
        self.settingsDHIS = self.settings['dhis']

        self.xferDB = TransferDB(dbFileName='Pipeline.db',
                                 dbDirectory='.',
                                 dbKey='enilepiP',
                                 plRunDate=True)
        self.conn = self.xferDB.connectDB()
        self.c = self.conn.cursor()
        self.c.execute('DELETE FROM EventLog;')
        self.conn.commit()
        self.c.execute('DELETE FROM VA_Storage;')
        self.conn.commit()
        self.odkBC = self.pl.runODK(self.settingsODK, self.settingsPipeline)

    def test_runODK_checkDuplicates(self):
        """Check checkDuplicates() method:"""

        vaRecords = read_csv('ODKFiles/odkBCExportNew.csv')
        nVA = vaRecords.shape[0]
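        # Finish the pipeline run and store the results, then rerun the ODK step;
        # every record should then be flagged as a duplicate in the EventLog.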
        rOut = self.pl.runOpenVA(self.settingsOpenVA, self.settingsPipeline,
                                 self.settingsODK.odkID,
                                 self.pl.pipelineRunDate)
        pipelineDHIS = self.pl.runDHIS(self.settingsDHIS,
                                       self.settingsPipeline)
        self.pl.storeResultsDB()
        os.remove('ODKFiles/odkBCExportNew.csv')
        os.remove('OpenVAFiles/pycrossva_input.csv')
        os.remove('OpenVAFiles/openVA_input.csv')
        odkBC2 = self.pl.runODK(self.settingsODK, self.settingsPipeline)
        self.c.execute('SELECT eventDesc FROM EventLog;')
        query = self.c.fetchall()
        nDuplicates = [i[0] for i in query if 'duplicate' in i[0]]
        self.assertEqual(len(nDuplicates), nVA)

    def tearDown(self):

        if os.path.isfile('ODKFiles/odkBCExportNew.csv'):
            os.remove('ODKFiles/odkBCExportNew.csv')
        if os.path.isfile('ODKFiles/odkBCExportPrev.csv'):
            os.remove('ODKFiles/odkBCExportPrev.csv')
        shutil.rmtree('DHIS/blobs/', ignore_errors=True)
        shutil.rmtree('ODKFiles/ODK Briefcase Storage/', ignore_errors=True)
        self.conn.close()

    @classmethod
    def tearDownClass(cls):

        os.remove('Pipeline.db')
Code Example #22
    def test_cleanPipeline_rmFiles(self):
        """Test file removal."""
        if not os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            shutil.copy("ODKFiles/previous_bc_export.csv",
                        "ODKFiles/odkBCExportPrev.csv")
        if not os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            shutil.copy("ODKFiles/another_bc_export.csv",
                        "ODKFiles/odkBCExportNew.csv")

        if not os.path.isfile("OpenVAFiles/openVA_input.csv"):
            shutil.copy("OpenVAFiles/sample_openVA_input.csv",
                        "OpenVAFiles/openVA_input.csv")
        if not os.path.isfile("OpenVAFiles/entityAttributeValue.csv"):
            shutil.copy("OpenVAFiles/sampleEAV.csv",
                        "OpenVAFiles/entityAttributeValue.csv")
        if not os.path.isfile("OpenVAFiles/recordStorage.csv"):
            shutil.copy("OpenVAFiles/sample_recordStorage.csv",
                        "OpenVAFiles/recordStorage.csv")
        if not os.path.isfile("OpenVAFiles/newStorage.csv"):
            shutil.copy("OpenVAFiles/sample_newStorage.csv",
                        "OpenVAFiles/newStorage.csv")

        os.makedirs("DHIS/blobs/", exist_ok = True)
        shutil.copy("OpenVAFiles/sample_newStorage.csv",
                    "DHIS/blobs/001-002-003.db")

        dbFileName = "copy_Pipeline.db"
        dbDirectory = "."
        dbKey = "enilepiP"
        useDHIS = True
        nowDate = datetime.datetime.now()
        pipelineRunDate = nowDate.strftime("%Y-%m-%d_%H:%M:%S")
        pl = Pipeline(dbFileName, dbDirectory, dbKey, useDHIS)
        pl.closePipeline()
        fileExist = False
        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            fileExist = True
        if os.path.isfile("ODKFiles/odkBCExportPrev.csv"):
            fileExist = True
        if os.path.isfile("ODKFiles/odkBCExportNew.csv"):
            fileExist = True
        if os.path.isfile("OpenVAFiles/openVA_input.csv"):
            fileExist = True
        if os.path.isfile("OpenVAFiles/entityAttributeValue.csv"):
            fileExist = True
        if os.path.isfile("OpenVAFiles/recordStorage.csv"):
            fileExist = True
        if os.path.isfile("OpenVAFiles/newStorage.csv"):
            fileExist = True
        if os.path.isfile("DHIS/blobs/001-002-003.db"):
            fileExist = True
        self.assertFalse(fileExist)
        xferDB = TransferDB(dbFileName = dbFileName,
                            dbDirectory = dbDirectory,
                            dbKey = dbKey,
                            plRunDate = pipelineRunDate)
        conn = xferDB.connectDB()
        c = conn.cursor()
        c.execute("UPDATE ODK_Conf SET odkLastRun = '1900-01-01_00:00:01';")
        conn.commit()
        conn.close()