def test_deleteRows(self):
    """DBUtil.deleteRows should remove rows both by the default Id column
    and by an explicitly named column.

    Bug fix: the original version computed the row count after the
    delete-by-"MyInteger" call but never asserted on it, so the second
    half of the test silently verified nothing.
    """
    DBUtil.runDBScript(self.SCRIPT_FILE, False)
    query = "select count(*) from TestTypes;"

    # Insert some test data to delete
    tableName = "TestTypes"
    columnNames = self.DATA_COLS.split()
    idFile = StringIO()
    DBUtil.insertFile(self.DATA_FILE, tableName, columnNames, None, idFile)
    idValues = idFile.getvalue().split()

    # Count up rows before and after delete by default Id column
    initialCount = DBUtil.execute(query)[0][0]
    DBUtil.deleteRows("TestTypes", idValues)
    afterCount = DBUtil.execute(query)[0][0]
    self.assertEqual(initialCount - len(idValues), afterCount)

    # Reinsert the test data to try deleting them by a non-default Id column
    idFile = StringIO()
    DBUtil.insertFile(self.DATA_FILE, tableName, columnNames, None, idFile)

    nonDefaultIds = [100, 200]
    initialCount = DBUtil.execute(query)[0][0]
    DBUtil.deleteRows("TestTypes", nonDefaultIds, "MyInteger")
    afterCount = DBUtil.execute(query)[0][0]
    # Missing in the original: verify the delete keyed on "MyInteger"
    # actually removed one row per Id (assumes DATA_FILE contains rows
    # with MyInteger values 100 and 200, consistent with the count math).
    self.assertEqual(initialCount - len(nonDefaultIds), afterCount)
def setUp(self):
    """Prepare state for test cases.

    Builds the clinical item and CPOE simulation schemata, then seeds the
    sim_result table with a small set of vital-sign result definitions.
    """
    DBTestCase.setUp(self);
    self.manager = SimManager();  # Instance to test on

    # Function-local import (presumably to avoid an import cycle with the
    # stride loader package -- confirm before moving to module level)
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader;
    ClinicalItemDataLoader.build_clinical_item_psql_schemata();
    self.manager.buildCPOESimSchema();

    log.info("Populate the database with test data")
    #### Basically import a bunch of rigged CSV or TSV files that have realistic simulating case and grading data
    # Get that data into the test database

    # Semicolon-delimited rows for sim_result: vital-sign definitions with
    # negative Ids so test data is distinguishable from real records.
    dataTextStr = \
"""sim_result_id;name;description;group_string;priority
-10;Temp;Temperature (F);Flowsheet>Vitals;10
-20;Pulse;Pulse / Heart Rate (HR);Flowsheet>Vitals;20
-30;SBP;Blood Pressure, Systolic (SBP);Flowsheet>Vitals;30
-40;DBP;Blood Pressure, Diastolic (DBP);Flowsheet>Vitals;40
-50;Resp;Respirations (RR);Flowsheet>Vitals;50
-60;FiO2;Fraction Inspired Oxygen;Flowsheet>Vitals;60
-70;Urine;Urine Output (UOP);Flowsheet>Vitals;70
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_result", delim=";");
def test_insertFile_escapeStrings(self):
    """Insert rows with escapeStrings=True and verify the stored values,
    including a non-ASCII byte that should come back as an escape sequence.
    """
    # Build the schema first; assumed working per test_runDBScript
    DBUtil.runDBScript(self.SCRIPT_FILE, False)

    tableName = "TestTypes"
    columnNames = ["MyInteger", "MyText"]

    # Assemble the tab-delimited input rows in a single pass
    inputRows = [
        '-1\t"A"\n',
        '-2\t"B\xaeb"\n',
        '-3\t"C"\n',
        '-4\tD\n',
    ]
    dataFile = StringIO("".join(inputRows))
    DBUtil.insertFile(dataFile, tableName, columnNames, escapeStrings=True)

    verifyQuery = \
        """select MyInteger, MyText
        from TestTypes
        where MyInteger < 0
        order by MyInteger desc
        """
    expectedData = \
        [   [ -1, "A"],
            [ -2, u"B\\xaeb"],
            [ -3, "C"],
            [ -4, "D"],
        ]

    # Verify rows inserted with properly escaped string content
    results = DBUtil.execute(verifyQuery)
    self.assertEqual(expectedData, results)
def test_insertFile_dateParsing(self):
    """Insert rows with a dateColFormats mapping and verify that several
    different date string formats are parsed into datetime values.
    """
    # Build the schema first; assumed working per test_runDBScript
    DBUtil.runDBScript(self.SCRIPT_FILE, False)

    tableName = "TestTypes"
    columnNames = ["MyInteger", "MyText", "MyDateTime"]

    # Same date string stored as plain text and as a parsed datetime column
    inputRows = [
        '-1\t"12/11/2010"\t"12/11/2010"\n',
        '-2\t"2013-04-15 13:45:21"\t"2013-04-15 13:45:21"\n',
        '-3\t"2003-04-15 10:45:21"\t"2003-04-15 10:45:21"\n',
        '-4\t"4/11/12 6:20"\t"4/11/12 6:20"\n',
    ]
    dataFile = StringIO("".join(inputRows))

    # Deliberately change capitalization to ensure robustness
    dateColFormats = {"myDateTime": None}
    DBUtil.insertFile(dataFile, tableName, columnNames, dateColFormats=dateColFormats)

    verifyQuery = \
        """select MyInteger, MyText, MyDateTime
        from TestTypes
        where MyInteger < 0
        order by MyInteger desc
        """
    expectedData = \
        [   [-1, "12/11/2010", datetime(2010, 12, 11)],
            [-2, "2013-04-15 13:45:21", datetime(2013, 4, 15, 13, 45, 21)],
            [-3, "2003-04-15 10:45:21", datetime(2003, 4, 15, 10, 45, 21)],
            [-4, "4/11/12 6:20", datetime(2012, 4, 11, 6, 20)],
        ]

    # Verify rows inserted with properly parsed dates
    results = DBUtil.execute(verifyQuery)
    self.assertEqual(expectedData, results)
def test_insertFile(self):
    """Insert a multi-line data file and confirm both the number of
    generated Ids and the resulting table contents.
    """
    # Build the schema first; assumed working per test_runDBScript
    DBUtil.runDBScript(self.SCRIPT_FILE, False)

    tableName = "TestTypes"
    generatedIdFile = StringIO()
    # Column names are extracted from the first row of the data file
    DBUtil.insertFile(self.MULTI_LINE_DATA_FILE, tableName, None, "\t", generatedIdFile)

    # One generated Id line is expected per inserted row
    expectedRowCount = len(self.MULTI_LINE_DATA_ROWS)
    self.assertEqual(expectedRowCount, generatedIdFile.getvalue().count("\n"))

    results = DBUtil.execute(self.DATA_QUERY)
    self.assertEqual(self.MULTI_LINE_DATA_ROWS, results)
def setUp(self):
    """Prepare state for test cases.

    Builds the stride and clinical_item schemata, then loads a small rigged
    set of culture microbiology rows into stride_culture_micro for the
    conversion under test. Note the deliberately odd rows: order -14 is
    duplicated, order -20 has antibiotic_name "Method", orders -15/-16 have
    empty organism/antibiotic/susceptibility fields, and -16 has no result
    time -- presumably to exercise edge-case handling in the converter.
    """
    DBTestCase.setUp(self)
    log.info("Populate the database with test data")
    StrideLoader.build_stride_psql_schemata()
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()

    ###### PREPARE SOME FAKE INPUT DATA TO BE CONVERTED ##############
    # Comma-delimited rows for stride_culture_micro (negative Ids = test data)
    dataTextStr = \
"""order_proc_anon_id,pat_anon_id,pat_enc_csn_anon_id,proc_code,organism_name,antibiotic_name,suseptibility,shifted_result_time
-10,1,2,LABBLC,BACTEROIDES FRAGILIS,Clindamycin,Intermediate,9/10/2111 13:15
-11,2,3,LABBLC,COAG NEGATIVE STAPHYLOCOCCUS,Vancomycin,Susceptible,4/26/2109 9:49
-12,3,4,LABBLC,COAG NEGATIVE STAPHYLOCOCCUS,Oxacillin,Resistant,4/18/2109 4:48
-13,4,5,LABBLC,COAG NEGATIVE STAPHYLOCOCCUS,Vancomycin,Susceptible,3/28/2109 23:21
-14,5,6,LABBLC,ENTEROCOCCUS FAECALIS,Amoxicillin/Clavulanic Acid,Susceptible,6/3/2109 17:07
-14,5,6,LABBLC,ENTEROCOCCUS FAECALIS,Amoxicillin/Clavulanic Acid,Susceptible,6/3/2109 17:07
-20,10,11,LABBLC,ENTEROCOCCUS FAECALIS,Method,,6/10/2109 17:07
-15,6,7,LABBLC2,,,,6/4/2109 17:07
-16,7,8,LABBLC2,,,,
-17,10,10,LABBLC2,ENTEROCOCCUS FAECALIS,Penicillin,,6/8/2109 17:07
-17,10,10,LABBLC2,ENTEROCOCCUS FAECALIS,,Intermediate,6/11/2109 17:07
-18,11,11,LABBLC2,ENTEROCOCCUS FAECALIS,Amikacin,Positive,6/11/2111 18:07
"""
    # Parse into DB insertion object; shifted_result_time is auto-parsed as a date
    DBUtil.insertFile(StringIO(dataTextStr), "stride_culture_micro", delim=",", dateColFormats={"shifted_result_time": None})

    self.converter = STRIDECultureMicroConversion()
def _insertTestRecords(self):
    """Populate the database with patient test data.

    Loads the tab-delimited fixture text for each table from
    ED_TEST_INPUT_TABLES, preserving the insertion order:
    clinical_item_category, then clinical_item, then patient_item.
    """
    for tableName in ("clinical_item_category", "clinical_item", "patient_item"):
        testRecords = ED_TEST_INPUT_TABLES.get(tableName)
        DBUtil.insertFile(StringIO(testRecords), tableName, delim="\t")
def setUp(self):
    """Prepare state for test cases.

    Builds the clinical item and CPOE simulation schemata, seeds them with
    rigged case/grading data, and records the grading results expected for
    each simulated patient in self.expected_grades_by_patient_id.
    """
    DBTestCase.setUp(self)
    self.manager = SimManager()  # Instance to test on
    # Function-local import (presumably to avoid an import cycle -- confirm)
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()
    self.manager.buildCPOESimSchema()

    log.info("Populate the database with test data")
    # Basically import a bunch of rigged CSV or TSV files that have realistic simulating case and grading data
    # Get that data into the test database

    # One catch-all clinical item category
    clinical_item_category_str = \
"""clinical_item_category_id;source_table
1;source_table
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_category_str), "clinical_item_category", delim=";")

    # Seven generic clinical items, all in the single category above
    clinical_item_str = \
"""clinical_item_id;clinical_item_category_id;name
1;1;Clinical item 1
2;1;Clinical item 2
3;1;Clinical item 3
4;1;Clinical item 4
5;1;Clinical item 5
6;1;Clinical item 6
7;1;Clinical item 7
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_str), "clinical_item", delim=";")

    # Simulation users; user 0 is the default user (variable name reused)
    clinical_item_str = \
"""sim_user_id;name
0;Default user
1;Jonathan Chen
2;User 2
3;User 3
4;User 4
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_str), "sim_user", delim=";")

    sim_patient_str = \
"""sim_patient_id;name;age_years;gender
1;Patient One;40;Male
2;Patient Two;50;Female
3;Patient Three;60;Male
4;Patient Four;70;Female
5;Patient Five;80;Male
6;Patient Six;90;Female
7;Patient Seven;100;Male
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_patient_str), "sim_patient", delim=";")

    sim_state_str = \
"""sim_state_id;name
1;Sim state 1
2;Sim state 2
3;Sim state 3
4;Sim state 4
5;Sim state 5
6;Sim state 6
7;Sim state 7
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_state_str), "sim_state", delim=";")

    # Orders placed per user/patient/state; drives activity and grading counts
    sim_patient_order_str = \
"""sim_patient_order_id;sim_user_id;sim_patient_id;clinical_item_id;relative_time_start;sim_state_id
1;0;1;1;1;1
2;1;1;2;2;2
3;1;1;3;3;3
4;1;1;4;4;4
5;1;1;5;5;5
6;1;1;6;6;6
7;1;1;7;7;7
8;2;2;1;1;1
9;3;2;2;2;1
10;3;2;3;3;2
11;2;3;1;1;1
12;3;3;2;2;2
13;3;3;3;3;3
14;1;4;1;1;2
15;1;4;2;2;3
16;2;5;1;1;3
17;2;5;2;2;4
18;3;6;4;1;1
19;3;6;4;2;2
20;3;6;4;3;3
21;4;7;5;1;1
22;4;7;5;2;2
23;4;7;5;3;3
24;4;7;5;4;4
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_patient_order_str), "sim_patient_order", delim=";")

    # Grading key: score per (state, clinical item), optionally grouped so
    # items sharing a group_name are only counted once
    sim_grading_key_str = \
"""sim_grader_id;sim_state_id;clinical_item_id;score;group_name
Jonathan Chen;1;1;1;g1
Jonathan Chen;2;2;1;g2
Jonathan Chen;3;3;1;g3
Jonathan Chen;4;4;1;
Jonathan Chen;5;5;1;g5
Jonathan Chen;6;6;1;
Jonathan Chen;7;7;1;g7
Jonathan Chen;3;1;1;g8
Jonathan Chen;4;2;1;g8
Jonathan Chen;1;4;-1000;
Jonathan Chen;2;4;10;
Jonathan Chen;3;4;2000;
Jonathan Chen;1;5;-1000;g9
Jonathan Chen;2;5;-1;
Jonathan Chen;3;5;0;g10
Jonathan Chen;3;5;-500;
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_grading_key_str), "sim_grading_key", delim=";")

    # Expected grading output per simulated patient case, used by the tests
    self.expected_grades_by_patient_id = [
        {
            "total_score": 6,  # Default user (id = 0) is ignored and NULL group_names are counted separately
            "sim_patient_id": 1,
            "most_graded_user_id": 1,
            "most_active_user_id": 1,
            "sim_grader_id": "Jonathan Chen"
        },
        {
            "total_score": 1,  # Ungraded (clinical_item_id, sim_state_id) keys are omitted from summation
            "sim_patient_id": 2,
            "most_graded_user_id": 2,  # Most graded user is User 2 (even though most active is User 3)
            "most_active_user_id": 3,
            "sim_grader_id": "Jonathan Chen"
        },
        {
            "total_score": 3,
            "sim_patient_id": 3,
            "most_graded_user_id": 3,  # Most graded user is the most active one
            "most_active_user_id": 3,
            "sim_grader_id": "Jonathan Chen"
        },
        # 4: No grading available for the existing case
        {
            "total_score": 1,  # Scores in the same group g8 are counted only once
            "sim_patient_id": 5,
            "most_graded_user_id": 2,
            "most_active_user_id": 2,
            "sim_grader_id": "Jonathan Chen"
        },
        {
            "total_score": 1010,  # Non-uniform scores (i.e., not all scores = 1)
            "sim_patient_id": 6,
            "most_graded_user_id": 3,
            "most_active_user_id": 3,
            "sim_grader_id": "Jonathan Chen"
        },
        {
            "total_score": -1501,  # All negative and one 0 score results in negative score
            "sim_patient_id": 7,
            "most_graded_user_id": 4,
            "most_active_user_id": 4,
            "sim_grader_id": "Jonathan Chen"
        }
        # 8: Case doesn't exist
    ]
def setUp(self):
    """Prepare state for test cases.

    Builds the clinical item and CPOE simulation schemata, then seeds a
    complete mini simulation scenario: item categories and orderable items,
    a test user and two test patients, result definitions, order-to-result
    mappings with turnaround times, simulation states with transitions, and
    pre-scripted patient orders, result values, and notes.
    """
    DBTestCase.setUp(self);
    self.manager = SimManager();  # Instance to test on
    # Function-local import (presumably to avoid an import cycle -- confirm)
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader;
    ClinicalItemDataLoader.build_clinical_item_psql_schemata();
    self.manager.buildCPOESimSchema();

    self.testPatientId = None;
    self.purgeTestRecords();
    log.info("Populate the database with test data")

    # Track inserted category Ids (as strings) for later queries / cleanup
    self.clinicalItemCategoryIdStrList = list();
    headers = ["clinical_item_category_id","source_table"];
    dataModels = \
        [   RowItemModel( [-1, "Labs"], headers ),
            RowItemModel( [-2, "Imaging"], headers ),
            RowItemModel( [-3, "Meds"], headers ),
            RowItemModel( [-4, "Nursing"], headers ),
            RowItemModel( [-5, "Problems"], headers ),
            RowItemModel( [-6, "Lab Results"], headers ),
        ];
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item_category", dataModel );
        self.clinicalItemCategoryIdStrList.append( str(dataItemId) );

    # Orderable clinical items, keyed into the categories above
    headers = ["clinical_item_id","clinical_item_category_id","name","analysis_status"];
    dataModels = \
        [   RowItemModel( [-1, -1, "CBC",1], headers ),
            RowItemModel( [-2, -1, "BMP",1], headers ),
            RowItemModel( [-3, -1, "Hepatic Panel",1], headers ),
            RowItemModel( [-4, -1, "Cardiac Enzymes",1], headers ),
            RowItemModel( [-5, -2, "CXR",1], headers ),
            RowItemModel( [-6, -2, "RUQ Ultrasound",1], headers ),
            RowItemModel( [-7, -2, "CT Abdomen/Pelvis",1], headers ),
            RowItemModel( [-8, -2, "CT PE Thorax",1], headers ),
            RowItemModel( [-9, -3, "Acetaminophen",1], headers ),
            RowItemModel( [-10, -3, "Carvedilol",1], headers ),
            RowItemModel( [-11, -3, "Enoxaparin",1], headers ),
            RowItemModel( [-12, -3, "Warfarin",1], headers ),
            RowItemModel( [-13, -3, "Ceftriaxone",1], headers ),
            RowItemModel( [-14, -4, "Foley Catheter",1], headers ),
            RowItemModel( [-15, -4, "Vital Signs",1], headers ),
            RowItemModel( [-16, -4, "Fall Precautions",1], headers ),
        ];
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item", dataModel );

    dataTextStr = \
"""sim_user_id;name
-1;Test User
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_user", delim=";");

    dataTextStr = \
"""sim_patient_id;age_years;gender;name
-1;60;Female;Test Female Patient
-2;55;Male;Test Male Patient
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_patient", delim=";");

    # Result definitions: vitals plus common hematology / chemistry labs
    dataTextStr = \
"""sim_result_id;name;description;group_string;priority
-10;Temp;Temperature (F);Flowsheet>Vitals;10
-20;Pulse;Pulse / Heart Rate (HR);Flowsheet>Vitals;20
-30;SBP;Blood Pressure, Systolic (SBP);Flowsheet>Vitals;30
-40;DBP;Blood Pressure, Diastolic (DBP);Flowsheet>Vitals;40
-50;Resp;Respirations (RR);Flowsheet>Vitals;50
-60;FiO2;Fraction Inspired Oxygen;Flowsheet>Vitals;60
-70;Urine;Urine Output (UOP);Flowsheet>Vitals;70
-11000;WBC;WBC;LAB BLOOD ORDERABLES>Hematology>Automated Blood Count;11000
-11010;HGB;HEMOGLOBIN;LAB BLOOD ORDERABLES>Hematology>Automated Blood Count;11010
-11020;HCT;HEMATOCRIT;LAB BLOOD ORDERABLES>Hematology>Automated Blood Count;11020
-11030;PLT;PLATELET COUNT;LAB BLOOD ORDERABLES>Hematology>Automated Blood Count;11030
-13010;NA;SODIUM, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13010
-13020;K;POTASSIUM, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13020
-13030;CL;CHLORIDE, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13030
-13040;CO2;CO2, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13040
-13050;BUN;UREA NITROGEN,SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13050
-13060;CR;CREATININE, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13060
-13070;GLU;GLUCOSE, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13070
-13090;CA;CALCIUM, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13090
-13110;MG;MAGNESIUM, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13110
-13120;PHOS;PHOSPHORUS, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13120
-13210;TBIL;TOTAL BILIRUBIN;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13210
-13220;DBIL;CONJUGATED BILI;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13220
-13230;IBIL;UNCONJUGATED BILIRUBIN;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13230
-13240;AST;AST (SGOT), SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13240
-13250;ALT;ALT (SGPT), SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13250
-13260;ALKP;ALK P'TASE, TOTAL, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13260
-13270;ALB;ALBUMIN, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13270
-13280;TP;PROTEIN, TOTAL, SER/PLAS;LAB BLOOD ORDERABLES>Chemistry>General Chemistry;13280
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_result", delim=";");

    # Map orders to expected results.
    # Simplify expect vital signs to result in 5 minutes. Basic chemistry labs in 10 minutes, CBC in 15 minutes
    dataTextStr = \
"""sim_order_result_map_id;clinical_item_id;sim_result_id;turnaround_time
-1;-15;-10;300
-2;-15;-20;300
-3;-15;-30;300
-4;-15;-40;300
-5;-15;-50;300
-6;-1;-11000;900
-7;-1;-11010;900
-8;-1;-11020;900
-9;-1;-11030;900
-10;-2;-13010;600
-11;-2;-13020;600
-12;-2;-13030;600
-13;-2;-13040;600
-14;-2;-13050;600
-15;-2;-13060;600
-16;-2;-13070;600
-17;-2;-13090;600
-18;-3;-13210;600
-19;-3;-13240;600
-20;-3;-13250;600
-21;-3;-13260;600
-22;-3;-13270;600
-23;-3;-13280;600
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_order_result_map", delim=";");

    # Simulation states; state 0 is the baseline / normal-results state
    dataTextStr = \
"""sim_state_id;name;description
0;Test 0; Test State 0
-1;Test 1;Test State 1
-2;Test 2;Test State 2
-3;Test 3;Test State 3
-4;Test 4;Test State 4
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_state", delim=";");

    # State transitions triggered by elapsed time and/or specific orders
    dataTextStr = \
"""sim_state_transition_id;pre_state_id;post_state_id;clinical_item_id;time_trigger;description
-1;-1;-2;None;9000;Passive time from 1 to 2
-2;-2;-3;-11;None;Transition 2 to 3 if order for 11 (Enoxaparin)
-3;-2;-3;-12;None;Transition 2 to 3 if order for 12 (Warfarin) (don't need both anti-coagulants. One adequate to trigger transition)
-4;-2;-4;-13;None;Transition 2 to 4 if order for 13 (Ceftriaxone)
-5;-3;-1;-10;9000;Transition 3 back to 1 if order for 10 (Carvedilol) OR 9000 seconds pass
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_state_transition", delim=";");

    # Pre-scripted state timeline for test patient -1
    dataTextStr = \
"""sim_patient_state_id;sim_patient_id;sim_state_id;relative_time_start;relative_time_end
-1;-1;-1;-7200;0
-3;-1;-1;0;1800
-2;-1;-2;1800;None
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_patient_state", delim=";");

    # Order Vital Signs at time 0, then basic labs (CBC, BMP, LFTs) at 10 minutes (600 seconds)
    dataTextStr = \
"""sim_patient_order_id;sim_user_id;sim_patient_id;sim_state_id;clinical_item_id;relative_time_start;relative_time_end
-1;-1;-1;-1;-15;0;None
-2;-1;-1;-1;-1;600;None
-3;-1;-1;-1;-2;600;None
-4;-1;-1;-1;-3;600;None
-5;-1;-1;-2;-15;1800;None
-6;-1;-1;-2;-1;1800;None
-7;-1;-1;-2;-2;1800;None
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_patient_order", delim=";");

    # Result values per (result, state): numeric value, noise range,
    # optional text interpretation, and abnormal flag (H/L)
    dataTextStr = \
"""sim_state_result_id;sim_result_id;sim_state_id;num_value;num_value_noise;text_value;result_flag
-1;-10;0;98.7;0.2;;
-2;-20;0;75;3;;
-3;-30;0;130;4;;
-4;-40;0;85;2;;
-5;-50;0;12;1;;
-6;-60;0;0.21;0;;
-7;-70;0;500;100;;
-8;-11000;0;7;1;;
-9;-11010;0;13;0.5;;
-10;-11020;0;40;1;;
-11;-11030;0;300;25;;
-12;-13010;0;140;4;;
-13;-13020;0;4.5;0.4;;
-14;-13030;0;95;3;;
-15;-13040;0;24;1;;
-16;-13050;0;12;3;;
-17;-13060;0;0.7;0.2;;
-18;-13070;0;140;12;;
-19;-13090;0;9;0.4;;
-20;-13110;0;2;0.3;;
-21;-13120;0;3;0.5;;
-22;-13210;0;0.2;0.1;;
-23;-13240;0;29;5;;
-24;-13250;0;20;4;;
-25;-13260;0;85;8;;
-26;-13270;0;4;0.4;;
-27;-13280;0;6;0.5;;
-28;-10;-1;101.4;0.4;Fever;H
-29;-20;-1;115;4;Tachycardia;H
-30;-30;-1;92;5;Hypotension;L
-31;-40;-1;55;3;Hypotension;L
-32;-70;-1;50;10;Low UOP;L
-33;-11000;-1;12;1;Leukocytosis;H
-34;-13060;-1;2.4;0.3;AKI;H
-35;-20;-2;105;4;Tachycardia;H
-36;-11000;-2;10;1;;
-37;-13060;-2;1.9;0.3;AKI;H
-38;-13070;-2;250;13;;H
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_state_result", delim=";");

    # Canned notes attached to states
    dataTextStr = \
"""sim_note_id;sim_state_id;relative_state_time;content
-1;-1;7200;Initial Note
-2;-2;0;Later Note
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "sim_note", delim=";");
DBUtil.execute \ ( """ -- Delete reference or diagnosis linking items delete from item_collection_item as ici where collection_type_id in (1,2,3,5); -- Delete the resultant orphaned parent collection records delete from item_collection where item_collection_id not in ( select item_collection_id from item_collection_item ); """, conn=conn ) # Populate the database with updates to the item_collection and item_collection_item tables to add more reference collections DBUtil.insertFile(open("item_collection.update.tab"), "item_collection") DBUtil.insertFile(open("item_collection_item.diagnosisLink.update.tab"), "item_collection_item") DBUtil.insertFile(open("item_collection_item.referenceOrders.update.tab"), "item_collection_item") conn.commit() finally: conn.close()
def setUp(self):
    """Prepare state for test cases.

    Builds the stride and clinical_item schemata, then loads one
    stride_patient row per race/ethnicity/gender combination of interest --
    including None / UNKNOWN / PATIENT REFUSED edge cases, a row with no
    birth year (-6050), and a few patients with death dates -- for the
    demographics conversion test to consume.
    """
    DBTestCase.setUp(self)
    log.info("Populate the database with test data")
    StrideLoader.build_stride_psql_schemata()
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()

    # Tab-delimited rows for stride_patient (negative Ids = test data)
    dataTextStr = \
"""pat_id\tdeath_date\tbirth_year\tgender\trace\tethnicity
-100\tNone\t1958\tMALE\tAMERICAN INDIAN OR ALASKA NATIVE\tNON-HISPANIC/NON-LATINO
-200\tNone\t1992\tMALE\tAMERICAN INDIAN OR ALASKA NATIVE\tHISPANIC/LATINO
-300\tNone\t1993\tFEMALE\tASIAN\tNON-HISPANIC/NON-LATINO
-400\t2011-09-28\t1997\tFEMALE\tASIAN\tHISPANIC/LATINO
-500\tNone\t1952\tMALE\tASIAN\tPATIENT REFUSED
-600\tNone\t1984\tMALE\tASIAN\tUNKNOWN
-700\tNone\t1991\tFEMALE\tASIAN - HISTORICAL CONV\tNON-HISPANIC/NON-LATINO
-800\tNone\t1962\tMALE\tASIAN, HISPANIC\tUNKNOWN
-900\tNone\t1972\tMALE\tASIAN, HISPANIC\tHISPANIC/LATINO
-1000\tNone\t1970\tMALE\tASIAN, NON-HISPANIC\tNON-HISPANIC/NON-LATINO
-1100\tNone\t2001\tFEMALE\tASIAN, NON-HISPANIC\tUNKNOWN
-1200\tNone\t1969\tFEMALE\tBLACK OR AFRICAN AMERICAN\tNON-HISPANIC/NON-LATINO
-1300\tNone\t1945\tFEMALE\tBLACK OR AFRICAN AMERICAN\tHISPANIC/LATINO
-1400\tNone\t1945\tFEMALE\tBLACK OR AFRICAN AMERICAN\tUNKNOWN
-1500\t2012-05-02\t1956\tMALE\tBLACK OR AFRICAN AMERICAN\tPATIENT REFUSED
-1600\tNone\t1981\tFEMALE\tBLACK, HISPANIC\tHISPANIC/LATINO
-1700\tNone\t1985\tFEMALE\tBLACK, HISPANIC\tUNKNOWN
-1800\tNone\t1932\tMALE\tBLACK, NON-HISPANIC\tNON-HISPANIC/NON-LATINO
-1900\tNone\t1954\tMALE\tBLACK, NON-HISPANIC\tUNKNOWN
-2000\tNone\t1932\tMALE\tNATIVE AMERICAN, HISPANIC\tHISPANIC/LATINO
-2100\tNone\t1961\tFEMALE\tNATIVE AMERICAN, NON-HISPANIC\tUNKNOWN
-2200\tNone\t1974\tMALE\tNATIVE HAWAIIAN OR OTHER PACIFIC ISLANDER \tNON-HISPANIC/NON-LATINO
-2300\tNone\t1953\tFEMALE\tNATIVE HAWAIIAN OR OTHER PACIFIC ISLANDER \tHISPANIC/LATINO
-2400\tNone\t1943\tFEMALE\tNATIVE HAWAIIAN OR OTHER PACIFIC ISLANDER \tUNKNOWN
-2500\tNone\t1963\tFEMALE\tOTHER\tHISPANIC/LATINO
-2600\tNone\t1974\tMALE\tOTHER\tNON-HISPANIC/NON-LATINO
-2700\tNone\t1963\tFEMALE\tOTHER\tPATIENT REFUSED
-2800\tNone\t2005\tFEMALE\tOTHER\tUNKNOWN
-2900\tNone\t1996\tMALE\tOTHER, HISPANIC\tUNKNOWN
-3000\tNone\t1952\tMALE\tOTHER, HISPANIC\tHISPANIC/LATINO
-3100\tNone\t1983\tFEMALE\tOTHER, HISPANIC\tNON-HISPANIC/NON-LATINO
-3200\tNone\t1971\tFEMALE\tOTHER, NON-HISPANIC\tNON-HISPANIC/NON-LATINO
-3300\tNone\t1976\tMALE\tOTHER, NON-HISPANIC\tUNKNOWN
-3400\tNone\t1995\tFEMALE\tOTHER, NON-HISPANIC\tHISPANIC/LATINO
-3500\tNone\t1982\tMALE\tPACIFIC ISLANDER, NON-HISPANIC\tNON-HISPANIC/NON-LATINO
-3600\tNone\t1940\tMALE\tPACIFIC ISLANDER, NON-HISPANIC\tUNKNOWN
-3700\tNone\t1934\tMALE\tPATIENT REFUSED\tPATIENT REFUSED
-3800\tNone\t1981\tFEMALE\tPATIENT REFUSED\tNON-HISPANIC/NON-LATINO
-3900\tNone\t1998\tMALE\tPATIENT REFUSED\tHISPANIC/LATINO
-4000\tNone\t1978\tMALE\tRACE AND ETHNICITY UNKNOWN\tUNKNOWN
-4100\tNone\t1933\tFEMALE\tRACE AND ETHNICITY UNKNOWN\tNON-HISPANIC/NON-LATINO
-4200\tNone\t1997\tFEMALE\tUNKNOWN\tUNKNOWN
-4300\tNone\t1932\tMALE\tUNKNOWN\tNON-HISPANIC/NON-LATINO
-4400\t2012-11-13\t1947\tFEMALE\tUNKNOWN\tHISPANIC/LATINO
-4500\tNone\t1932\tMALE\tUNKNOWN\tPATIENT REFUSED
-4600\tNone\t1936\tMALE\tUNKNOWN\t
-4700\tNone\t1993\tFEMALE\tWHITE\tNON-HISPANIC/NON-LATINO
-4800\tNone\t1948\tMALE\tWHITE\tHISPANIC/LATINO
-4900\tNone\t1968\tMALE\tWHITE\tUNKNOWN
-5000\tNone\t2003\tFEMALE\tWHITE\tPATIENT REFUSED
-5100\tNone\t1970\tMALE\tWHITE\t
-5200\tNone\t1998\tMALE\tWHITE, HISPANIC\tHISPANIC/LATINO
-5300\tNone\t1986\tMALE\tWHITE, HISPANIC\tUNKNOWN
-5400\tNone\t1997\tFEMALE\tWHITE, HISPANIC\tNON-HISPANIC/NON-LATINO
-5500\tNone\t1964\tMALE\tWHITE, NON-HISPANIC\tNON-HISPANIC/NON-LATINO
-5600\tNone\t1940\tMALE\tWHITE, NON-HISPANIC\tUNKNOWN
-5700\tNone\t1962\tMALE\tWHITE, NON-HISPANIC\tHISPANIC/LATINO
-5800\tNone\t1931\tFEMALE\tNone\tNone
-5900\tNone\t1991\tFEMALE\tNone\tNON-HISPANIC/NON-LATINO
-6000\tNone\t1973\tFEMALE\tNone\tUNKNOWN
-6050\tNone\tNone\tNone\tNone\tNone
-6100\tNone\t1953\tFEMALE\tNone\tHISPANIC/LATINO
"""
    # Full roster of inserted test patient Ids (as strings), one per data
    # row above, for the tests to iterate over
    self.patientIds = [
        "-100", "-200", "-300", "-400", "-500", "-600", "-700", "-800",
        "-900", "-1000", "-1100", "-1200", "-1300", "-1400", "-1500",
        "-1600", "-1700", "-1800", "-1900", "-2000", "-2100", "-2200",
        "-2300", "-2400", "-2500", "-2600", "-2700", "-2800", "-2900",
        "-3000", "-3100", "-3200", "-3300", "-3400", "-3500", "-3600",
        "-3700", "-3800", "-3900", "-4000", "-4100", "-4200", "-4300",
        "-4400", "-4500", "-4600", "-4700", "-4800", "-4900", "-5000",
        "-5100", "-5200", "-5300", "-5400", "-5500", "-5600", "-5700",
        "-5800", "-5900", "-6000", "-6050", "-6100"
    ]
    # Parse into DB insertion object; death_date is auto-parsed as a date
    DBUtil.insertFile(StringIO(dataTextStr), "stride_patient", delim="\t", dateColFormats={"death_date": None})
    self.converter = STRIDEDemographicsConversion()
def setUp(self):
    """Prepare state for test cases.

    Builds the clinical item and CPOE simulation schemata, then seeds them
    with rigged case and grading-key data (users, patients, states, orders,
    and grading scores) for the grading tests to consume.
    """
    DBTestCase.setUp(self)
    self.manager = SimManager()  # Instance to test on
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()
    self.manager.buildCPOESimSchema()

    log.info("Populate the database with test data")
    # Basically import a bunch of rigged CSV or TSV files that have realistic simulating case and grading data
    # Get that data into the test database

    # One catch-all clinical item category
    clinical_item_category_str = \
"""clinical_item_category_id;source_table
1;source_table
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_category_str), "clinical_item_category", delim=";")

    # Seven generic clinical items, all in the single category above
    clinical_item_str = \
"""clinical_item_id;clinical_item_category_id;name
1;1;Clinical item 1
2;1;Clinical item 2
3;1;Clinical item 3
4;1;Clinical item 4
5;1;Clinical item 5
6;1;Clinical item 6
7;1;Clinical item 7
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_str), "clinical_item", delim=";")

    # Simulation users; user 0 is the default user (variable name reused)
    clinical_item_str = \
"""sim_user_id;name
0;Default user
1;Jonathan Chen
2;User 2
3;User 3
4;User 4
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_str), "sim_user", delim=";")

    sim_patient_str = \
"""sim_patient_id;name;age_years;gender
1;Patient One;40;Male
2;Patient Two;50;Female
3;Patient Three;60;Male
4;Patient Four;70;Female
5;Patient Five;80;Male
6;Patient Six;90;Female
7;Patient Seven;100;Male
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_patient_str), "sim_patient", delim=";")

    sim_state_str = \
"""sim_state_id;name
1;Sim state 1
2;Sim state 2
3;Sim state 3
4;Sim state 4
5;Sim state 5
6;Sim state 6
7;Sim state 7
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_state_str), "sim_state", delim=";")

    # Orders placed per user/patient/state; drives activity and grading counts
    sim_patient_order_str = \
"""sim_patient_order_id;sim_user_id;sim_patient_id;clinical_item_id;relative_time_start;sim_state_id
1;0;1;1;1;1
2;1;1;2;2;2
3;1;1;3;3;3
4;1;1;4;4;4
5;1;1;5;5;5
6;1;1;6;6;6
7;1;1;7;7;7
8;2;2;1;1;1
9;3;2;2;2;1
10;3;2;3;3;2
11;2;3;1;1;1
12;3;3;2;2;2
13;3;3;3;3;3
14;1;4;1;1;2
15;1;4;2;2;3
16;2;5;1;1;3
17;2;5;2;2;4
18;3;6;4;1;1
19;3;6;4;2;2
20;3;6;4;3;3
21;4;7;5;1;1
22;4;7;5;2;2
23;4;7;5;3;3
24;4;7;5;4;4
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_patient_order_str), "sim_patient_order", delim=";")

    # Grading key: score per (state, clinical item), optionally grouped so
    # items sharing a group_name are only counted once
    sim_grading_key_str = \
"""sim_grader_id;sim_state_id;clinical_item_id;score;group_name
Jonathan Chen;1;1;1;g1
Jonathan Chen;2;2;1;g2
Jonathan Chen;3;3;1;g3
Jonathan Chen;4;4;1;
Jonathan Chen;5;5;1;g5
Jonathan Chen;6;6;1;
Jonathan Chen;7;7;1;g7
Jonathan Chen;3;1;1;g8
Jonathan Chen;4;2;1;g8
Jonathan Chen;1;4;-1000;
Jonathan Chen;2;4;10;
Jonathan Chen;3;4;2000;
Jonathan Chen;1;5;-1000;g9
Jonathan Chen;2;5;-1;
Jonathan Chen;3;5;0;g10
Jonathan Chen;3;5;-500;
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_grading_key_str), "sim_grading_key", delim=";")
def setUp(self):
    """Prepare state for test cases.

    Builds the clinical_item schemata and seeds categories, clinical items,
    patient_item history for several synthetic patients (-11111 .. -55555),
    plus order-set collection tables, then constructs the
    PreparePatientItems instance under test.
    """
    DBTestCase.setUp(self)
    log.info("Populate the database with test data")
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()
    #self.purgeTestRecords();

    # Record generated category IDs as strings for later query parameters
    self.clinicalItemCategoryIdStrList = list()
    headers = [ "clinical_item_category_id", "default_recommend", "source_table" ]
    dataModels = \
        [   RowItemModel( [-1, 1, "Labs"], headers ),
            RowItemModel( [-2, 1, "Imaging"], headers ),
            RowItemModel( [-3, 1, "Meds"], headers ),
            RowItemModel( [-4, 1, "Nursing"], headers ),
            RowItemModel( [-5, 0, "Problems"], headers ),
            RowItemModel( [-6, 1, "Lab Results"], headers ),
            RowItemModel( [-7, 1, "Admit Dx"], headers ),
            RowItemModel( [-8, 0, "Demographics"], headers ),
        ]
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item_category", dataModel)
        self.clinicalItemCategoryIdStrList.append(str(dataItemId))

    headers = [ "clinical_item_id", "clinical_item_category_id", "analysis_status", "default_recommend", "name" ]
    dataModels = \
        [   RowItemModel( [-1, -1, 1, 1, "CBC"], headers ),
            RowItemModel( [-2, -1, 1, 1, "BMP"], headers ),
            RowItemModel( [-3, -1, 1, 1, "Hepatic Panel"], headers ),
            RowItemModel( [-4, -1, 1, 1, "Cardiac Enzymes"], headers ),
            RowItemModel( [-5, -2, 1, 1, "CXR"], headers ),
            RowItemModel( [-6, -2, 1, 1, "RUQ Ultrasound"], headers ),
            RowItemModel( [-7, -2, 1, 1, "CT Abdomen/Pelvis"], headers ),
            RowItemModel( [-8, -2, 1, 1, "CT PE Thorax"], headers ),
            RowItemModel( [-9, -3, 1, 1, "Acetaminophen"], headers ),
            RowItemModel( [-10, -3, 1, 1, "Carvedilol"], headers ),
            RowItemModel( [-11, -3, 1, 1, "Enoxaparin"], headers ),
            RowItemModel( [-12, -3, 1, 1, "Warfarin"], headers ),
            RowItemModel( [-13, -3, 1, 0, "Ceftriaxone"], headers ),
            RowItemModel( [-14, -4, 1, 1, "Foley Catheter"], headers ),
            RowItemModel( [-15, -4, 1, 1, "Strict I&O"], headers ),
            RowItemModel( [-16, -4, 1, 1, "Fall Precautions"], headers ),
            RowItemModel( [-22, -5, 1, 1, "Diagnosis 2"], headers ),
            RowItemModel( [-23, -5, 1, 1, "Diagnosis 3"], headers ),
            RowItemModel( [-24, -5, 1, 1, "Diagnosis 4"], headers ),
            RowItemModel( [-21, -7, 0, 1, "Diagnosis 1 (Admit)"], headers ),
            RowItemModel( [-25, -7, 1, 1, "Diagnosis 2 (Admit)"], headers ),
            RowItemModel( [-30, -6, 1, 1, "Troponin (High)"], headers ),
            RowItemModel( [-31, -6, 1, 1, "BNP (High)"], headers ),
            RowItemModel( [-32, -6, 1, 1, "Creatinine (High)"], headers ),
            RowItemModel( [-33, -6, 1, 1, "ESR (High)"], headers ),
            # Default exclude from recommendations
            RowItemModel( [-41, -8, 1, 1, "Male"], headers ),
            RowItemModel( [-42, -8, 1, 1, "Female"], headers ),
            RowItemModel( [-43, -8, 1, 1, "Birth"], headers ),
            RowItemModel( [-44, -8, 1, 1, "Birth1980s"], headers ),
            RowItemModel( [-45, -8, 1, 1, "Birth1970s"], headers ),
            RowItemModel( [-46, -8, 1, 1, "RaceWhite"], headers ),
            RowItemModel( [-47, -8, 1, 1, "RaceBlack"], headers ),
            RowItemModel( [-49, -8, 1, 1, "Death"], headers ),
        ]
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item", dataModel)

    # Synthetic per-patient order/event history; item_date is the event time,
    # analyze_date marks records as already analyzed
    headers = [ "patient_item_id", "patient_id", "clinical_item_id", "item_date", "analyze_date" ]
    dataModels = \
        [   RowItemModel( [-101,-11111, -43, datetime(1972, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-102,-11111, -45, datetime(1972, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-103,-11111, -41, datetime(1972, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-104,-11111, -46, datetime(1972, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-52, -11111, -23, datetime(1999, 9, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-51, -11111, -21, datetime(2000, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-1,  -11111, -4,  datetime(2000, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-2,  -11111, -10, datetime(2000, 1, 1, 1), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-3,  -11111, -8,  datetime(2000, 1, 1, 2), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-4,  -11111, -4,  datetime(2000, 1, 1, 3), datetime(2010, 1, 1, 0)], headers ),   # Repeat item
            RowItemModel( [-60, -11111, -32, datetime(2000, 1, 1, 4), datetime(2010, 1, 1, 0)], headers ),   # Within query time
            RowItemModel( [-61, -11111, -30, datetime(2000, 1, 4, 0), datetime(2010, 1, 1, 0)], headers ),   # Within 1 week
            RowItemModel( [-63, -11111, -13, datetime(2000, 1, 4, 5), datetime(2010, 1, 1, 0)], headers ),   # Exclude item
            RowItemModel( [-64, -11111, -24, datetime(2000, 1, 4,10), datetime(2010, 1, 1, 0)], headers ),   # Exclude category
            RowItemModel( [-62, -11111, -31, datetime(2000, 1,10, 0), datetime(2010, 1, 1, 0)], headers ),   # Beyond 1 week
            RowItemModel( [-71, -11111, -8,  datetime(2000, 1, 4, 1), datetime(2010, 1, 1, 0)], headers ),   # Repeat query item within 1 week verify period, don't use as a verify item
            RowItemModel( [-201, -11111,-49, datetime(2009, 1, 1, 1), datetime(2010, 1, 1, 0)], headers ),   # Death date in far future
            RowItemModel( [-5,  -11111, -12, datetime(2000, 2, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-121,-22222, -43, datetime(1983, 5, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-122,-22222, -44, datetime(1983, 5, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-123,-22222, -42, datetime(1983, 5, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-124,-22222, -47, datetime(1983, 5, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-10, -22222, -7,  datetime(2000, 1, 5, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-12, -22222, -6,  datetime(2000, 1, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-13, -22222, -11, datetime(2000, 1, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-131,-33333, -43, datetime(1983, 5, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-132,-33333, -44, datetime(1983, 5, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-133,-33333, -42, datetime(1983, 5, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-134,-33333, -46, datetime(1983, 5, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-14, -33333, -6,  datetime(2000, 2, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-15, -33333, -2,  datetime(2000, 2,11, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-141,-44444, -43, datetime(1975, 3, 3, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-142,-44444, -45, datetime(1975, 3, 3, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-143,-44444, -42, datetime(1975, 3, 3, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-144,-44444, -46, datetime(1975, 3, 3, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-20, -44444, -21, datetime(2000, 1, 5, 0), datetime(2010, 1, 1, 0)], headers ),   # Admit diagnosis
            RowItemModel( [-22, -44444, -6,  datetime(2000, 1, 5, 0), datetime(2010, 1, 1, 0)], headers ),   # Items recorded with date level precision, not time
            RowItemModel( [-23, -44444, -12, datetime(2000, 1, 5, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-24, -44444, -11, datetime(2000, 1, 6, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-25, -44444, -8,  datetime(2000, 1, 6, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-204,-44444,-49,  datetime(2000, 2, 1, 1), datetime(2010, 1, 1, 0)], headers ),   # Death date within 1 month

            # Order Set Usage example
            RowItemModel( [-5002,-55555, -3,  datetime(2000,10, 1, 0, 0), datetime(2010, 1, 1, 0)], headers ),  # Very old item, not relevant to current query
            RowItemModel( [-5040,-55555, -25, datetime(2000,10,10, 0, 0), datetime(2010, 1, 1, 0)], headers ),  # Admit diagnosis (coded at date level precision before time-level order data)
            RowItemModel( [-5005,-55555, -2,  datetime(2000,10,10,10, 0), datetime(2010, 1, 1, 0)], headers ),  # Non-order set item before
            RowItemModel( [-5010,-55555, -1,  datetime(2000,10,10,10, 5), datetime(2010, 1, 1, 0)], headers ),  # Order Set 1
            RowItemModel( [-5020,-55555, -9,  datetime(2000,10,10,10, 5), datetime(2010, 1, 1, 0)], headers ),  # Order Set 1
            RowItemModel( [-5030,-55555, -5,  datetime(2000,10,10,10,10), datetime(2010, 1, 1, 0)], headers ),  # Ad-hoc within 1 hour
            RowItemModel( [-5050,-55555, -8,  datetime(2000,10,10,10,30), datetime(2010, 1, 1, 0)], headers ),  # Order Set 2
            RowItemModel( [-5060,-55555, -11, datetime(2000,10,10,10,30), datetime(2010, 1, 1, 0)], headers ),  # Order Set 2
            RowItemModel( [-5070,-55555, -12, datetime(2000,10,10,10,30), datetime(2010, 1, 1, 0)], headers ),  # Ad-hoc Within 1 hour
            RowItemModel( [-5080,-55555, -10, datetime(2000,10,10,20, 0), datetime(2010, 1, 1, 0)], headers ),  # Ad-hoc 10 hours later
            RowItemModel( [-5090,-55555, -1,  datetime(2000,10,10,20, 0), datetime(2010, 1, 1, 0)], headers ),  # Order Set 1 again (ignore repeats)
            RowItemModel( [-5100,-55555, -2,  datetime(2000,10,10,20, 0), datetime(2010, 1, 1, 0)], headers ),  # Order Set 1 again
            RowItemModel( [-5110,-55555, -3,  datetime(2000,10,10,20, 0), datetime(2010, 1, 1, 0)], headers ),  # Ad-hoc 10 hours later
        ]
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("patient_item", dataModel)

    # Order set definitions: each external_id is one order set, subdivided
    # into section/subgroup collections
    dataTextStr = \
"""item_collection_id;external_id;name;section;subgroup
-1;-1;Test Order Set - 1;Meds;TreatmentMeds
-2;-1;Test Order Set - 1;Meds;SymptomsMeds
-3;-1;Test Order Set - 1;Labs;GeneralLabs
-4;-2;Test Order Set - 2;Labs;GeneralLabs
-5;-2;Test Order Set - 2;Imaging;Xrays
-6;-2;Test Order Set - 2;Imaging;AdvancedImaging
-7;-3;Test Order Set - 3;Imaging;GeneralImaging
-8;-3;Test Order Set - 3;Nursing;GeneralNursing
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "item_collection", delim=";")

    # Membership of clinical items within each collection
    dataTextStr = \
"""item_collection_item_id;item_collection_id;clinical_item_id;collection_type_id
-1;-1;-11;4
-2;-1;-12;4
-3;-1;-13;4
-4;-2;-9;4
-5;-2;-10;4
-6;-3;-1;4
-7;-3;-2;4
-8;-3;-3;4
-100;-3;-4;4
-9;-4;-1;4
-10;-4;-2;4
-11;-4;-3;4
-101;-4;-11;4
-12;-5;-5;4
-74;-6;-6;4
-77;-6;-7;4
-13;-6;-8;4
-14;-7;-5;4
-15;-7;-6;4
-16;-7;-7;4
-17;-7;-8;4
-18;-8;-14;4
-19;-8;-15;4
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "item_collection_item", delim=";")

    # Links recording which patient_item orders came from which order-set item
    dataTextStr = \
"""patient_item_collection_link_id;patient_item_id;item_collection_item_id
-1;-5010;-6
-2;-5020;-4
-3;-5050;-13
-4;-5060;-101
-5;-5090;-6
-6;-5100;-7
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "patient_item_collection_link", delim=";")

    # Instance to test on
    self.analyzer = PreparePatientItems()
def _insertTestRecords(self): """Populate database for with patient data.""" # Populate clinical_item_category. testRecords = FM_TEST_INPUT_TABLES.get("clinical_item_category") DBUtil.insertFile(StringIO(testRecords), "clinical_item_category", \ delim="\t") # Populate clinical_item. testRecords = FM_TEST_INPUT_TABLES.get("clinical_item") DBUtil.insertFile(StringIO(testRecords), "clinical_item", delim="\t") # Populate patient_item. testRecords = FM_TEST_INPUT_TABLES.get("patient_item") DBUtil.insertFile(StringIO(testRecords), "patient_item", delim="\t", \ dateColFormats={"item_date": None}) # Populate stride_order_proc. testRecords = FM_TEST_INPUT_TABLES.get("stride_order_proc") DBUtil.insertFile(StringIO(testRecords), "stride_order_proc", \ delim="\t", \ dateColFormats={"item_date": None}) # Populate stride_order_results. testRecords = FM_TEST_INPUT_TABLES.get("stride_order_results") DBUtil.insertFile(StringIO(testRecords), "stride_order_results", \ delim="\t", dateColFormats={"result_time": None}) # Populate stride_flowsheet. testRecords = FM_TEST_INPUT_TABLES.get("stride_flowsheet") DBUtil.insertFile(StringIO(testRecords), "stride_flowsheet", \ delim="\t", \ dateColFormats={"shifted_record_dt_tm": None}) # Populate stride_order_med. testRecords = FM_TEST_INPUT_TABLES.get("stride_order_med") DBUtil.insertFile(StringIO(testRecords), "stride_order_med", \ delim="\t", dateColFormats = {"start_taking_time": None, \ "end_taking_time": None})
def setUp(self):
    """Prepare state for test cases.

    Seeds clinical item categories/items, patient_item event history for
    patients -123, -456, -789, and -321 (a clone of -123's timeline),
    order-set collection tables, and two prepared validation file strings,
    then constructs the OrderSetUsageAnalysis instance under test.
    """
    DBTestCase.setUp(self)
    log.info("Populate the database with test data")
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()
    #self.purgeTestRecords();

    headers = [ "clinical_item_category_id", "default_recommend", "source_table" ]
    dataModels = \
        [   RowItemModel( [-1, 1, "Labs"], headers ),
            RowItemModel( [-2, 1, "Imaging"], headers ),
            RowItemModel( [-3, 1, "Meds"], headers ),
            RowItemModel( [-4, 1, "Nursing"], headers ),
            RowItemModel( [-5, 1, "Problems"], headers ),
            RowItemModel( [-6, 1, "Lab Results"], headers ),
            RowItemModel( [-7, 0, "No Rec Category"], headers ),
        ]
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item_category", dataModel)

    headers = [ "clinical_item_id", "clinical_item_category_id", "patient_count", "name", "analysis_status" ]
    dataModels = \
        [   RowItemModel( [-1,  -1, 100,  "CBC",1], headers ),
            RowItemModel( [-2,  -1, 200,  "BMP",0], headers ), # Clear analysis status, so this will be ignored unless changed
            RowItemModel( [-3,  -1, 300,  "Hepatic Panel",1], headers ),
            RowItemModel( [-4,  -1, 400,  "Cardiac Enzymes",1], headers ),
            RowItemModel( [-5,  -2, 500,  "CXR",1], headers ),
            RowItemModel( [-6,  -2, 600,  "RUQ Ultrasound",1], headers ),
            RowItemModel( [-7,  -2, 700,  "CT Abdomen/Pelvis",1], headers ),
            RowItemModel( [-8,  -2, 800,  "CT PE Thorax",1], headers ),
            RowItemModel( [-9,  -3, 900,  "Acetaminophen",1], headers ),
            RowItemModel( [-10, -3, 1000, "Carvedilol",1], headers ),
            RowItemModel( [-11, -3, 100,  "Enoxaparin",1], headers ),
            RowItemModel( [-12, -3, 200,  "Warfarin",1], headers ),
            RowItemModel( [-13, -3, 300,  "Ceftriaxone",1], headers ),
            RowItemModel( [-14, -4, 400,  "Foley Catheter",1], headers ),
            RowItemModel( [-15, -4, 500,  "Strict I&O",1], headers ),
            RowItemModel( [-16, -4, 600,  "Fall Precautions",1], headers ),
            RowItemModel( [-77, -7, 700,  "No Rec Item",1], headers ),
        ]
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item", dataModel)

    # Patient order/event timelines; patient -321 mirrors -123's history with
    # patient_item_ids offset by -1000
    dataTextStr = \
"""patient_item_id;patient_id;clinical_item_id;item_date
-1;-123;-6;3/11/2012 10:57
-2;-123;-7;3/11/2012 10:57
-3;-123;-1;4/11/2012 10:57
-4;-123;-2;4/11/2012 10:57
-5;-123;-3;4/11/2012 10:57
-6;-123;-4;4/11/2012 10:57
-8;-123;-8;4/11/2012 10:57
-9;-123;-9;4/11/2012 10:57
-10;-123;-10;4/11/2012 0:00
-11;-123;-11;4/11/2012 10:57
-12;-123;-12;4/11/2012 10:57
-13;-123;-13;4/11/2012 10:57
-14;-123;-12;4/12/2012 3:57
-15;-123;-14;4/12/2012 3:57
-16;-123;-15;4/12/2012 3:57
-17;-123;-16;4/12/2012 3:57
-18;-123;-13;5/12/2012 8:57
-19;-123;-5;5/12/2012 8:57
-21;-456;-4;12/24/2013 6:50
-22;-456;-4;12/24/2013 7:50
-24;-456;-77;12/24/2013 8:50
-25;-456;-10;12/24/2013 0:00
-26;-456;-12;12/24/2013 6:50
-27;-456;-12;12/24/2013 6:55
-28;-456;-12;12/24/2013 6:59
-29;-456;-12;12/24/2013 18:50
-30;-456;-12;12/24/2013 19:50
-31;-456;-14;12/24/2013 18:50
-32;-456;-8;12/24/2013 18:50
-33;-456;-8;12/24/2013 20:50
-34;-456;-8;12/24/2013 18:30
-35;-789;-1;8/19/2011 11:12
-36;-789;-3;8/19/2011 11:12
-37;-789;-3;8/19/2011 0:12
-38;-789;-3;8/19/2011 0:52
-39;-789;-5;8/19/2011 11:12
-40;-789;-9;8/19/2011 11:12
-41;-789;-9;8/19/2011 0:12
-42;-789;-9;8/19/2011 13:12
-43;-789;-10;8/19/2011 0:00
-44;-789;-11;8/19/2011 11:12
-45;-789;-13;8/19/2011 19:12
-46;-789;-15;8/19/2011 19:12
-47;-789;-15;8/19/2011 19:14
-48;-789;-15;8/19/2011 19:22
-49;-789;-15;8/19/2011 19:32
-50;-789;-15;8/19/2011 19:42
-1001;-321;-6;3/11/2012 10:57
-1002;-321;-7;3/11/2012 10:57
-1010;-321;-10;4/11/2012 0:00
-1003;-321;-1;4/11/2012 10:57
-1004;-321;-2;4/11/2012 10:57
-1005;-321;-3;4/11/2012 10:57
-1006;-321;-4;4/11/2012 10:57
-1008;-321;-8;4/11/2012 10:57
-1009;-321;-9;4/11/2012 10:57
-1011;-321;-11;4/11/2012 10:57
-1012;-321;-12;4/11/2012 10:57
-1013;-321;-13;4/11/2012 10:57
-1014;-321;-12;4/12/2012 3:57
-1015;-321;-14;4/12/2012 3:57
-1016;-321;-15;4/12/2012 3:57
-1017;-321;-16;4/12/2012 3:57
-1018;-321;-13;4/12/2012 8:57
-1019;-321;-5;4/12/2012 8:57
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "patient_item", delim=";")

    # Order set definitions, including an "Ad-hoc Orders" pseudo order set (-4)
    dataTextStr = \
"""item_collection_id;external_id;name;section;subgroup
-1;-1;Test Order Set - 1;Meds;TreatmentMeds
-2;-1;Test Order Set - 1;Meds;SymptomsMeds
-3;-1;Test Order Set - 1;Labs;GeneralLabs
-4;-2;Test Order Set - 2;Labs;GeneralLabs
-5;-2;Test Order Set - 2;Imaging;Xrays
-6;-2;Test Order Set - 2;Imaging;AdvancedImaging
-7;-3;Test Order Set - 3;Imaging;GeneralImaging
-8;-3;Test Order Set - 3;Nursing;GeneralNursing
-9;-4;Test Order Set - 4;Ad-hoc Orders;Ad-hoc Orders
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "item_collection", delim=";")

    # Membership of clinical items within each collection
    dataTextStr = \
"""item_collection_item_id;item_collection_id;clinical_item_id;collection_type_id
-1;-1;-12;4
-2;-1;-13;4
-3;-2;-11;4
-4;-2;-10;4
-5;-3;-1;4
-6;-3;-2;4
-7;-4;-2;4
-8;-4;-3;4
-9;-5;-5;4
-10;-6;-6;4
-11;-6;-7;4
-12;-6;-8;4
-74;-6;-4;4
-77;-6;-77;4
-13;-7;-5;4
-14;-7;-6;4
-15;-8;-14;4
-16;-8;-15;4
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "item_collection_item", delim=";")

    # Links recording which patient_item orders came from which order-set item
    dataTextStr = \
"""patient_item_collection_link_id;patient_item_id;item_collection_item_id
-1;-3;-5
-2;-4;-6
-3;-15;-15
-4;-32;-12
-1001;-1003;-5
-1002;-1004;-6
-1003;-1015;-15
-1004;-1019;-9
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "patient_item_collection_link", delim=";")

    # Sample Prepared Validation File
    self.validationFileStr = \
"""patient_id\tqueryItemCountByIdJSON\tverifyItemCountByIdJSON\tbaseItemId\tbaseItemDate\tqueryStartTime\tqueryEndTime\tverifyEndTime\toutcome.7
-123\t{"-1": 1, "-2": 1, "-3": 1, "-4": 1, "-8": 1, "-9": 1, "-10": 1, "-11": 1, "-12": 1, "-13": 1}\t{"-12": 1, "-14": 1, "-15": 1, "-16": 1}\t10\t2012-04-11 00:00:00\t2012-04-11 10:57:00\t2012-04-11 14:57:00\t2012-04-12 10:57:00\t0
-456\t{"-4": 2, "-77": 1, "-10": 1, "-12": 3}\t{"-12": 2, "-14": 1, "-8": 3}\t10\t2013-12-24 00:00:00\t2013-12-24 06:50:00\t2013-12-24 10:50:00\t2013-12-25 06:50:00\t1
-789\t{"-1": 1, "-3": 3, "-5": 1, "-9": 3, "-10": 1, "-11": 1}\t{"-13": 1, "-15": 5}\t10\t2011-08-19 00:00:00\t2011-08-19 11:12:00\t2011-08-19 15:12:00\t2011-08-20 11:12:00\t0
-321\t{"-1": 1, "-2": 1, "-3": 1, "-4": 1, "-8": 1, "-9": 1, "-10": 1, "-11": 1, "-12": 1, "-13": 1}\t{"-12": 1, "-14": 1, "-15": 1, "-16": 1}\t10\t2012-04-11 00:00:00\t2012-04-11 10:57:00\t2012-04-11 14:57:00\t2012-04-12 10:57:00\t0
"""

    # Another Sample Prepared Validation File, with key Order Set triggers
    self.orderSetValidationFileStr = \
"""patient_id\tqueryItemCountByIdJSON\tverifyItemCountByIdJSON\tbaseItemId\tbaseItemDate\tqueryStartTime\tqueryEndTime\tverifyEndTime\toutcome.7\torder_set_id
-123\t{"-10": 1}\t{"-1": 1, "-2": 1, "-3": 1, "-4": 1, "-8": 1, "-9": 1, "-11": 1, "-12": 1, "-13": 1}\t10\t2012-04-11 00:00:00\t2012-04-11 10:57:00\t2012-04-11 10:57:00\t2012-04-11 11:57:00\t0\t-1
-456\t{"-10": 1, "-4": 2, "-77": 1, "-12": 4, "-14": 1, "-8": 1}\t{"-12": 1, "-8": 2}\t10\t2013-12-24 00:00:00\t2013-12-24 06:50:00\t2013-12-24 18:50:00\t2013-12-24 19:50:00\t1\t-2
-789\t{"-1": 1, "-3": 3, "-5": 1, "-9": 3, "-10": 1, "-11": 1}\t{"-13": 1, "-15": 5}\t10\t2011-08-19 00:00:00\t2011-08-19 11:12:00\t2011-08-19 15:12:00\t2011-08-20 11:12:00\t0\t-4
-321\t{"-10": 1}\t{"-1": 1, "-2": 1, "-3": 1, "-4": 1, "-8": 1, "-9": 1, "-11": 1, "-12": 1, "-13": 1}\t10\t2012-04-11 00:00:00\t2012-04-11 10:57:00\t2012-04-11 10:57:00\t2012-04-11 11:57:00\t0\t-1
-321\t{"-10": 1, "-1": 1, "-2": 1, "-3": 1, "-4": 1, "-8": 1, "-9": 1, "-11": 1, "-12": 1, "-13": 1}\t{"-12": 1,"-14": 1, "-15": 1, "-16": 1}\t10\t2012-04-11 00:00:00\t2012-04-11 10:57:00\t2012-04-12 3:57:00\t2012-04-12 4:57:00\t0\t-3
"""

    # Instance to test on
    self.analyzer = OrderSetUsageAnalysis()
def setUp(self):
    """Prepare state for test cases.

    Seeds clinical item categories/items, a small patient_item history for
    patients -11111/-22222/-33333, and order-set collection tables, then
    constructs the OrderSetRecommender instance under test.
    """
    DBTestCase.setUp(self)
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()

    log.info("Populate the database with test data")

    # Record generated category IDs as strings for later query parameters
    self.clinicalItemCategoryIdStrList = list()
    headers = ["clinical_item_category_id", "source_table"]
    dataModels = \
        [   RowItemModel( [-1, "Labs"], headers ),
            RowItemModel( [-2, "Imaging"], headers ),
            RowItemModel( [-3, "Meds"], headers ),
            RowItemModel( [-4, "Nursing"], headers ),
            RowItemModel( [-5, "Problems"], headers ),
            RowItemModel( [-6, "Lab Results"], headers ),
        ]
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item_category", dataModel)
        self.clinicalItemCategoryIdStrList.append(str(dataItemId))

    headers = ["clinical_item_id", "clinical_item_category_id", "name"]
    dataModels = \
        [   RowItemModel( [-1,  -1, "CBC"], headers ),
            RowItemModel( [-2,  -1, "BMP"], headers ),
            RowItemModel( [-3,  -1, "Hepatic Panel"], headers ),
            RowItemModel( [-4,  -1, "Cardiac Enzymes"], headers ),
            RowItemModel( [-5,  -2, "CXR"], headers ),
            RowItemModel( [-6,  -2, "RUQ Ultrasound"], headers ),
            RowItemModel( [-7,  -2, "CT Abdomen/Pelvis"], headers ),
            RowItemModel( [-8,  -2, "CT PE Thorax"], headers ),
            RowItemModel( [-9,  -3, "Acetaminophen"], headers ),
            RowItemModel( [-10, -3, "Carvedilol"], headers ),
            RowItemModel( [-11, -3, "Enoxaparin"], headers ),
            RowItemModel( [-12, -3, "Warfarin"], headers ),
            RowItemModel( [-13, -3, "Ceftriaxone"], headers ),
            RowItemModel( [-14, -4, "Foley Catheter"], headers ),
            RowItemModel( [-15, -4, "Strict I&O"], headers ),
            RowItemModel( [-16, -4, "Fall Precautions"], headers ),
        ]
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item", dataModel)

    # Synthetic per-patient order/event history; item_date is the event time,
    # analyze_date marks records as already analyzed
    headers = [ "patient_item_id", "patient_id", "clinical_item_id", "item_date", "analyze_date" ]
    dataModels = \
        [   RowItemModel( [-1,  -11111, -4,  datetime(2000, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-2,  -11111, -10, datetime(2000, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-3,  -11111, -8,  datetime(2000, 1, 1, 2), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-4,  -11111, -10, datetime(2000, 1, 2, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-5,  -11111, -12, datetime(2000, 2, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-10, -22222, -7,  datetime(2000, 1, 5, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-12, -22222, -6,  datetime(2000, 1, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-13, -22222, -11, datetime(2000, 1, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-14, -33333, -6,  datetime(2000, 2, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-15, -33333, -2,  datetime(2000, 2,11, 0), datetime(2010, 1, 1, 0)], headers ),
        ]
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("patient_item", dataModel)

    # Order set definitions: each external_id is one order set, subdivided
    # into section/subgroup collections
    dataTextStr = \
"""item_collection_id;external_id;name;section;subgroup
-1;-1;Test Order Set - 1;Meds;TreatmentMeds
-2;-1;Test Order Set - 1;Meds;SymptomsMeds
-3;-1;Test Order Set - 1;Labs;GeneralLabs
-4;-2;Test Order Set - 2;Labs;GeneralLabs
-5;-2;Test Order Set - 2;Imaging;Xrays
-6;-2;Test Order Set - 2;Imaging;AdvancedImaging
-7;-3;Test Order Set - 3;Imaging;GeneralImaging
-8;-3;Test Order Set - 3;Nursing;GeneralNursing
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "item_collection", delim=";")

    # Membership of clinical items within each collection
    dataTextStr = \
"""item_collection_item_id;item_collection_id;clinical_item_id;collection_type_id
-1;-1;-12;4
-2;-1;-13;4
-3;-2;-11;4
-4;-2;-10;4
-5;-3;-1;4
-6;-3;-2;4
-7;-4;-2;4
-8;-4;-3;4
-9;-5;-5;4
-10;-6;-6;4
-11;-6;-7;4
-12;-6;-8;4
-13;-7;-5;4
-14;-7;-6;4
-15;-8;-14;4
-16;-8;-15;4
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(dataTextStr), "item_collection_item", delim=";")

    # Instance to test on
    self.recommender = OrderSetRecommender()
def setUp(self):
    """Prepare state for test cases.

    Builds the clinical_item and CPOE simulation schemata, seeds a minimal
    one-of-each simulation fixture (category, item, user, patient, state,
    order) plus a grading key with two graders, and writes a temporary
    physician survey CSV for the usage reporter to consume.
    """
    DBTestCase.setUp(self)
    self.usage_reporter = SimManager()  # Instance to test on

    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()
    self.usage_reporter.buildCPOESimSchema()

    log.info("Populate the database with test data")
    # Basically import a bunch of rigged CSV or TSV files that have realistic simulating case and grading data
    # Get that data into the test database
    clinical_item_category_str = \
"""clinical_item_category_id;source_table
1;source_table
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_category_str), "clinical_item_category", delim=";")

    clinical_item_str = \
"""clinical_item_id;clinical_item_category_id;name
1;1;Clinical item 1
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_str), "clinical_item", delim=";")

    # NOTE(review): variable name reused from above; these rows are sim_user data
    clinical_item_str = \
"""sim_user_id;name
1;Jonathan Chen
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(clinical_item_str), "sim_user", delim=";")

    sim_patient_str = \
"""sim_patient_id;name;age_years;gender
1;Patient One;40;Male
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_patient_str), "sim_patient", delim=";")

    sim_state_str = \
"""sim_state_id;name
1;Sim state 1
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_state_str), "sim_state", delim=";")

    sim_patient_order_str = \
"""sim_patient_order_id;sim_user_id;sim_patient_id;clinical_item_id;relative_time_start;sim_state_id
1;1;1;1;1;1
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_patient_order_str), "sim_patient_order", delim=";")

    # Two graders scoring the same (state, item) pair for the same case
    sim_grading_key_str = \
"""sim_grader_id;sim_state_id;clinical_item_id;score;group_name;sim_case_name
Jonathan Chen;1;1;1;g1;case_name
Andre Kumar;1;1;2;g1;case_name
"""
    # Parse into DB insertion object
    DBUtil.insertFile(StringIO(sim_grading_key_str), "sim_grading_key", delim=";")

    # Prepare survey file mapping physician user names to resident status;
    # delete=False so the file persists for the reporter to reopen by name
    self.survey_csv = tempfile.NamedTemporaryFile(mode='w+', delete=False)
    self.survey_csv.write("Physician User Name,resident" + os.linesep + "Jonathan Chen,1")
    self.survey_csv.flush()  # Ensure content is on disk before the test reads it
def setUp(self):
    """Prepare state for test cases.

    Seeds clinical item categories/items (with item_count statistics),
    patient_item history, precomputed clinical_item_association co-occurrence
    counts, and order-set collection tables, then constructs the
    RecommendationClassificationAnalysis and PreparePatientItems instances
    under test.
    """
    DBTestCase.setUp(self);
    log.info("Populate the database with test data")
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader;
    ClinicalItemDataLoader.build_clinical_item_psql_schemata();

    # Record generated category IDs as strings for later query parameters
    self.clinicalItemCategoryIdStrList = list();
    headers = ["clinical_item_category_id","default_recommend","source_table"];
    dataModels = \
        [   RowItemModel( [-1, 1, "Labs"], headers ),
            RowItemModel( [-2, 1, "Imaging"], headers ),
            RowItemModel( [-3, 1, "Meds"], headers ),
            RowItemModel( [-4, 0, "Nursing"], headers ),    # Disable default recommend to allow for checks
            RowItemModel( [-5, 0, "Problems"], headers ),
            RowItemModel( [-6, 1, "Lab Results"], headers ),
            RowItemModel( [-7, 1, "Admit Dx"], headers ),
            RowItemModel( [-8, 0, "Demographics"], headers ),
        ];
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item_category", dataModel );
        self.clinicalItemCategoryIdStrList.append( str(dataItemId) );

    headers = ["clinical_item_id","clinical_item_category_id","default_recommend","item_count","name"];
    dataModels = \
        [   RowItemModel( [-1,  -1, 1, 30, "CBC"], headers ),
            RowItemModel( [-2,  -1, 1, 30, "BMP"], headers ),
            RowItemModel( [-3,  -1, 1, 95, "Hepatic Panel"], headers ),
            RowItemModel( [-4,  -1, 1, 40, "Cardiac Enzymes"], headers ),
            RowItemModel( [-5,  -2, 1, 40, "CXR"], headers ),
            RowItemModel( [-6,  -2, 1, 70, "RUQ Ultrasound"], headers ),
            RowItemModel( [-7,  -2, 1, 70, "CT Abdomen/Pelvis"], headers ),
            RowItemModel( [-8,  -2, 1, 35, "CT PE Thorax"], headers ),
            RowItemModel( [-9,  -3, 1, 0,  "Acetaminophen"], headers ),
            RowItemModel( [-10, -3, 1, 45, "Carvedilol"], headers ),
            RowItemModel( [-11, -3, 1, 50, "Enoxaparin"], headers ),
            RowItemModel( [-12, -3, 1, 75, "Warfarin"], headers ),
            RowItemModel( [-13, -3, 0, 0,  "Ceftriaxone"], headers ),   # Disable default recommend to allow for checks
            RowItemModel( [-14, -4, 1, 0,  "Foley Catheter"], headers ),
            RowItemModel( [-15, -4, 1, 0,  "Strict I&O"], headers ),
            RowItemModel( [-16, -4, 1, 0,  "Fall Precautions"], headers ),
        ];
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item", dataModel );

    # Synthetic per-patient order/event history; item_date is the event time,
    # analyze_date marks records as already analyzed
    headers = ["patient_item_id","patient_id","clinical_item_id","item_date","analyze_date"];
    dataModels = \
        [   RowItemModel( [-1,  -11111, -4,  datetime(2000, 1, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-2,  -11111, -10, datetime(2000, 1, 1, 1), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-3,  -11111, -8,  datetime(2000, 1, 1, 2), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-5,  -11111, -12, datetime(2000, 2, 1, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-6,  -11111, -6,  datetime(2000, 2, 1, 1), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-10, -22222, -7,  datetime(2000, 1, 5, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-12, -22222, -6,  datetime(2000, 1, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-13, -22222, -11, datetime(2000, 1, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-14, -33333, -6,  datetime(2000, 2, 9, 0), datetime(2010, 1, 1, 0)], headers ),
            RowItemModel( [-15, -33333, -2,  datetime(2000, 2,11, 0), datetime(2010, 1, 1, 0)], headers ),
        ];
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("patient_item", dataModel );

    # Precomputed co-occurrence statistics: counts of subsequent_item following
    # clinical_item within 0s / 1h / 1d / 1wk / any time windows, plus time-gap sums
    headers = \
        [   "clinical_item_id","subsequent_item_id",
            "count_0","count_3600","count_86400","count_604800","count_any",
            "time_diff_sum", "time_diff_sum_squares",
        ];
    dataModels = \
        [   RowItemModel( [ -1, -1,  30, 30, 30, 30, 30,  0.0, 0.0], headers ),
            RowItemModel( [ -2, -2,  30, 30, 30, 30, 30,  0.0, 0.0], headers ),
            RowItemModel( [ -3, -3,  95, 95, 97, 97, 97,  0.0, 0.0], headers ),
            RowItemModel( [ -4, -4,  40, 40, 40, 40, 40,  0.0, 0.0], headers ),
            RowItemModel( [ -5, -5,  40, 40, 50, 50, 50,  0.0, 0.0], headers ),
            RowItemModel( [ -6, -6,  70, 70, 70, 70, 70,  0.0, 0.0], headers ),
            RowItemModel( [ -7, -7,  70, 70, 70, 70, 70,  0.0, 0.0], headers ),
            RowItemModel( [ -8, -8,  35, 35, 35, 50, 80,  0.0, 0.0], headers ),
            RowItemModel( [-10,-10,  45, 45, 55, 60, 90,  0.0, 0.0], headers ),
            RowItemModel( [-11,-11,  50, 50, 50, 80, 90,  0.0, 0.0], headers ),
            RowItemModel( [-12,-12,  75, 75, 75, 80, 90,  0.0, 0.0], headers ),
            RowItemModel( [ -2, -4,   0,  2,  3,  3,  3,  200.0, 50000.0], headers ),
            RowItemModel( [ -2, -6,   2,  2,  5,  5,  5,  300.0, 11990.0], headers ),
            RowItemModel( [ -3, -1,  20, 23, 23, 23, 23,  400.0, 344990.0], headers ),
            RowItemModel( [ -4, -5,   3,  3, 13, 43, 43,  340.0, 343110.0], headers ),
            RowItemModel( [ -4, -6,  23, 33, 33, 33, 63,  420.0, 245220.0], headers ),
            RowItemModel( [ -4, -7,  27, 33, 33, 33, 63,  40.0, 5420.0], headers ),
            RowItemModel( [ -4,-10,  25, 35, 40, 45, 63,  47.0, 5420.0], headers ),
            RowItemModel( [ -5, -4,   0,  0, 20, 20, 20,  540.0, 54250.0], headers ),
            RowItemModel( [ -8,-12,  15, 15, 15, 15, 15,  25.0, 520.0], headers ),
            RowItemModel( [-10,-11,  12, 12, 16, 16, 20,  20.0, 220.0], headers ),
            RowItemModel( [-10,-12,  10, 10, 10, 10, 10,  20.0, 120.0], headers ),
        ];
    for dataModel in dataModels:
        (dataItemId, isNew) = DBUtil.findOrInsertItem("clinical_item_association", dataModel );

    # Order set definitions: each external_id is one order set, subdivided
    # into section/subgroup collections
    dataTextStr = \
"""item_collection_id;external_id;name;section;subgroup
-1;-1;Test Order Set - 1;Meds;TreatmentMeds
-2;-1;Test Order Set - 1;Meds;SymptomsMeds
-3;-1;Test Order Set - 1;Labs;GeneralLabs
-4;-2;Test Order Set - 2;Labs;GeneralLabs
-5;-2;Test Order Set - 2;Imaging;Xrays
-6;-2;Test Order Set - 2;Imaging;AdvancedImaging
-7;-3;Test Order Set - 3;Imaging;GeneralImaging
-8;-3;Test Order Set - 3;Nursing;GeneralNursing
-9;-3;Test Order Set - 3;Meds;RandomMeds
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "item_collection", delim=";");

    # Membership of clinical items within each collection
    dataTextStr = \
"""item_collection_item_id;item_collection_id;clinical_item_id;collection_type_id
-1;-1;-11;4
-2;-1;-12;4
-3;-1;-13;4
-4;-2;-9;4
-5;-2;-10;4
-6;-3;-1;4
-7;-3;-2;4
-8;-3;-3;4
-100;-3;-4;4
-9;-4;-1;4
-10;-4;-2;4
-11;-4;-3;4
-101;-4;-11;4
-12;-5;-5;4
-74;-6;-6;4
-77;-6;-7;4
-13;-6;-8;4
-14;-7;-5;4
-15;-7;-6;4
-16;-7;-7;4
-17;-7;-8;4
-18;-8;-14;4
-19;-8;-15;4
-20;-9;-11;4
-21;-9;-12;4
-22;-9;-13;4
"""
    # Parse into DB insertion object
    DBUtil.insertFile( StringIO(dataTextStr), "item_collection_item", delim=";");

    # Instance to test on
    self.analyzer = RecommendationClassificationAnalysis();
    self.preparer = PreparePatientItems();
def setUp(self):
    """Prepare state for test cases.

    Builds the STRIDE and clinical_item database schemata, then loads
    rigged lab order (stride_order_proc) and lab result
    (stride_order_results) rows from inline tab-delimited text, and
    constructs the STRIDEOrderResultsConversion instance under test.
    """
    DBTestCase.setUp(self)
    log.info("Populate the database with test data")
    # Project helpers that create the source and target table schemata
    StrideLoader.build_stride_psql_schemata()
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()

    # Lab orders: one tab-delimited record per line; IDs are negative,
    # presumably to keep test rows distinct from real data — verify.
    dataTextStr = """order_proc_id\tpat_id\tpat_enc_csn_id\torder_type\tproc_id\tproc_code\tdescription
-30560253\t-7803\t-1772\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-31300455\t-2168\t-261\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-29501223\t-9860\t-1772\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-31823670\t-2130\t-3897\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-31237072\t-124\t-8391\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-29966444\t-5690\t-1150\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-33197720\t-9926\t-4898\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-36668349\t-9815\t-3658\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-33280031\t-3858\t-6463\tLab\t471521\tLABACETA\tACETAMINOPHEN, SERUM
-38543619\t-6562\t-4489\tLab\t898794\tLABCSMP\tCANCER SOMATIC MUTATION PANEL
-35954787\t-7074\t-6965\tLab\t898794\tLABCSMP\tCANCER SOMATIC MUTATION PANEL
-22793877\t-3261\t-4837\tLab\t471944\tLABCBCD\tCBC WITH DIFF
-40604146\t-7480\t-8730\tLab\t896082\t10355R\tHLA - MONITORING BY IGG
-33765278\t-4255\t-622\tLab\t896082\t10355R\tHLA - MONITORING BY IGG
-39004110\t-5750\t-4953\tLab\t472748\tLABYLEPTN\tLEPTIN
-22910018\t-1862\t-621\tLab\t472785\tLABMGN\tMAGNESIUM, SERUM/PLASMA
-22840955\t-9532\t-639\tLab\t472837\tLABTNI\tTROPONIN I
-21479311\t-9844\t-5135\tLab\t473684\tLABMETB\tMETABOLIC PANEL, BASIC
-19231504\t-1518\t-3744\tLab\t473684\tLABMETB\tMETABOLIC PANEL, BASIC
-19007449\t-9542\t-4105\tLab\t473684\tLABMETB\tMETABOLIC PANEL, BASIC
-1748206\t-1099\t-9890\tLab\t473766\tLABY25VD\tVITAMIN D, 25-HYDROXY
-2794591\t-4038\t-6687\tLab\t473766\tLABY25VD\tVITAMIN D, 25-HYDROXY
-3580354\t-2795\t-752\tLab\t473766\tLABY25VD\tVITAMIN D, 25-HYDROXY
-3347071\t-6139\t-7104\tLab\t473766\tLABY25VD\tVITAMIN D, 25-HYDROXY
-4464954\t-4591\t-1383\tLab\t473766\tLABY25VD\tVITAMIN D, 25-HYDROXY
-3393444\t-5157\t-5537\tLab\t473766\tLABY25VD\tVITAMIN D, 25-HYDROXY
-2658433\t-6894\t-211\tLab\t473766\tLABY25VD\tVITAMIN D, 25-HYDROXY
"""
    DBUtil.insertFile(StringIO(dataTextStr), "stride_order_proc", delim="\t")

    # Deliberately design dates in far future to facilitate isolated testing
    dataTextStr = \
"""order_proc_id\tline\tresult_time\tcommon_name\tbase_name\tord_num_value\tresult_flag\tresult_in_range_yn
-4464954\t2\t5/28/2113 23:28\t25-HYDROXY D3\t25OHD3\t55\tNone\tNone
-3580354\t2\t12/17/2113 0:40\t25-HYDROXY D3\t25OHD3\t49\tNone\tNone
-3393444\t2\t10/9/2113 5:03\t25-HYDROXY D3\t25OHD3\t65\tNone\tNone
-3347071\t2\t9/8/2113 22:10\t25-HYDROXY D3\t25OHD3\t2\tNone\tNone
-2794591\t2\t3/19/2113 19:26\t25-HYDROXY D3\t25OHD3\t70\tNone\tNone
-2658433\t2\t7/5/2111 0:28\t25-HYDROXY D3\t25OHD3\t45\tNone\tNone
-1748206\t2\t7/3/2111 14:21\t25-HYDROXY D3\t25OHD3\t50\tNone\tNone
-36668349\t1\t10/30/2111 7:23\tACETAMINOPHEN(ACETA)\tACETA\t7.7\tNone\tNone
-33280031\t1\t11/29/2111 7:41\tACETAMINOPHEN(ACETA)\tACETA\t9999999\tNone\tNone
-33197720\t1\t11/29/2111 15:22\tACETAMINOPHEN(ACETA)\tACETA\tNone\tNone\tNone
-31823670\t1\t11/29/2111 14:08\tACETAMINOPHEN(ACETA)\tACETA\t5.4\tNone\tNone
-31300455\t1\t11/29/2111 18:58\tACETAMINOPHEN(ACETA)\tACETA\t270.7\tNone\tNone
-31237072\t1\t11/29/2111 5:45\tACETAMINOPHEN(ACETA)\tACETA\t50.6\tNone\tNone
-30560253\t1\t11/29/2111 16:13\tACETAMINOPHEN(ACETA)\tACETA\t2.6\tNone\tNone
-29966444\t1\t11/29/2111 2:27\tACETAMINOPHEN(ACETA)\tACETA\t4.2\tNone\tNone
-29501223\t1\t11/29/2111 0:15\tACETAMINOPHEN(ACETA)\tACETA\t5.1\tNone\tNone
-22793877\t4\t11/29/2111 14:36\tHEMATOCRIT(HCT)\tHCT\t19.7\tLow Panic\tNone
-22793877\t3\t11/30/2111 7:36\tHEMOGLOBIN(HGB)\tHGB\t7\tLow Panic\tNone
-40604146\t15\t12/13/2111 18:12\tINTERPRETATION/ COMMENTS CLASS II 9374R\t9374R\tNone\tNone\tNone
-33765278\t10\t9/22/2112 20:26\tINTERPRETATION/ COMMENTS CLASS II 9374R\t9374R\t9999999\tNone\tNone
-39004110\t1\t8/26/2112 15:07\tLEPTIN\tYLEPT1\t20\tNone\tNone
-22910018\t1\t11/13/2112 8:18\tMAGNESIUM, SER/PLAS(MGN)\tMG\t2.1\tNone\tY
-22793877\t6\t10/17/2112 1:09\tMCH(MCH)\tMCH\t31.7\tNone\tY
-22793877\t7\t12/13/2112 2:54\tMCHC(MCHC)\tMCHC\t35.4\tNone\tY
-22793877\t5\t11/11/2112 2:54\tMCV(MCV)\tMCV\t89.7\tNone\tY
-22793877\t9\t1/30/2113 13:28\tPLATELET COUNT(PLT)\tPLT\t11\tLow\tNone
-22793877\t2\t7/11/2113 23:24\tRBC(RBC)\tRBC\t2.2\tLow\tNone
-22793877\t8\t1/27/2113 14:44\tRDW(RDW)\tRDW\t33.3\tHigh\tNone
-21479311\t1\t8/31/2109 15:42\tSODIUM, SER/PLAS\tNA\t142\tNone\tNone
-19231504\t1\t8/20/2109 12:22\tSODIUM, SER/PLAS\tNA\t134\tLow\tNone
-19007449\t1\t9/13/2109 11:55\tSODIUM, SER/PLAS\tNA\t157\tHigh\tNone
-38543619\t15\t10/23/2109 14:30\tTP53(GTP53)\tGTP53\t9999999\tNone\tNone
-35954787\t15\t8/19/2109 16:39\tTP53(GTP53)\tGTP53\t9999999\tNone\tNone
-22793877\t1\t9/25/2109 16:10\tWBC(WBC)\tWBC\t0.2\tLow Panic\tNone
"""
    # dateColFormats flags result_time as a date column; the None value
    # presumably selects insertFile's default date parsing — confirm in DBUtil.
    DBUtil.insertFile(StringIO(dataTextStr), "stride_order_results", delim="\t", dateColFormats={"result_time": None})

    # Instance to test on
    self.converter = STRIDEOrderResultsConversion()
def setUp(self):
    """Prepare state for test cases"""
    DBTestCase.setUp(self)
    log.info("Populate the database with test data")
    from stride.clinical_item.ClinicalItemDataLoader import ClinicalItemDataLoader
    ClinicalItemDataLoader.build_clinical_item_psql_schemata()

    # Seed clinical item categories, recording each resulting ID as a string
    self.clinicalItemCategoryIdStrList = []
    categoryCols = ["clinical_item_category_id", "source_table"]
    categoryRows = \
        [   [-1, "Labs"],
            [-2, "Imaging"],
            [-3, "Meds"],
            [-4, "Nursing"],
            [-5, "Problems"],
            [-6, "Lab Results"],
        ]
    for rowValues in categoryRows:
        (categoryId, _) = DBUtil.findOrInsertItem("clinical_item_category", RowItemModel(rowValues, categoryCols))
        self.clinicalItemCategoryIdStrList.append(str(categoryId))

    # Seed the clinical items themselves
    itemCols = ["clinical_item_id", "clinical_item_category_id", "name", "analysis_status"]
    itemRows = \
        [   [-1, -1, "CBC", 1],
            [-2, -1, "BMP", 0],  # Analysis status cleared, so this one is ignored unless changed
            [-3, -1, "Hepatic Panel", 1],
            [-4, -1, "Cardiac Enzymes", 1],
            [-5, -2, "CXR", 1],
            [-6, -2, "RUQ Ultrasound", 1],
            [-7, -2, "CT Abdomen/Pelvis", 1],
            [-8, -2, "CT PE Thorax", 1],
            [-9, -3, "Acetaminophen", 1],
            [-10, -3, "Carvedilol", 1],
            [-11, -3, "Enoxaparin", 1],
            [-12, -3, "Warfarin", 1],
            [-13, -3, "Ceftriaxone", 1],
            [-14, -4, "Foley Catheter", 1],
            [-15, -4, "Strict I&O", 1],
            [-16, -4, "Fall Precautions", 1],
        ]
    for rowValues in itemRows:
        DBUtil.findOrInsertItem("clinical_item", RowItemModel(rowValues, itemCols))

    # Order set collections, loaded from semicolon-delimited text
    collectionDataStr = \
"""item_collection_id;external_id;name;section;subgroup
-1;-1;Test Order Set - 1;Meds;TreatmentMeds
-2;-1;Test Order Set - 1;Meds;SymptomsMeds
-3;-1;Test Order Set - 1;Labs;GeneralLabs
-4;-2;Test Order Set - 2;Labs;GeneralLabs
-5;-2;Test Order Set - 2;Imaging;Xrays
-6;-2;Test Order Set - 2;Imaging;AdvancedImaging
-7;-3;Test Order Set - 3;Imaging;GeneralImaging
-8;-3;Test Order Set - 3;Nursing;GeneralNursing
"""
    DBUtil.insertFile(StringIO(collectionDataStr), "item_collection", delim=";")

    # Membership of clinical items within each collection
    collectionItemDataStr = \
"""item_collection_item_id;item_collection_id;clinical_item_id;collection_type_id
-1;-1;-12;4
-2;-1;-13;4
-3;-2;-11;4
-4;-2;-10;4
-5;-3;-1;4
-6;-3;-2;4
-7;-4;-2;4
-8;-4;-3;4
-9;-5;-5;4
-10;-6;-6;4
-11;-6;-7;4
-12;-6;-8;4
-13;-7;-5;4
-14;-7;-6;4
-15;-8;-14;4
-16;-8;-15;4
"""
    DBUtil.insertFile(StringIO(collectionItemDataStr), "item_collection_item", delim=";")

    # Sample Prepared Validation File
    self.validationFileStr = \
"""patient_id\tqueryItemCountByIdJSON\tverifyItemCountByIdJSON\tbaseItemId\tbaseItemDate\tqueryStartTime\tqueryEndTime\tverifyEndTime\toutcome.7
123\t{"-1": 1, "-2": 1, "-3": 1, "-4": 1, "-5": 1, "-8": 1, "-9": 1, "-10": 1, "-11": 1, "-12": 1, "-13": 1}\t{"-12": 1, "-14": 1, "-15": 1, "-16": 1}\t10\t2012-04-11 00:00:00\t2012-04-11 10:57:00\t2012-04-11 14:57:00\t2012-04-12 10:57:00\t0
456\t{"-2": 1, "-4": 2, "-8": 4, "-10": 1, "-12": 6}\t{"-12": 6, "-14": 7, "-16": 8}\t10\t2013-12-24 00:00:00\t2013-12-24 06:50:00\t2013-12-24 10:50:00\t2013-12-25 06:50:00\t1
789\t{"-1": 1, "-3": 3, "-5": 5, "-9": 9, "-10": 1, "-11": 11}\t{"-13": 13, "-15": 15}\t10\t2011-08-19 00:00:00\t2011-08-19 11:12:00\t2011-08-19 15:12:00\t2011-08-20 11:12:00\t0
"""

    # Instance to test on
    self.analyzer = OrderSetRecommenderClassificationAnalysis()