Example No. 1
    def orderSetSearch(self, itemQuery, conn=None):
        """Look for clinical items based on specified query criteria"""
        extConn = True
        if conn is None:
            conn = self.connFactory.connection()
            extConn = False
        try:
            query = SQLQuery()
            query.addSelect("ic.item_collection_id")
            query.addSelect("ic.external_id")
            query.addSelect("ic.name as collection_name")
            query.addSelect("ic.section")
            query.addSelect("ic.subgroup")
            query.addSelect("ci.clinical_item_category_id")
            query.addSelect("ci.clinical_item_id")
            query.addSelect("ci.name")
            query.addSelect("ci.description")
            query.addFrom("item_collection as ic")
            query.addFrom("item_collection_item as ici")
            query.addFrom("clinical_item as ci")
            query.addWhere("ic.item_collection_id = ici.item_collection_id")
            query.addWhere("ici.clinical_item_id = ci.clinical_item_id")
            query.addWhereNotEqual("ic.section", AD_HOC_SECTION)
            if itemQuery.searchStr is not None:
                searchWords = itemQuery.searchStr.split()
                for searchWord in searchWords:
                    # Prefix search by regular expression
                    query.addWhereOp(
                        "ic.name", "~*",
                        "^%(searchWord)s|[^a-z]%(searchWord)s" %
                        {"searchWord": searchWord})
            if itemQuery.analysisStatus is not None:
                query.addWhereEqual("ci.analysis_status",
                                    itemQuery.analysisStatus)
            query.addOrderBy("lower(ic.name)")
            query.addOrderBy("ic.external_id")
            query.addOrderBy("lower(ic.section)")
            query.addOrderBy("lower(ic.subgroup)")
            query.addOrderBy("ci.clinical_item_id")
            query.addOrderBy("ci.name")
            dataTable = DBUtil.execute(query,
                                       includeColumnNames=True,
                                       conn=conn)
            dataModels = modelListFromTable(dataTable)

            # Aggregate up into order sets
            orderSetModel = None
            for row in dataModels:
                if orderSetModel is None or row["external_id"] != orderSetModel["external_id"]:
                    if orderSetModel is not None:
                        # Prior order set exists, yield/return it before preparing next one
                        yield orderSetModel
                    orderSetModel = {
                        "external_id": row["external_id"],
                        "name": row["collection_name"],
                        "itemList": list(),
                    }
                orderSetModel["itemList"].append(row)
            if orderSetModel is not None:
                # Yield the last processed model
                yield orderSetModel
        finally:
            if not extConn:
                conn.close()
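
A hedged usage sketch for the generator above: `manager` stands in for an instance of whatever class defines orderSetSearch, and the stub query object only carries the two attributes the method actually reads (searchStr, analysisStatus); neither name comes from the source.

class StubItemQuery:
    """Minimal stand-in for the real item query object (hypothetical)."""
    def __init__(self, searchStr=None, analysisStatus=None):
        self.searchStr = searchStr
        self.analysisStatus = analysisStatus

# manager is assumed to be an already-constructed instance of the class above.
itemQuery = StubItemQuery(searchStr="chest pain", analysisStatus=1)
for orderSet in manager.orderSetSearch(itemQuery):
    # Each yielded model groups the flat rows sharing one external_id
    print(orderSet["external_id"], orderSet["name"], len(orderSet["itemList"]))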
Example No. 2
    def signOrders(self,
                   userId,
                   patientId,
                   currentTime,
                   orderItemIds,
                   discontinuePatientOrderIds=None,
                   conn=None):
        """Commit new order item IDs for the given patient and starting now,
        and discontinue (set end date) for any existing orders specified.
        
        Record any patient state transitions the orders would trigger
        """
        extConn = True
        if conn is None:
            conn = self.connFactory.connection()
            extConn = False
        try:
            # Denormalized recording of current patient state to facilitate easy retrieval linked to orders later
            patientInfo = self.loadPatientInfo([patientId],
                                               currentTime,
                                               conn=conn)[0]
            stateId = patientInfo["sim_state_id"]
            postStateIdByItemId = patientInfo["postStateIdByItemId"]

            # Ensure unique and facilitate set operations
            orderItemIdSet = set(orderItemIds)

            insertDict = {
                "sim_user_id": userId,
                "sim_patient_id": patientId,
                "sim_state_id": stateId,
                "relative_time_start": currentTime
            }
            for itemId in orderItemIdSet:
                insertDict["clinical_item_id"] = itemId
                DBUtil.insertRow("sim_patient_order", insertDict, conn=conn)

            # See if any of these new orders triggered state transitions
            triggerItemIds = postStateIdByItemId.keys() & orderItemIdSet
            while triggerItemIds:  # Found a trigger item
                triggerItemId = None
                if len(triggerItemIds) > 1:
                    # Found multiple. Weird. Arbitrarily act on the one that appeared first in the input list
                    for itemId in orderItemIds:
                        if itemId in triggerItemIds:
                            triggerItemId = itemId
                            break
                else:
                    triggerItemId = triggerItemIds.pop()
                postStateId = postStateIdByItemId[triggerItemId]

                # Record the state transition
                self.recordStateTransition(patientId,
                                           stateId,
                                           postStateId,
                                           currentTime,
                                           conn=conn)

                # Reload patientInfo to reflect new patient state
                patientInfo = self.loadPatientInfo([patientId],
                                                   currentTime,
                                                   conn=conn)[0]
                stateId = patientInfo["sim_state_id"]
                postStateIdByItemId = patientInfo["postStateIdByItemId"]

                # Don't keep looking for this one, important to avoid infinite loop
                orderItemIdSet.discard(triggerItemId)
                triggerItemIds = postStateIdByItemId.keys() & orderItemIdSet

            if discontinuePatientOrderIds is not None:
                updateDict = {
                    "relative_time_end": currentTime
                }
                for patientOrderId in discontinuePatientOrderIds:
                    DBUtil.updateRow("sim_patient_order",
                                     updateDict,
                                     patientOrderId,
                                     conn=conn)
                # If order is discontinued/cancelled at the same (or before) time of entry,
                #   take that as a signal to cleanup and delete the record altogether
                #   (effectively there was no time at which the order was ever allowed to exist)
                deleteQuery = SQLQuery()
                deleteQuery.delete = True
                deleteQuery.addFrom("sim_patient_order")
                deleteQuery.addWhereEqual("sim_patient_id", patientId)
                deleteQuery.addWhere(
                    "relative_time_end <= relative_time_start")
                DBUtil.execute(deleteQuery, conn=conn)
        finally:
            conn.commit()
            if not extConn:
                conn.close()
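
A minimal usage sketch for signOrders, again assuming a `manager` instance of the same class; all IDs and the relative time are hypothetical placeholders, and the time unit (seconds of simulated time) is an assumption rather than something the snippet states.

userId = 1                     # hypothetical simulated-user ID
patientId = 42                 # hypothetical sim_patient ID
currentTime = 3600             # relative simulation time (assumed to be seconds)
newOrderItemIds = [101, 205]   # clinical_item IDs to sign now
stopOrderIds = [7]             # sim_patient_order IDs to discontinue at currentTime

manager.signOrders(userId, patientId, currentTime, newOrderItemIds,
                   discontinuePatientOrderIds=stopOrderIds)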
Example No. 3
    def loadResults(self, patientId, relativeTime, conn=None):
        """Load all results active by the given relativeTime.
        Will look for sim_patient_state times and sim_patient_order for diagnostic orders,
        to extrapolate all state-specific results for each order, or using default values
        if no state specific ones available.
        """
        extConn = True
        if conn is None:
            conn = self.connFactory.connection()
            extConn = False
        try:
            # First query for all expected result labels and states, without state-specific values, as
            #   we may want outer-join behavior against default state values
            query = SQLQuery()

            query.addSelect("distinct spo.sim_state_id")
            query.addSelect("sr.sim_result_id")
            query.addSelect("sr.name")
            query.addSelect("sr.description")
            query.addSelect("sr.priority")
            query.addSelect("sr.group_string")

            query.addSelect("spo.relative_time_start")
            query.addSelect("sorm.turnaround_time")
            query.addSelect(
                "(spo.relative_time_start + sorm.turnaround_time) as result_relative_time"
            )

            query.addFrom("sim_patient_order as spo")
            query.addFrom("sim_order_result_map as sorm")
            query.addFrom("sim_result as sr")

            query.addWhere("spo.clinical_item_id = sorm.clinical_item_id")
            query.addWhere("sorm.sim_result_id = sr.sim_result_id")

            query.addWhereEqual("spo.sim_patient_id", patientId)
            # Only unlock results if appropriate prerequisite orders were placed in the past (at least the turnaround time ago)
            query.addWhereOp("spo.relative_time_start + sorm.turnaround_time",
                             "<=", relativeTime)
            # Also check that the triggering order was not cancelled before the completion of the turnaround time
            query.addWhere(
                "( spo.relative_time_end is null or spo.relative_time_start + sorm.turnaround_time <= spo.relative_time_end )"
            )

            query.addOrderBy("result_relative_time")
            query.addOrderBy("sr.priority")

            resultTable = DBUtil.execute(query,
                                         includeColumnNames=True,
                                         conn=conn)
            resultModels = modelListFromTable(resultTable)

            # Pass through results to get set of states to search for
            # Include default state to fall back on
            stateIds = set([DEFAULT_STATE_ID])
            for resultModel in resultModels:
                stateIds.add(resultModel["sim_state_id"])

            # Second query for state-specific values
            valueQuery = SQLQuery()
            valueQuery.addSelect("ssr.sim_state_id")
            valueQuery.addSelect("ssr.sim_result_id")
            valueQuery.addSelect("ssr.num_value")
            valueQuery.addSelect("ssr.num_value_noise")
            valueQuery.addSelect("ssr.text_value")
            valueQuery.addSelect("ssr.result_flag")
            valueQuery.addSelect("ssr.clinical_item_id")
            # Output clinical item if result flag means something
            valueQuery.addFrom("sim_state_result as ssr")
            valueQuery.addWhereIn("ssr.sim_state_id", stateIds)
            valueTable = DBUtil.execute(valueQuery,
                                        includeColumnNames=True,
                                        conn=conn)
            valueModels = modelListFromTable(valueTable)

            # Store in-memory dictionary for rapid cross-referencing "join" to result table
            valueModelByStateIdByResultId = dict()
            for valueModel in valueModels:
                resultId = valueModel["sim_result_id"]
                stateId = valueModel["sim_state_id"]
                if resultId not in valueModelByStateIdByResultId:
                    valueModelByStateIdByResultId[resultId] = dict()
                valueModelByStateIdByResultId[resultId][stateId] = valueModel

            # Now go back through original results and join up state-specific values, or use default values if needed
            resultValueModels = list()
            for resultModel in resultModels:
                resultId = resultModel["sim_result_id"]
                stateId = resultModel["sim_state_id"]
                if resultId in valueModelByStateIdByResultId:
                    valueModelByStateId = valueModelByStateIdByResultId[resultId]
                    if stateId in valueModelByStateId:
                        # Have a state-specific value, populate that
                        valueModel = valueModelByStateId[stateId]
                        resultModel.update(valueModel)
                    elif DEFAULT_STATE_ID in valueModelByStateId:
                        # No state-specific value, but have a default one to populate instead
                        valueModel = valueModelByStateId[DEFAULT_STATE_ID]
                        resultModel.update(valueModel)
                    resultValueModels.append(resultModel)
                else:
                    # No result information available, even in default state. Skip these
                    #resultModel["num_value"] = None;
                    #resultModel["num_value_noise"] = None;
                    #resultModel["text_value"] = None;
                    #resultModel["result_flag"] = None;
                    #resultModel["clinical_item_id"] = None;
                    pass

            return resultValueModels
        finally:
            if not extConn:
                conn.close()
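
A hedged usage sketch for loadResults, assuming a `manager` instance; the patient ID and relative time are placeholders, and the returned row models are treated as dict-like objects, consistent with modelListFromTable above.

resultModels = manager.loadResults(patientId=42, relativeTime=3600)
for result in resultModels:
    # Default-state values were merged in when no state-specific row existed
    print(result["name"], result.get("num_value"), result.get("result_flag"))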
Example No. 4
    line = line.strip().split("\t")
    treatment_aggregation_map[line[0]] = line[1]
print(treatment_aggregation_map)

# Create SCRIPT_FILE
SCRIPT_FILE = StringIO()
SCRIPT_FILE.write("psql stride jwang198")

# Find all clinical_item_ids associated with each patient_id
# SELECT * FROM clinical_item WHERE clinical_item_category_id = 161;

DATA_QUERY = SQLQuery()
DATA_QUERY.addSelect("clinical_item_id")
DATA_QUERY.addSelect("description")
DATA_QUERY.addSelect("clinical_item_category_id")
DATA_QUERY.addWhereEqual("clinical_item_category_id", "161")
DATA_QUERY.addFrom("clinical_item")

print(DATA_QUERY)

# Write out data to CSV
DBUtil.runDBScript(SCRIPT_FILE, False)
results = DBUtil.execute(DATA_QUERY)

output = open("/Users/jwang/Desktop/Results/treatment_teams.csv", "w")
output.write(
    "clinical_item_id,description,clinical_item_category_id,treatment_team\n")

for line in results:
    treatment_team = "Unclassified"
    if line[1] in treatment_aggregation_map:
        treatment_team = treatment_aggregation_map[line[1]]
    output.write("{0},{1},{2},{3}\n".format(
        line[0], line[1], line[2], treatment_team))
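
The snippet above begins mid-loop, so the code that builds treatment_aggregation_map is not shown; a minimal sketch of the assumed preceding step, reading a tab-delimited mapping file whose path and column layout are hypothetical:

# Assumed format: "<treatment team description>\t<aggregated team name>" per line.
treatment_aggregation_map = {}
with open("/Users/jwang/Desktop/treatment_team_map.tsv") as mapping_file:  # hypothetical path
    for raw_line in mapping_file:
        fields = raw_line.strip().split("\t")
        treatment_aggregation_map[fields[0]] = fields[1]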
Example No. 5
    def copyPatientTemplate(self, patientData, templatePatientId, conn=None):
        """Create a new patient record based on the given template patient ID to copy from.
        Copies shallow attributes, overridden by any provided in the given patientData,
        as well as any patient states, notes, or physician orders UP TO (and including)
        relative time zero, but not subsequent states, notes, or physician orders
        (those are expected to reflect real user interaction records).
        """
        extConn = True
        if conn is None:
            conn = self.connFactory.connection()
            extConn = False
        try:
            templatePatientData = DBUtil.loadRecordModelById("sim_patient",
                                                             templatePatientId,
                                                             conn=conn)
            # Remove prior ID to allow for new one
            del templatePatientData["sim_patient_id"]
            # Override with new content (if exists)
            templatePatientData.update(patientData)
            # Create new patient record
            DBUtil.insertRow("sim_patient", templatePatientData, conn=conn)
            patientId = DBUtil.execute(DBUtil.identityQuery("sim_patient"),
                                       conn=conn)[0][0]

            # Copy initial template patient states
            query = SQLQuery()
            query.addSelect("*")
            # Copy all columns
            query.addFrom("sim_patient_state as sps")
            query.addWhereEqual("sps.sim_patient_id", templatePatientId)
            query.addWhereOp("relative_time_start", "<=", 0)
            query.addOrderBy("relative_time_start")
            dataTable = DBUtil.execute(query,
                                       includeColumnNames=True,
                                       conn=conn)
            dataModels = modelListFromTable(dataTable)
            nStates = len(dataModels)
            for i, dataModel in enumerate(dataModels):
                del dataModel["sim_patient_state_id"]
                # Discard copied ID to allow new one
                if i == nStates - 1:
                    del dataModel["relative_time_end"]
                    # Last state. Blank out end time to reflect open ended for simulation
                dataModel["sim_patient_id"] = patientId
                DBUtil.insertRow("sim_patient_state", dataModel, conn=conn)

            # Copy initial template orders
            query = SQLQuery()
            query.addSelect("*")
            query.addFrom("sim_patient_order as spo")
            query.addWhereEqual("sim_patient_id", templatePatientId)
            query.addWhereOp("relative_time_start", "<=", 0)
            query.addOrderBy("relative_time_start")
            dataTable = DBUtil.execute(query,
                                       includeColumnNames=True,
                                       conn=conn)
            dataModels = modelListFromTable(dataTable)
            for dataModel in dataModels:
                del dataModel["sim_patient_order_id"]
                dataModel["sim_patient_id"] = patientId
                DBUtil.insertRow("sim_patient_order", dataModel, conn=conn)

            conn.commit()
            # Transactional commit for multi-step process
            return patientId
        finally:
            if not extConn:
                conn.close()
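
A minimal usage sketch for copyPatientTemplate, assuming a `manager` instance; the template ID is a placeholder, and the assumption that sim_patient has a "name" column comes from the patientData override pattern, not from this snippet.

newPatientId = manager.copyPatientTemplate({"name": "Training Case Copy"},
                                           templatePatientId=5)
print("Created sim_patient", newPatientId)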
Example No. 6
    def test_buildFeatureMatrix_multiFlowsheet(self):
        """
        Test buildFeatureMatrix and addFlowsheet.
        """
        # Verify FeatureMatrixFactory throws Error if patientEpisodeInput
        # has not been set.
        with self.assertRaises(ValueError):
            self.factory.processPatientEpisodeInput()

        # Initialize DB cursor.
        cursor = self.connection.cursor()

        # Build SQL query for list of patient episodes.
        patientEpisodeQuery = SQLQuery()
        patientEpisodeQuery.addSelect("CAST(pat_id AS bigint)")
        patientEpisodeQuery.addSelect("sop.order_proc_id AS order_proc_id")
        patientEpisodeQuery.addSelect("proc_code")
        patientEpisodeQuery.addSelect("order_time")
        patientEpisodeQuery.addSelect(
            "COUNT(CASE result_in_range_yn WHEN 'Y' THEN 1 ELSE null END) AS normal_results"
        )
        patientEpisodeQuery.addFrom("stride_order_proc AS sop")
        patientEpisodeQuery.addFrom("stride_order_results AS sor")
        patientEpisodeQuery.addWhere("sop.order_proc_id = sor.order_proc_id")
        patientEpisodeQuery.addWhereEqual("proc_code", "LABMETB")
        patientEpisodeQuery.addGroupBy(
            "pat_id, sop.order_proc_id, proc_code, order_time")
        patientEpisodeQuery.addOrderBy(
            "pat_id, sop.order_proc_id, proc_code, order_time")
        cursor.execute(str(patientEpisodeQuery), patientEpisodeQuery.params)

        # Set and process patientEpisodeInput.
        self.factory.setPatientEpisodeInput(cursor, "pat_id", "order_time")
        self.factory.processPatientEpisodeInput()
        resultEpisodeIterator = self.factory.getPatientEpisodeIterator()
        resultPatientEpisodes = list()
        for episode in resultEpisodeIterator:
            episode["pat_id"] = int(episode["pat_id"])
            episode["order_time"] = DBUtil.parseDateValue(
                episode["order_time"])
            resultPatientEpisodes.append(episode)

        # Verify results (note sort order).
        expectedPatientEpisodes = FM_TEST_OUTPUT[
            "test_processPatientEpisodeInput"]
        self.assertEqualList(resultPatientEpisodes, expectedPatientEpisodes)

        # Add flowsheet features.
        flowsheetNames = ["Resp", "FiO2", "Glasgow Coma Scale Score"]
        # Look for flowsheet data 90 days before each episode, but never afterward.
        preTimeDelta = datetime.timedelta(-90)
        postTimeDelta = datetime.timedelta(0)
        self.factory.addFlowsheetFeatures(flowsheetNames, preTimeDelta,
                                          postTimeDelta)
        self.factory.buildFeatureMatrix()
        resultMatrix = self.factory.readFeatureMatrixFile()

        # Verify results.
        expectedMatrix = FM_TEST_OUTPUT[
            "test_buildFeatureMatrix_multiFlowsheet"]["expectedMatrix"]
        self.assertEqualTable(expectedMatrix, resultMatrix[2:], precision=5)

        try:
            os.remove(self.factory.getMatrixFileName())
        except OSError:
            pass
Example No. 7
# clinical_item_category_id = 13 (Discharge)
# clinical_item_category_id = 23 (Admission)

SCRIPT_FILE = StringIO()
SCRIPT_FILE.write("psql stride jwang198")
DATA_QUERY = SQLQuery()

DATA_QUERY.addSelect("pat_anon_id")
DATA_QUERY.addSelect("shifted_transf_in_dt_tm")
DATA_QUERY.addSelect("department_in")
DATA_QUERY.addSelect("event_in")
DATA_QUERY.addSelect("shifted_transf_out_dt_tm")
DATA_QUERY.addSelect("event_out")
DATA_QUERY.addSelect("pat_enc_csn_anon_id")
DATA_QUERY.addWhereEqual("event_out = 'Discharge' OR event_in", 'Admission')

DATA_QUERY.addFrom("stride_adt")
print(DATA_QUERY)

# Write out data to CSV
DBUtil.runDBScript(SCRIPT_FILE, False)
results = DBUtil.execute(DATA_QUERY)

output = open("/Users/jwang/Desktop/Results/admission_discharge_flow.csv", "w")
output.write(
    "patient_id,admission_time,department,admission,discharge_time,discharge,encounter_id\n"
)

for line in results:
    output.write("{0},{1},{2},{3},{4},{5},{6}\n".format(
Example No. 8
# H&P Note

SCRIPT_FILE = StringIO()
SCRIPT_FILE.write("psql stride jwang198")
DATA_QUERY = SQLQuery()

DATA_QUERY.addSelect("author_name")
DATA_QUERY.addSelect("pat_id")
DATA_QUERY.addSelect("note_date")
DATA_QUERY.addSelect("note_type")
DATA_QUERY.addSelect("provider_type")
DATA_QUERY.addSelect("specialty")
DATA_QUERY.addSelect("pat_enc_csn_id")

DATA_QUERY.addFrom("stride_note")
DATA_QUERY.addWhereEqual("note_type", "H&P")
DATA_QUERY.addOrderBy("author_name", dir="asc")
print(DATA_QUERY)

# Write out data to CSV
DBUtil.runDBScript(SCRIPT_FILE, False)
results = DBUtil.execute(DATA_QUERY)

output_hp = open("/Users/jwang/Desktop/Results/physician_patient_map_hp.csv",
                 "w")
output_hp.write(
    "clinician_name,patient_id,note_date,note_type,provider_type,specialty,encounter_id\n"
)

for line in results:
    if (line[4] == "Medical Student"
Example No. 9
# Create SCRIPT_FILE
SCRIPT_FILE = StringIO()
SCRIPT_FILE.write("psql stride jwang198")

# Load in top patient count diagnoses (we will only include these as binary covariates)
# select *
# from clinical_item
# where clinical_item_category_id = 2
# order by patient_count desc
DATA_QUERY = SQLQuery()
DATA_QUERY.addSelect("clinical_item_id")
DATA_QUERY.addSelect("name")
DATA_QUERY.addSelect("description")
DATA_QUERY.addSelect("patient_count")
DATA_QUERY.addFrom("clinical_item")
DATA_QUERY.addWhereEqual("patient_count > 0 and clinical_item_category_id", 2)
top_admission_diagnoses_f = open(
    "/Users/jwang/Desktop/Results/top_admission_diagnoses.csv", "w")
top_admission_diagnoses = {}
top_admission_diagnoses_string = []

DBUtil.runDBScript(SCRIPT_FILE, False)
results = DBUtil.execute(DATA_QUERY)
top_admission_diagnoses_f.write(
    "clinical_item_id,name,description,patient_count\n")
for line in results:
    top_admission_diagnoses_f.write("{0},{1},{2},{3}\n".format(
        line[0], line[1], line[2], line[3]))
    top_admission_diagnoses[line[0]] = line[2]
    top_admission_diagnoses_string.append(str(line[0]))
top_admission_diagnoses_f.close()
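
Per the comment above, only these top diagnoses become binary covariates; a small sketch of how the collected IDs might be turned into 0/1 indicator features for one patient (the patient's diagnosis set is hypothetical):

# Hypothetical: the set of top-diagnosis clinical_item_ids observed for one patient.
patient_diagnosis_ids = set(list(top_admission_diagnoses.keys())[:2])
binary_covariates = {item_id: int(item_id in patient_diagnosis_ids)
                     for item_id in top_admission_diagnoses}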
DATA_QUERY = SQLQuery()

# From patient_item or clinical_item
DATA_QUERY.addSelect("patient_id")
DATA_QUERY.addSelect("clinical_item_category_id")
DATA_QUERY.addSelect('name')
DATA_QUERY.addSelect("description")

# Join
DATA_QUERY.addFrom("patient_item")
DATA_QUERY.addJoin(
    "clinical_item",
    "patient_item.clinical_item_id = clinical_item.clinical_item_id",
    joinType="INNER")
DATA_QUERY.addWhereEqual("clinical_item_category_id = 161 AND description",
                         "Tt Med Univ (Primary)")  # Everyone
#DATA_QUERY.addWhereEqual("clinical_item_category_id = 161 AND description", "Tt Pamf Med (Primary)") # Expert

DATA_QUERY.addOrderBy("patient_id", dir="ASC")

print(DATA_QUERY)

# Write out data to CSV
DBUtil.runDBScript(SCRIPT_FILE, False)
results = DBUtil.execute(DATA_QUERY)

unique_patient_ids = {}

# output = open("/Users/jwang/Desktop/expert.csv", "w")
# outlist = open("/Users/jwang/Desktop/expert_list.csv", "w")
output = open("/Users/jwang/Desktop/everyone.csv",