Example 1
    def commitUpdateBuffer(self, updateBuffer, linkedItemIdsByBaseId, conn=None):
        """Take data accumulated in updateBuffer from prior update methods and
        commit them as incremental changes to the database.
        Clear buffer thereafter.

        Recognized updateBuffer keys:
        - "incrementDataByItemIdPair": dict mapping str((itemId, subsequentItemId))
          to {columnName: increment}; each entry becomes an
          UPDATE clinical_item_association SET col=col+increment ... query.
        - "analyzedPatientItemIds": collection of patient_item IDs whose
          analyze_date should be stamped with the current time (batched per
          self.itemsPerUpdate to avoid one massive IN-list query).

        linkedItemIdsByBaseId is passed through to prepareItemAssociations so
        baseline association rows exist before the incremental updates.
        conn: optional externally managed DB connection; if given, the caller
        keeps responsibility for closing it, otherwise one is created and
        closed here. The commit is issued on conn in either case.
        """
        import ast;  # Local import: used to parse itemIdPair keys safely

        extConn = conn is not None;
        if not extConn:
            conn = self.connFactory.connection();
        try:
            if "incrementDataByItemIdPair" in updateBuffer:
                # Ensure baseline records exist to facilitate subsequent incremental update queries
                itemIdPairs = list(updateBuffer["incrementDataByItemIdPair"].keys());
                self.prepareItemAssociations(itemIdPairs, linkedItemIdsByBaseId, conn);

                # Construct incremental update queries based on each item pair's incremental counts/sums
                nItemPairs = len(itemIdPairs);
                log.debug("Primary increment updates for %d item pairs" % nItemPairs );
                incrementProg = ProgressDots(name="Increments");
                incrementProg.total = nItemPairs;
                cursor = conn.cursor();
                try:
                    for (itemIdPair, incrementData) in updateBuffer["incrementDataByItemIdPair"].items():
                        # Column names / increments come from internal accumulation,
                        # so direct interpolation is acceptable here; the item IDs
                        # are still bound as proper query parameters below.
                        setClauses = \
                            [   "%(col)s=%(col)s+%(increment)s" % {"col": col, "increment": increment}
                                for (col, increment) in incrementData.items()
                            ];
                        query = "UPDATE clinical_item_association SET " + ", ".join(setClauses);
                        query += " WHERE clinical_item_id=%(p)s AND subsequent_item_id=%(p)s" % {"p": DBUtil.SQL_PLACEHOLDER};
                        # Keys were stored as the string form of an ID tuple.
                        # Use ast.literal_eval rather than eval so only literal
                        # tuples are accepted, never arbitrary expressions.
                        itemIdPair = ast.literal_eval(itemIdPair);
                        cursor.execute(query, itemIdPair);
                        incrementProg.update();
                    # incrementProg.printStatus();
                finally:
                    cursor.close();

            if "analyzedPatientItemIds" in updateBuffer:
                # Record analysis date for the given patient items
                patientItemIdSet = updateBuffer["analyzedPatientItemIds"];
                nItems = len(patientItemIdSet);
                log.debug("Record %d analyzed items" % nItems );
                if nItems > 0:
                    def stampAnalyzeDates(paramList, updateSize):
                        # Single batched UPDATE stamping analyze_date for the
                        # buffered IDs (first param is the timestamp, rest are IDs)
                        DBUtil.execute \
                        (   """update patient_item
                            set analyze_date = %(p)s
                            where patient_item_id in (%(pList)s)
                            and analyze_date is null
                            """ % {"p": DBUtil.SQL_PLACEHOLDER, "pList":generatePlaceholders(updateSize)},
                            tuple(paramList),
                            conn=conn
                        );

                    paramList = [datetime.now()];
                    updateSize = 0;
                    for itemId in patientItemIdSet:
                        paramList.append(itemId);
                        updateSize += 1;

                        if self.itemsPerUpdate is not None and updateSize > self.itemsPerUpdate:
                            # Update what we have so far to avoid excessive single mass query that may overwhelm database timeout
                            stampAnalyzeDates(paramList, updateSize);
                            # Reset item list parameters
                            paramList = [datetime.now()];
                            updateSize = 0;
                    # Final update for any remaining items. Guard against an empty
                    # batch (possible when the loop ends exactly on a flush), which
                    # would otherwise yield an invalid empty IN () placeholder list.
                    if updateSize > 0:
                        stampAnalyzeDates(paramList, updateSize);

            # Flag that any cached association metrics will be out of date
            self.dataManager.clearCacheData("analyzedPatientCount");
            self.dataManager.clearCacheData("clinicalItemCountsUpdated");

            # Database commit
            conn.commit();

            # Wipe out buffer to reflect incremental changes done, so any new ones should be recorded fresh
            updateBuffer.clear();
            updateBuffer["nAssociations"] = 0;
        finally:
            if not extConn:
                conn.close();
Example 2
from medinfo.db import DBUtil;
from medinfo.db.Model import SQLQuery;
from medinfo.common.Util import ProgressDots;
from medinfo.common.Util import log;

# Remove all patient_item rows whose clinical_item belongs to category 161.
conn = DBUtil.connection();
try:
    itemIdRows = DBUtil.execute("select clinical_item_id from clinical_item where clinical_item_category_id = 161",conn=conn);
    clinicalItemIds = tuple(itemIdRow[0] for itemIdRow in itemIdRows);
    log.info("Deleting for %s Clinical Items" % len(clinicalItemIds) );

    patientItemQuery = SQLQuery();
    patientItemQuery.addSelect("patient_item_id");
    patientItemQuery.addFrom("patient_item");
    patientItemQuery.addWhereIn("clinical_item_id", clinicalItemIds );

    progress = ProgressDots();
    progress.total = DBUtil.execute(patientItemQuery.totalQuery(), conn=conn)[0][0];

    # Materialize the full ID list up front so the row deletions below cannot
    # interfere with an open cursor still traversing the same table.
    patientItemRows = DBUtil.execute(patientItemQuery, conn=conn );
    for patientItemRow in patientItemRows:
        patientItemId = patientItemRow[0];
        DBUtil.execute("delete from patient_item where patient_item_id = %s", (patientItemId,), conn=conn);
        progress.update();
    progress.printStatus();
finally:
    conn.close();