def loadReleaseResults(release, genomeToId, divToId, evalueToId, geneToId, resultsGen):
    '''
    resultsGen: a generator that yields ((qdb, sdb, div, evalue), orthologs) tuples.
    Convert the results into rows and insert them into the results table:
        `id` int(10) unsigned NOT NULL auto_increment,
        `query_db` smallint(5) unsigned NOT NULL,
        `subject_db` smallint(5) unsigned NOT NULL,
        `divergence` tinyint(3) unsigned NOT NULL,
        `evalue` tinyint(3) unsigned NOT NULL,
        `filename` text,
        `mod_time` datetime default NULL,
        `orthologs` longblob,
        `num_orthologs` int(10) unsigned NOT NULL,
    '''
    def convertForDb(result):
        # Convert various items into the form the database table wants:
        # change strings into database ids, encode orthologs, etc.
        (qdb, sdb, div, evalue), orthologs = result
        qdbId = genomeToId[qdb]
        sdbId = genomeToId[sdb]
        divId = divToId[div]
        evalueId = evalueToId[evalue]
        # orthologs using db ids and floats, not strings.
        dbOrthologs = [(geneToId[qid], geneToId[sid], float(dist))
                       for qid, sid, dist in orthologs]
        encodedOrthologs = encodeOrthologs(dbOrthologs)
        numOrthologs = len(orthologs)
        return qdbId, sdbId, divId, evalueId, encodedOrthologs, numOrthologs

    numPerGroup = 400  # not too huge, not too slow.
    sql1 = ('INSERT IGNORE INTO {} (query_db, subject_db, divergence, evalue, '
            'mod_time, orthologs, num_orthologs) VALUES ').format(
                releaseTable(release, 'results'))
    for i, group in enumerate(util.groupsOfN(resultsGen, numPerGroup)):
        # Cannot just use numPerGroup, b/c the last group can have fewer results.
        sql = sql1 + ', '.join(['(%s, %s, %s, %s, NOW(), %s, %s)'
                                for j in range(len(group))])
        argsLists = [convertForDb(result) for result in group]
        # Flatten args into one long list for the sql.
        args = list(itertools.chain.from_iterable(argsLists))
        with connCM() as conn:
            dbutil.insertSQL(conn, sql, args=args)
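# A minimal sketch of the chunking helper assumed above.  util.groupsOfN is not
# shown in this snippet; the assumption, based on how it is used, is that it
# yields lists of up to n items from an iterator, with the last group possibly
# shorter.  Illustrative only, not necessarily the real util implementation.
def groupsOfN(iterable, n):
    group = []
    for item in iterable:
        group.append(item)
        if len(group) == n:
            yield group
            group = []
    if group:
        yield group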
def put(self, key, value):
    encodedKey = json.dumps(key)
    encodedValue = json.dumps(value)
    with self.connect() as conn:
        with dbutil.doTransaction(conn):
            sql = ('INSERT INTO ' + self.table + ' (name, value) VALUES (%s, %s) '
                   'ON DUPLICATE KEY UPDATE value=%s')
            return dbutil.insertSQL(conn, sql,
                                    args=[encodedKey, encodedValue, encodedValue])
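# Hypothetical usage sketch (the `store` instance and key/value below are
# illustrative, not from the original code): keys and values are JSON-encoded
# on the way in, and calling put() again with the same key overwrites the row
# via ON DUPLICATE KEY UPDATE instead of raising a duplicate-key error.
store.put(['roundup', 'release_42', 'status'], {'state': 'loading'})
store.put(['roundup', 'release_42', 'status'], {'state': 'done'})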
def put(self, key, value):
    encodedKey = json.dumps(key)
    encodedValue = json.dumps(value)
    with self.manager as conn:
        with dbutil.doTransaction(conn):
            sql = ('INSERT INTO ' + self.table + ' (name, value) VALUES (%s, %s) '
                   'ON DUPLICATE KEY UPDATE value=%s')
            return dbutil.insertSQL(conn, sql,
                                    args=[encodedKey, encodedValue, encodedValue])
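# A hedged sketch of the matching read path, which is not part of the original
# snippet: assuming `self.manager` yields a DB-API connection and `self.table`
# has the (name, value) columns used by put() above, a get() might decode the
# stored JSON like this.  The real class may expose something different.
def get(self, key, default=None):
    encodedKey = json.dumps(key)
    sql = 'SELECT value FROM ' + self.table + ' WHERE name = %s'
    with self.manager as conn:
        cursor = conn.cursor()
        try:
            cursor.execute(sql, [encodedKey])
            row = cursor.fetchone()
        finally:
            cursor.close()
    return json.loads(row[0]) if row else default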
def send(self, message, timeout=None):
    '''
    timeout: if None, the default read lock timeout is used.  If not None, this
    is the number of seconds before the read lock on this message expires.
    '''
    if timeout is None:
        timeout = self.timeout
    sql = 'INSERT INTO message_queue (queue, message, timeout) VALUES (%s, %s, %s)'
    with self.manager as conn:
        with dbutil.doTransaction(conn):
            return dbutil.insertSQL(conn, sql, args=[self.queue, message, timeout])
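# Hypothetical usage sketch (the `queue` instance and message payload are
# illustrative, not from the original code): enqueue a JSON-encoded task and
# override the default read lock timeout so a consumer can hold the message
# for up to ten minutes before its read lock expires.
queue.send(json.dumps({'task': 'load_results', 'release': '42'}), timeout=600)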