import datetime
import json
from multiprocessing import Pool

from flask import request
from sqlalchemy import text

# Note: db, db_connection and MessageHandler are assumed to be defined or
# imported elsewhere in the application (e.g. a Flask-SQLAlchemy db object and
# a thin wrapper around the message queue client).


def read_multi_message(id_list):
    # Dispose of the inherited connection pool so this worker process opens
    # its own database connections.
    db.engine.dispose()
    result = db.engine.execute(
        text("select record from records where id = ANY(:recids)"),
        recids=id_list)
    msg_handler = MessageHandler()
    for row in result:
        msg_handler.add_message(row[0], "output_queue")

def read_message(recordid):
    db.engine.dispose()
    result = db.engine.execute(
        text("select record from records where id = :recid"),
        recid=recordid)
    msg_handler = MessageHandler()
    for row in result:
        msg_handler.add_message(row[0], "output_queue")

def messages():
    if request.method == 'POST':
        # POST: push the request body onto the read_ids queue.
        message = request.data
        msg_handler = MessageHandler()
        msg_handler.add_message(message, "read_ids")
        return "Message added: {0}".format(message)
    else:
        # GET: pop the next message from the read_ids queue.
        msg_handler = MessageHandler()
        message = msg_handler.get_message("read_ids")
        return "Message retrieved: {0}".format(message)

def simple_read(self):
    # Read every record in a single pass, push each one onto the output
    # queue, and return the time taken in seconds.
    start_time = datetime.datetime.now()
    sql = "select id, record from records"
    result = db_connection.execute(sql)
    msg_handler = MessageHandler()
    for row in result:
        msg_handler.add_message(row[1], "output_queue")
    end_time = datetime.datetime.now()
    time_taken = (end_time - start_time).total_seconds()
    return time_taken

def add_batch_ids_to_queue(record_ids, batch_size):
    # Split the record ids into batches of batch_size.
    batches = []
    some_ids = []
    for cur_id in record_ids:
        some_ids.append(cur_id)
        if len(some_ids) == batch_size:
            batches.append(some_ids)
            some_ids = []
    # Include the final, possibly partial, batch.
    if some_ids:
        batches.append(some_ids)

    # Queue each batch of ids as a JSON message.
    msg_handler = MessageHandler()
    for batch in batches:
        msg_handler.add_message(json.dumps({"ids": batch}), "input_queue")

def multi_proc3(self, batch):
    start_time = datetime.datetime.now()

    # Count the records so the last row can be detected while batching.
    sql = "select count(id) from records"
    count_result = db_connection.execute(sql)
    for row in count_result:
        count = row[0]
        break

    # Fetch every id and split them into batches of size batch, keeping the
    # final, possibly partial, batch.
    sql = "select id from records"
    result = db_connection.execute(sql)
    record_ids = []
    some_records = []
    for idx, row in enumerate(result):
        some_records.append(row[0])
        if len(some_records) == int(batch) or idx == count - 1:
            record_ids.append(some_records)
            some_records = []

    # Add id messages to input queue
    msg_handler = MessageHandler()
    for records in record_ids:
        msg_handler.add_message(json.dumps({"ids": records}), "input_queue")

    # Start four worker processes that drain the input queue.
    worker_results = []
    p = Pool(4)
    for i in range(4):
        worker_results.append(p.apply_async(read_id_from_queue, ()))
    p.close()
    for r in worker_results:
        r.get()
    p.join()  # This blocks until all the processes have finished

    end_time = datetime.datetime.now()
    time_taken = (end_time - start_time).total_seconds()
    return time_taken

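# read_id_from_queue, used by multi_proc3 above, is not defined in this
# excerpt. The following is a minimal sketch of what such a worker could look
# like, assuming MessageHandler.get_message returns a falsy value (e.g. None)
# once "input_queue" is empty; that return behaviour and this body are
# assumptions, not the original implementation.
def read_id_from_queue():
    msg_handler = MessageHandler()
    while True:
        message = msg_handler.get_message("input_queue")
        if not message:
            break  # assumed empty-queue signal
        # Each queued message is a JSON document of the form {"ids": [...]}.
        ids = json.loads(message)["ids"]
        read_multi_message(ids)
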
def add_ids_to_queue(record_ids):
    msg_handler = MessageHandler()
    msg_handler.add_message(json.dumps({"ids": record_ids}), "input_queue")