def archive_prices():
    log = common.get_logger()
    log.info('Archiving Prices..')
    conn = common.get_connection()
    trans = conn.begin()
    try:
        # Insert a first history row for entities that have a price but no history yet
        conn.execute("""
            INSERT INTO priceHistory (entity_id, price)
            SELECT entity.id, entity.price
            FROM entity
            WHERE (SELECT count(price) FROM priceHistory WHERE entity_id=entity.id)=0
              AND price IS NOT NULL
        """)
        # Append a new history row for entities whose price changed since the last snapshot
        conn.execute("""
            INSERT INTO priceHistory (entity_id, price)
            SELECT entity.id, entity.price
            FROM entity
            WHERE entity.price != (SELECT price FROM priceHistory
                                   WHERE entity_id=entity.id
                                   ORDER BY timestamp DESC LIMIT 1)
        """)
        trans.commit()
    except:
        trans.rollback()
        raise
    conn.close()
    log.info('..done.')
def archive_values():
    log = common.get_logger()
    log.info('Archiving values..')
    conn = common.get_connection()
    trans = conn.begin()
    try:
        # Insert a first history row for users that have a value but no history yet
        conn.execute("""
            INSERT INTO ValueHistory (user_id, value)
            SELECT id, value FROM User
            WHERE (SELECT count(value) FROM ValueHistory WHERE user_id=User.id)=0
              AND value IS NOT NULL
        """)
        # Append a new history row for users whose value changed since the last snapshot
        conn.execute("""
            INSERT INTO ValueHistory (user_id, value)
            SELECT User.id, User.value
            FROM User
            WHERE User.value != (SELECT value FROM ValueHistory
                                 WHERE user_id=User.id
                                 ORDER BY timestamp DESC LIMIT 1)
        """)
        trans.commit()
    except:
        trans.rollback()
        raise
    conn.close()
    log.info('..done.')
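# The archive_* and calculate_* jobs in this collection only assume that common.get_logger()
# returns a standard logger and that common.get_connection() returns a SQLAlchemy 1.x-style
# connection with begin()/execute()/close(). (Other snippets here use a different common
# module, e.g. a DB-API or CouchDB connection.) A minimal sketch under those assumptions;
# the module layout and engine URL are placeholders, not the project's actual code:

import logging
import sqlalchemy

_engine = None

def get_logger():
    # Plain stdlib logger; the real project may configure handlers differently.
    logging.basicConfig(level=logging.INFO)
    return logging.getLogger('jobs')

def get_connection():
    # Lazily created engine; the URL here is an assumption.
    global _engine
    if _engine is None:
        _engine = sqlalchemy.create_engine('mysql://user:password@localhost/appdb')
    return _engine.connect()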
def get_because_of_hf(id_list, visit_list, path):
    conn = common.get_connection()
    readmission_dict = dict()
    with conn.cursor() as cursor:
        # Diagnoses whose description matches cardiac terms
        # (heart, valve, atrial fibrillation, coronary artery, artery, premature beats).
        sql = "select patient_id, max(visit_id), diagnosis_desc from diagnosis where diagnosis_type='A' and " \
              "REGEXP_LIKE(diagnosis_desc,'心|尖瓣|房颤|冠脉|冠状动脉|动脉|早搏') group by patient_id,diagnosis_desc"
        for row in cursor.execute(sql):
            if row[0] not in readmission_dict:
                if row[0] in id_list and row[1] in visit_list:
                    readmission_dict[row[0]] = 1

    with open(path, 'r', encoding='UTF-8', newline="") as file:
        csv_reader = csv.reader(file)
        lines = []
        for i, line in enumerate(csv_reader):
            if i == 0:
                # Column header: "cardiac readmission (yes/no)".
                line.append("是否心源性再入院")
            else:
                patient_id = line[0]
                line.append(str(readmission_dict.get(patient_id, -1)))
            lines.append(line)
    with open(path, 'w', encoding='UTF-8', newline="") as file:
        writer = csv.writer(file)
        writer.writerows(lines)
    return readmission_dict
def get_readmission(path, patient_id_list=None):
    conn = common.get_connection()
    visit_dict = dict()
    discharged_time = dict()
    with conn.cursor() as cursor:
        # sql = "select patient_id, max(visit_id) from pat_visit where visit_id>=2 group by patient_id "
        # sql = "select patient_id,max(visit_id),discharge_date_time from pat_visit group by patient_id, discharge_date_time"
        sql = "select patient_id,max(visit_id),discharge_date_time from pat_visit where visit_id>=2 " \
              "group by patient_id, discharge_date_time"
        for row in cursor.execute(sql):
            visit_dict[row[0]] = row[1]
            discharged_time[row[0]] = row[2]

    if patient_id_list is not None:
        selected_dict = dict()
        for item in patient_id_list:
            selected_dict[item] = str(visit_dict[item])
        visit_dict = selected_dict

    with open(path, 'w+', encoding='utf-8-sig', newline="") as file:
        matrix_to_write = []
        csv_write = csv.writer(file)
        head = ['pat_id', 'visit_id', 'discharged_time']
        matrix_to_write.append(head)
        for patient_id in visit_dict:
            line = list()
            line.append(patient_id)
            line.append(visit_dict[patient_id])
            line.append(discharged_time[patient_id])
            matrix_to_write.append(line)
        csv_write.writerows(matrix_to_write)
    return visit_dict
def lambda_handler(event, context):
    connection_id = event["requestContext"].get("connectionId")
    data = json.loads(event['body'])['data']
    room = data['room']
    token = data.get('token')
    user = get_user(token)
    if user:
        previous_joined_room_ids = []
        connection = get_connection(connection_id)
        if connection:
            previous_joined_room_ids = connection['rooms']

        delete_connection_from_rooms(event, connection_id, user, [room['id']])
        room_ids = [r_id for r_id in previous_joined_room_ids if r_id != room['id']]

        # save connection - {'user':{}, 'rooms':[]}
        save_connection(connection_id, user, room_ids)
        # save user - {'connections':[]}
        # save_user(connection_id, user['id'])  # TODO: client shouldn't see other user's connections

        res = {"name": "left room", "data": {"roomId": room['id']}}
        return {'statusCode': 200, 'body': json.dumps(res)}
    else:
        return {'statusCode': 400, 'body': json.dumps('not logged in!')}
def run_transactions():
    logging.basicConfig(level=logging.DEBUG)
    conn = common.get_connection()
    cursor = conn.cursor()
    try:
        while True:
            run_transaction_iteration(conn, cursor)
            logging.info('Sleeping for 5 seconds..')
            time.sleep(5)
    finally:
        # Only reached when the loop is interrupted (e.g. KeyboardInterrupt).
        cursor.close()
        conn.close()
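# run_transaction_iteration() is defined elsewhere in the project; the polling loop above
# only assumes it takes the shared connection and cursor and processes whatever work is
# currently pending. A purely hypothetical sketch of that shape (the table and column
# names are invented for illustration, not taken from the project):

def run_transaction_iteration(conn, cursor):
    # Hypothetical: pick up unprocessed transactions and mark them done.
    cursor.execute("SELECT id FROM Transactions WHERE processed = 0")
    for (txn_id,) in cursor.fetchall():
        cursor.execute("UPDATE Transactions SET processed = 1 WHERE id = %s", (txn_id,))
    conn.commit()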
def get_diuretic(id_list, mapping_file, path):
    conn = common.get_connection()
    diuretic_name_map = dict()
    # Map every drug-name variant in the mapping file to its normalized (first-column) name.
    with open(mapping_file, 'r', encoding='gbk', newline="") as file:
        csv_reader = csv.reader(file)
        for line in csv_reader:
            for i in range(1, len(line)):
                if len(line[i]) <= 1:
                    continue
                diuretic_name_map[line[i]] = line[0]

    diuretic_dict = dict()
    for patient_id in id_list:
        diuretic_dict[patient_id] = {'Potassium diuretic': 0, 'Urine diuretic': 0,
                                     'Thiazide diuretic': 0, 'Receptor antagonist': 0}

    cursor = conn.cursor()
    # Drug orders from every visit before the patient's last one.
    sql = "select patient_id, order_text from orders temp1 where order_class = 'A' and " \
          "temp1.visit_id < (select temp2.maxid from (select patient_id, max(visit_id) as maxid " \
          "from orders group by patient_id) temp2 where temp1.patient_id = temp2.patient_id) "
    for row in cursor.execute(sql):
        patient_id, order_text = row
        if patient_id in id_list:
            for item in diuretic_name_map:
                if order_text is not None and item in order_text:
                    normalized_name = diuretic_name_map[item]
                    if normalized_name == '保钾利尿剂':    # potassium-sparing diuretic
                        diuretic_dict[patient_id]['Potassium diuretic'] = 1
                    if normalized_name == '袢利尿剂':      # loop diuretic
                        diuretic_dict[patient_id]['Urine diuretic'] = 1
                    if normalized_name == '噻嗪类利尿剂':  # thiazide diuretic
                        diuretic_dict[patient_id]['Thiazide diuretic'] = 1
                    if normalized_name == '受体拮抗剂':    # receptor antagonist
                        diuretic_dict[patient_id]['Receptor antagonist'] = 1

    with open(path, 'r', encoding='ISO-8859-1', newline="") as file:
        csv_reader = csv.reader(file)
        lines = []
        for i, line in enumerate(csv_reader):
            if i == 0:
                line.append("Potassium diuretic")
                line.append("Urine diuretic")
                line.append("Thiazide diuretic")
                line.append("Receptor antagonist")
            else:
                patient_id = line[0]
                result = diuretic_dict.get(patient_id, [-1, -1, -1, -1])
                if isinstance(result, dict):
                    result = [result['Potassium diuretic'], result['Urine diuretic'],
                              result['Thiazide diuretic'], result['Receptor antagonist']]
                line.extend(result)
            lines.append(line)
    with open(path, 'w', encoding='ISO-8859-1', newline="") as file:
        writer = csv.writer(file)
        writer.writerows(lines)
    return diuretic_dict
def get_beta(id_list, mapping_file, path):
    conn = common.get_connection()
    beta_name_map = dict()
    # Map every drug-name variant in the mapping file to its normalized (first-column) name.
    with open(mapping_file, 'r', encoding='gbk', newline="") as file:
        csv_reader = csv.reader(file)
        for line in csv_reader:
            for i in range(1, len(line)):
                if len(line[i]) <= 1:
                    continue
                beta_name_map[line[i]] = line[0]

    beta_dict = dict()
    for patient_id in id_list:
        beta_dict[patient_id] = {'Metoprolol': 0, 'Bisoprol': 0, 'Carvedilol': 0}

    cursor = conn.cursor()
    # Drug orders from every visit before the patient's last one.
    sql = "select patient_id, order_text from orders temp1 where order_class = 'A' and " \
          "temp1.visit_id < (select temp2.maxid from (select patient_id, max(visit_id) as maxid " \
          "from orders group by patient_id) temp2 where temp1.patient_id = temp2.patient_id) "
    for row in cursor.execute(sql):
        patient_id, order_text = row
        if patient_id in id_list:
            # Iterate over the name variants, not over beta_dict (which is keyed by patient id).
            for item in beta_name_map:
                if order_text is not None and item in order_text:
                    normalized_name = beta_name_map[item]
                    if normalized_name == '美托洛尔':  # metoprolol
                        beta_dict[patient_id]['Metoprolol'] = 1
                    if normalized_name == '比索洛尔':  # bisoprolol
                        beta_dict[patient_id]['Bisoprol'] = 1
                    if normalized_name == '卡维地洛':  # carvedilol
                        beta_dict[patient_id]['Carvedilol'] = 1

    with open(path, 'r', encoding='ISO-8859-1', newline="") as file:
        csv_reader = csv.reader(file)
        lines = []
        for i, line in enumerate(csv_reader):
            if i == 0:
                line.append("Metoprolol")
                line.append("Bisoprol")
                line.append("Carvedilol")
            else:
                patient_id = line[0]
                # Three columns are appended, so the fallback also needs three values.
                result = beta_dict.get(patient_id, [-1, -1, -1])
                if isinstance(result, dict):
                    result = [result['Metoprolol'], result['Bisoprol'], result['Carvedilol']]
                line.extend(result)
            lines.append(line)
    with open(path, 'w', encoding='ISO-8859-1', newline="") as file:
        writer = csv.writer(file)
        writer.writerows(lines)
    return beta_dict
def calculate_price_changes():
    log = common.get_logger()
    log.info('Calculating price changes..')
    conn = common.get_connection()
    trans = conn.begin()
    try:
        conn.execute("truncate PriceChange")
        result = conn.execute("SELECT id FROM Entity")
        entities = result.fetchall()
        intervals = [("INTERVAL 1 DAY", "1D"),
                     ("INTERVAL 7 DAY", "7D"),
                     ("INTERVAL 1 MONTH", "1M"),
                     ("INTERVAL 3 MONTH", "3M"),
                     ("INTERVAL 6 MONTH", "6M"),
                     ("INTERVAL 1 YEAR", "1Y")]
        for entity in entities:
            eid = entity[0]
            for interval, label in intervals:
                change = calculate_price_change(conn, eid, interval)
                if change is None:
                    change = 0
                insert_price_change(conn, eid, label, change)
        trans.commit()
    except:
        trans.rollback()
        raise
    conn.close()
    log.info('..done.')
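# calculate_price_change() and insert_price_change() are project helpers that are not shown
# in this section. A hypothetical sketch of what they might look like, assuming the
# priceHistory table from archive_prices() and a PriceChange(entity_id, period, change)
# table; the real implementations may well differ:

def calculate_price_change(conn, entity_id, interval):
    # Difference between the current price and the newest snapshot older than `interval`.
    result = conn.execute(
        """SELECT e.price - ph.price
           FROM entity e
           JOIN priceHistory ph ON ph.entity_id = e.id
           WHERE e.id = %s AND ph.timestamp <= DATE_SUB(NOW(), %s)
           ORDER BY ph.timestamp DESC LIMIT 1""" % (entity_id, interval))
    row = result.fetchone()
    return None if row is None else row[0]

def insert_price_change(conn, entity_id, period, change):
    # CHANGE is a reserved word in MySQL, hence the backticks.
    conn.execute("INSERT INTO PriceChange (entity_id, period, `change`) VALUES (%s, '%s', %s)"
                 % (entity_id, period, change))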
def get_pharmacy(id_list, mapping_file, path):
    conn = common.get_connection()
    angiotensin_name_map = dict()
    with open(mapping_file, 'r', encoding='gbk', newline='') as file:
        csv_reader = csv.reader(file)
        for line in csv_reader:
            for i in range(1, len(line)):
                if len(line[i]) <= 1:
                    continue
                angiotensin_name_map[line[i]] = line[0]

    # angiotensin_dict is what this function ultimately returns.
    angiotensin_dict = dict()
    for patient_id in id_list:
        angiotensin_dict[patient_id] = {'ACEI': 0, 'ARB': 0, 'ARNI': 0}

    cursor = conn.cursor()
    # Drug orders from every visit before the patient's last one.
    sql = "select patient_id, order_text from orders temp1 where order_class = 'A' and " \
          "temp1.visit_id < (select temp2.maxid from (select patient_id, max(visit_id) as maxid " \
          "from orders group by patient_id) temp2 where temp1.patient_id = temp2.patient_id) "
    for row in cursor.execute(sql):
        patient_id, order_text = row
        if patient_id in id_list:
            for item in angiotensin_name_map:
                if order_text is not None and item in order_text:
                    normalized_name = angiotensin_name_map[item]
                    if normalized_name == 'ACEI':
                        angiotensin_dict[patient_id]['ACEI'] = 1
                    if normalized_name == 'ARB':
                        angiotensin_dict[patient_id]['ARB'] = 1
                    if normalized_name == 'ARNI':
                        angiotensin_dict[patient_id]['ARNI'] = 1

    with open(path, 'r', encoding='UTF-8', newline="") as file:
        csv_reader = csv.reader(file)
        lines = []
        for i, line in enumerate(csv_reader):
            if i == 0:
                line.append("ACEI")
                line.append("ARB")
                line.append("ARNI")
            else:
                patient_id = line[0]
                result = angiotensin_dict.get(patient_id, [-1, -1, -1])
                if isinstance(result, dict):
                    result = [result['ACEI'], result['ARB'], result['ARNI']]
                line.extend(result)
            lines.append(line)
    with open(path, 'w', encoding='UTF-8', newline="") as file:
        writer = csv.writer(file)
        writer.writerows(lines)
    return angiotensin_dict
def calculate_value_changes():
    log = common.get_logger()
    log.info('Calculating value changes..')
    conn = common.get_connection()
    trans = conn.begin()
    try:
        conn.execute("DELETE FROM ValueChange where 1=1")
        result = conn.execute("SELECT id, value FROM User")
        users = result.fetchall()
        intervals = [("INTERVAL 1 DAY", "1D"),
                     ("INTERVAL 7 DAY", "7D"),
                     ("INTERVAL 1 MONTH", "1M"),
                     ("INTERVAL 3 MONTH", "3M"),
                     ("INTERVAL 6 MONTH", "6M"),
                     ("INTERVAL 1 YEAR", "1Y")]
        for user in users:
            uid = user[0]
            for interval, label in intervals:
                change = calculate_value_change(conn, uid, interval)
                if change is None:
                    change = 0
                insert_value_change(conn, uid, label, change)
        trans.commit()
    except:
        trans.rollback()
        raise
    conn.close()
    log.info('..done.')
def calc_prices(include_untouched=False):
    log = common.get_logger()
    log.info('Calculating Prices..')
    conn = common.get_connection()
    trans = conn.begin()
    try:
        # Recalculate the volume-weighted average price for every touched entity.
        sql = """update Entity
                 set price = (select (sum(quantity * cost)/sum(quantity))
                              from Shares
                              where entity_id = Entity.id),
                     touched = 0
                 where Entity.touched=1"""
        if include_untouched:
            sql += ' or Entity.touched=0'
        conn.execute(sql)
        trans.commit()
    except:
        trans.rollback()
        raise
    conn.close()
    log.info('..done.')
def lambda_handler(event, context):
    connection_id = event["requestContext"].get("connectionId")
    data = json.loads(event['body'])['data']
    rooms = data['rooms']
    room_ids = [r['id'] for r in rooms]
    token = data.get('token')
    get_chat_history = data.get('getChatHistory')
    user = get_user(token)
    if user:
        previous_joined_room_ids = []
        connection = get_connection(connection_id)
        if connection:
            previous_joined_room_ids = connection['rooms']

        # join rooms not already joined
        joined_rooms = {}
        for room in rooms:
            room_info = join_room(connection_id, user, room['id'], room['type'], event)
            if get_chat_history:
                room_info['chatHistory'] = get_room_messages(room['id'])
            joined_rooms[room['id']] = room_info

        # leave rooms not in the payload
        for room_id in previous_joined_room_ids:
            if room_id not in room_ids:
                delete_connection_from_rooms(event, connection_id, user, [room_id])

        # save connection - {'user':{}, 'rooms':[]}
        save_connection(connection_id, user, room_ids)
        # save user - {'connections':[]}
        # save_user(connection_id, user['id'])  # TODO: client shouldn't see other user's connections

        res = {"name": "room info", "data": joined_rooms}
        return {'statusCode': 200, 'body': json.dumps(res)}
    else:
        return {'statusCode': 400, 'body': json.dumps('not logged in!')}
def calculate_points_changes():
    log = common.get_logger()
    log.info('Calculating points changes..')
    conn = common.get_connection()
    trans = conn.begin()
    try:
        # The DELETE has to run on the connection, not on the transaction handle.
        conn.execute("DELETE FROM PointsChange where 1=1")
        result = conn.execute("SELECT id FROM Entity")
        entities = result.fetchall()
        intervals = [("INTERVAL 1 DAY", "1D"),
                     ("INTERVAL 7 DAY", "7D"),
                     ("INTERVAL 3 MONTH", "3M"),
                     ("INTERVAL 6 MONTH", "6M"),
                     ("INTERVAL 1 YEAR", "1Y")]
        for entity in entities:
            eid = entity[0]
            for interval, label in intervals:
                change = calculate_price_change(conn, eid, interval)
                if change is not None:
                    insert_price_change(conn, eid, label, change)
        trans.commit()
    except:
        trans.rollback()
        raise
    conn.close()
    log.info('..done.')
def calculate_values():
    log = common.get_logger()
    log.info('Calculating Values..')
    conn = common.get_connection()
    trans = conn.begin()
    try:
        # Step 1: value = market value of all held shares plus cash, for every touched user.
        conn.execute("""update User
                        set value = ((select sum(Shares.quantity * Entity.price)
                                      from Shares
                                      inner join Entity on Entity.id = entity_id
                                      where Shares.user_id=User.id) + User.cash),
                            valueTouched = 0
                        where User.valueTouched=1""")
        # Step 2: users who own no shares get a NULL value above; set their value to cash.
        conn.execute("""UPDATE User
                        set User.value = User.cash
                        where (select count(id) from Shares where Shares.user_id=User.id)=0""")
        trans.commit()
    except:
        trans.rollback()
        raise
    conn.close()
    log.info('..done.')
def get_discharge_date(id_list, visit_list, path):
    conn = common.get_connection()
    admission_date_dict = dict()
    with conn.cursor() as cursor:
        sql = "select patient_id,max(visit_id),discharge_date_time from pat_visit " \
              "group by patient_id, discharge_date_time"
        for row in cursor.execute(sql):
            if row[0] not in admission_date_dict:
                if row[0] in id_list and row[1] in visit_list:
                    admission_date_dict[row[0]] = row[2]

    with open(path, 'r', encoding='ISO-8859-1', newline="") as file:
        csv_reader = csv.reader(file)
        lines = []
        for i, line in enumerate(csv_reader):
            if i == 0:
                line.append("discharge_date_time")
            else:
                patient_id = line[0]
                line.append(str(admission_date_dict.get(patient_id, -1)))
            lines.append(line)
    with open(path, 'w', encoding='ISO-8859-1', newline="") as file:
        writer = csv.writer(file)
        writer.writerows(lines)
    return admission_date_dict
def lambda_handler(event, context):
    # Called by the chatbox, not by the injection script,
    # so it's OK to include the chat history.
    connection_id = event["requestContext"].get("connectionId")
    data = json.loads(event['body'])['data']
    room = data['room']
    token = data.get('token')
    user = get_user(token)
    if user:
        previous_joined_room_ids = []
        newly_joined_rooms = {}
        connection = get_connection(connection_id)
        if connection:
            previous_joined_room_ids = connection['rooms']

        room_info = join_room(connection_id, user, room['id'], room['type'], event)
        room_info['chatHistory'] = get_room_messages(room['id'])
        newly_joined_rooms[room['id']] = room_info

        # save connection - {'user':{}, 'rooms':[]}
        save_connection(connection_id, user, list(set(previous_joined_room_ids + [room['id']])))
        # save user - {'connections':[]}
        # save_user(connection_id, user['id'])  # TODO: client shouldn't see other user's connections

        res = {"name": "room info", "data": newly_joined_rooms}
        return {'statusCode': 200, 'body': json.dumps(res)}
    else:
        return {'statusCode': 400, 'body': json.dumps('not logged in!')}
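# The lambda_handler variants above all read the same envelope from the WebSocket request:
# a 'data' object carrying the room (or a 'rooms' list for the multi-room handler), an auth
# token, and optional flags. A hypothetical test event showing that shape for the
# single-room handlers; every field value here is invented for illustration:

import json

example_event = {
    "requestContext": {"connectionId": "abc123="},
    "body": json.dumps({
        "data": {
            "room": {"id": "room-42", "type": "public"},
            "token": "example-jwt-token",
            "getChatHistory": True,
        }
    }),
}
# lambda_handler(example_event, None) would then join room-42 for the user behind the token.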
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.

import common, sys

if __name__ == "__main__":
    connection = common.get_connection()
    try:
        print "Connected to:", connection.server.address
        # Accessing connection.modules.platform is equivalent to importing the
        # 'platform' module from the remote interpreter.
        print "The remote host's platform is:", \
            connection.modules.platform.platform()
    finally:
        connection.close()
getImageFileIndex['map'] = """
function(doc){
    if(doc.type==="file"){
        const att=doc._attachments;
        const contentType=att[Object.keys(att)[0]].content_type;
        if(contentType.substring(0,6)==="image/"){
            emit(doc.name,doc);
        }
    }
}
"""
getImageFileIndex['reduce'] = "_count"

args = common.parse_args()
conn = common.get_connection(args.use_ssl, args.couch_server, args.couch_port)
credentials = common.get_credentials(args.adminuser, args.adminpass)
get_headers = common.get_headers(credentials)
put_headers = common.put_headers(credentials)

# Update all the wiki design docs
conn.request("GET", '/_all_dbs', headers=get_headers)
db_list = common.decode_response(conn.getresponse())
wiki_list = [db for db in db_list if db[0:5] == "wiki_"]

# Update the wiki dbs
for wiki in wiki_list:
    print("Examining " + wiki)
    # Fetch design doc
    ddoc_uri = '/' + wiki + '/_design/' + wiki_ddoc
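# Once a design document containing getImageFileIndex has been pushed, the view can be
# queried over the same HTTP connection. A hypothetical request, assuming the design doc
# is the one named by wiki_ddoc and the view is exposed as 'getImageFileIndex'
# (reduce=false returns the emitted file docs rather than the _count total):

view_uri = '/' + wiki + '/_design/' + wiki_ddoc + '/_view/getImageFileIndex?reduce=false'
conn.request("GET", view_uri, headers=get_headers)
image_files = common.decode_response(conn.getresponse())
for row in image_files.get('rows', []):
    print(row['key'])  # the image file's name, as emitted by the map function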
def process_events(start_timestamp, end_timestamp, override):
    log = common.get_logger()
    log.info("Processing events..")
    conn = common.get_connection()
    trans = conn.begin()
    try:
        if override not in ["True", "False"]:
            print "Invalid override argument."
            sys.exit()

        # Find all events in the desired time range
        result = conn.execute(
            """
            SELECT Event.id, Event.entity_id, Event.action_id, Event.eventTime,
                   (Action.points * Event.quantity)
            FROM Event
            INNER JOIN Action ON Event.action_id = Action.id
            WHERE Event.eventTime between %s and %s
            """
            % (start_timestamp, end_timestamp)
        )
        rows = result.fetchall()
        for row in rows:
            event_id = row[0]
            entity_id = row[1]
            event_time = row[3]
            points = row[4]

            # Find all users owning related shares
            result = conn.execute(
                """
                select User.id, Shares.quantity
                from User
                INNER JOIN Shares ON User.id = Shares.user_id
                WHERE Shares.entity_id = %s
                  AND (('%s' BETWEEN Shares.startTime and Shares.endTime)
                       OR ('%s' > Shares.startTime AND Shares.endTime = '00000000'))
                """
                % (entity_id, event_time, event_time)
            )
            user_rows = result.fetchall()
            for userRow in user_rows:
                user_id = userRow[0]
                num_shares = userRow[1]

                result = conn.execute(
                    """
                    select COUNT(*) FROM Points
                    WHERE Points.event_id = %s AND Points.user_id = %s
                    """
                    % (event_id, user_id)
                )
                res = result.fetchone()
                existing_records = res[0]
                if override == "False" and existing_records > 0:
                    continue

                amount = num_shares * points
                conn.execute(
                    """
                    INSERT INTO Points (user_id, event_id, amount)
                    VALUES (%s, %s, %s)
                    """
                    % (user_id, event_id, amount)
                )
                print user_id, event_id, amount
        trans.commit()
    except:
        trans.rollback()
        raise
    conn.close()
    log.info("..done.")
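# process_events() builds its SQL with % string interpolation, which works for trusted job
# arguments but invites quoting bugs and injection if the inputs ever come from users.
# A hedged sketch of the first query rewritten with bound parameters instead, assuming a
# SQLAlchemy-style connection as the rest of these jobs appear to use; this is a fragment
# meant to stand in for the event query inside the function, not a drop-in replacement:

from sqlalchemy import text

result = conn.execute(
    text("""
        SELECT Event.id, Event.entity_id, Event.action_id, Event.eventTime,
               (Action.points * Event.quantity)
        FROM Event
        INNER JOIN Action ON Event.action_id = Action.id
        WHERE Event.eventTime BETWEEN :start_ts AND :end_ts
    """),
    {"start_ts": start_timestamp, "end_ts": end_timestamp},
)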
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.

import common, sys, time

if __name__ == "__main__":
    connection = common.get_connection()
    try:
        print "Connected to:", connection.server.address
        # Write data to the stdout file descriptor in the remote interpreter.
        # This demonstrates the use of the background I/O redirector thread and
        # asynchronous requests.
        for line in sys.stdin:
            connection.modules.os.write(
                connection.modules.sys.stdout.fileno(), line)
        # Give the output a chance to come through. It would be nice to be able
        # to call "flush" on sys.stdout and have it block until the data is
        # written to the client interpreter's file.
        time.sleep(1)
        sys.stdout.flush()
    finally:
        connection.close()