def manage_task_type(type_id):
    """REST endpoint for one LTN_DEVELOP.TASK_TYPES row.

    GET    -> joined type/task-type record, or ('NOT FOUND', 404)
    PUT    -> update the row if it exists ('UPDATED'), else insert it ('CREATED')
    DELETE -> remove the row ('DELETED')
    """
    cursor = get_connection().cursor()
    try:
        if request.method == 'GET':
            cursor.execute('SELECT CODE, NAME, GROUP_ID, "GROUP", "LABEL", t.ID, tt.ID '
                           'FROM LTN_DEVELOP.TYPES t '
                           'JOIN LTN_DEVELOP.TASK_TYPES tt ON t.ID = tt.TYPE_ID '
                           'WHERE tt.ID = ?', (type_id,))
            row = cursor.fetchone()
            if row:
                return respond_with({"code": row[0], "name": row[1], "groupId": row[2],
                                     "group": row[3], "label": row[4],
                                     "type_id": row[5], "id": row[6]})
            return 'NOT FOUND', 404
        elif request.method == 'PUT':
            req = request.get_json()
            updated_type = req.get('type')
            cursor.execute('SELECT ID FROM LTN_DEVELOP.TASK_TYPES WHERE ID = ?', (type_id,))
            if cursor.fetchone():
                cursor.execute('UPDATE LTN_DEVELOP.TASK_TYPES SET ID = ?, LABEL = ?, TYPE_ID = ? '
                               'WHERE ID = ?',
                               (updated_type.get('id'), updated_type.get('label'),
                                updated_type.get('type_id'), type_id))
                get_connection().commit()
                return 'UPDATED', 200
            # no row with this id yet: PUT acts as an upsert and creates one
            cursor.execute('INSERT INTO LTN_DEVELOP.TASK_TYPES (LABEL, TASK_ID, TYPE_ID, RELATION) '
                           'VALUES (?, ?, ?, ?)',
                           (updated_type.get('label'), req.get('task'),
                            updated_type.get('type_id'), req.get('relation')))
            get_connection().commit()
            return 'CREATED', 200
        elif request.method == 'DELETE':
            cursor.execute('DELETE FROM LTN_DEVELOP.TASK_TYPES WHERE ID = ?', (type_id,))
            get_connection().commit()
            return 'DELETED', 200
    finally:
        # BUG FIX: the cursor was previously never closed (leaked on every request)
        cursor.close()
def get_user_documents(user_id):
    """List the user-document rows a user may see: their own plus all public ones.

    Only the logged-in user may query their own listing; anyone else gets 401.
    """
    if user_id != current_user.get_id():
        return "Not authorized to view the documents of this user.", 401
    cursor = get_connection().cursor()
    cursor.execute(
        "SELECT ID, USER_ID, DOCUMENT_ID, VISIBILITY, CREATED_AT, UPDATED_AT "
        "FROM LTN_DEVELOP.USER_DOCUMENTS "
        "WHERE USER_ID = ? OR VISIBILITY > 0 ORDER BY DOCUMENT_ID", (user_id, ))
    rows = cursor.fetchall()
    cursor.close()
    # timestamps are rendered as strings so the payload is JSON-serialisable
    user_documents = [{
        "id": row[0],
        "user_id": row[1],
        "document_id": row[2],
        "visibility": row[3],
        "created_at": row[4].strftime('%Y-%m-%d %H:%M:%S'),
        "updated_at": row[5].strftime('%Y-%m-%d %H:%M:%S'),
    } for row in rows]
    return respond_with(user_documents)
def manage_task(task_id):
    """REST endpoint for a single task.

    GET    -> task metadata plus per-document counts of visible user documents
    POST   -> create or update a task via stored procedure
    DELETE -> drop the task via stored procedure
    """
    cursor = get_connection().cursor()
    if request.method == 'GET':
        cursor.execute('SELECT t.id, t.name, t.domain, t.author, u.name '
                       'FROM LTN_DEVELOP.TASKS t LEFT OUTER JOIN LTN_DEVELOP.USERS u ON u.id = t.author '
                       'WHERE t.id = ?', (task_id,))
        result = cursor.fetchone()
        if result is None:
            # BUG FIX: an unknown task id used to raise TypeError on result[0]
            return 'NOT FOUND', 404
        cursor.execute('SELECT d.id, count(ud.id) '
                       'FROM LTN_DEVELOP.TASKS t '
                       'JOIN LTN_DEVELOP.DOCUMENTS d ON d.task = t.id '
                       'LEFT OUTER JOIN LTN_DEVELOP.USER_DOCUMENTS ud ON ud.document_id = d.id '
                       'AND (ud.visibility = 1 OR ud.user_id = ?) '
                       'WHERE t.id = ? '
                       'GROUP BY d.id ORDER BY d.id ASC', (current_user.get_id(), task_id))
        documents = [{'document_id': row[0], 'user_document_count': row[1]}
                     for row in cursor.fetchall()]
        return respond_with({'task_id': result[0], 'task_name': result[1],
                             'task_domain': result[2], 'user_id': result[3],
                             'user_name': result[4], 'documents': documents})
    elif request.method == 'POST':
        req = request.get_json()
        # presence of task_id decides between update and create
        if req.get('task_id') is not None:
            sql_to_prepare = 'CALL LTN_DEVELOP.update_task (?, ?, ?, ?, ?)'
        else:
            sql_to_prepare = 'CALL LTN_DEVELOP.add_task (?, ?, ?, ?, ?)'
        params = {
            'TASK_ID': req.get('task_id'),
            'TASK_NAME': req.get('task_name'),
            'TABLE_NAME': req.get('task_domain'),
            'ER_ANALYSIS_CONFIG': req.get('task_config'),
            'NEW_AUTHOR': req.get('user_id')
        }
        if params.get('TABLE_NAME') is None:
            generate_table_name(params)
        if params.get('NEW_AUTHOR') is None:
            params['NEW_AUTHOR'] = current_user.get_id()
        psid = cursor.prepare(sql_to_prepare)
        ps = cursor.get_prepared_statement(psid)
        try:
            cursor.execute_prepared(ps, [params])
            get_connection().commit()
        except Exception:
            # BUG FIX: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; the driver raises a
            # rows-affected warning that is deliberately ignored
            pass
        return 'OK', 200
    elif request.method == 'DELETE':
        sql_to_prepare = 'CALL LTN_DEVELOP.delete_task (?)'
        params = {'TASK_ID': task_id}
        psid = cursor.prepare(sql_to_prepare)
        ps = cursor.get_prepared_statement(psid)
        try:
            cursor.execute_prepared(ps, [params])
            get_connection().commit()
        except Exception:
            pass  # rows-affected warning, see POST branch
        return 'OK', 200
def get_tasks():
    """Return every task with its configuration and the author's id and name."""
    cursor = get_connection().cursor()
    cursor.execute('SELECT t.id, t.name, t.domain, t.config, t.author, u.name '
                   'FROM LTN_DEVELOP.TASKS t LEFT OUTER JOIN LTN_DEVELOP.USERS u ON u.id = t.author ORDER BY t.id')
    columns = ('task_id', 'task_name', 'task_domain', 'task_config', 'user_id', 'user_name')
    return respond_with([dict(zip(columns, row)) for row in cursor.fetchall()])
def get_document(document_id):
    """Load a document for the current user and return it as JSON.

    Reconnects first if the DB connection is gone; on any load failure the
    connection is reset and a 500 is returned.
    """
    if get_connection() is None:
        try_reconnecting()
    if request.method == 'GET':
        try:
            result = load_document(document_id, current_user.get_id())
            return respond_with(result)
        # BUG FIX: legacy `except Exception, e` / `print e` is a syntax error
        # on Python 3; `as` and print() behave identically on Python 2.6+
        except Exception as e:
            print(e)
            reset_connection()
            return 'Error while loading the document.', 500
def return_entities():
    """Compare two users' entity annotations on one document.

    Treats user2's entities as the gold standard and user1's as predictions,
    and counts, per gold entity: exact matches, left-/right-aligned partial
    matches, overlaps, and misses; type disagreements are tallied per category
    in ``wrong_type``.
    """
    req = request.get_json()
    document_id = req['document_id']
    user1 = req['user1']  # predictions
    user2 = req['user2']  # gold standard
    cursor = get_connection().cursor()
    # both lists sorted by start offset so they can be swept in one pass
    predictions = sorted(get_entities_for_user_document(cursor, document_id, user1), key=lambda x: x.start)
    gold_standard = sorted(get_entities_for_user_document(cursor, document_id, user2), key=lambda x: x.start)
    p = 0  # index of the first prediction that may still overlap the current gold entity
    matches, left_aligns, right_aligns, overlaps, misses, wrong_type = 0, 0, 0, 0, 0, {}
    for entity in gold_standard:
        if len(predictions) == 0:
            misses += 1
            continue
        # advance past predictions that end before this gold entity starts
        while predictions[p].end < entity.start:
            if p == len(predictions) - 1:
                break
            p += 1
        can_miss = True  # becomes False once any candidate overlaps this entity
        for candidate in predictions[p:]:
            if candidate.start > entity.end:
                # candidate lies entirely after the entity: nothing further can overlap
                if can_miss:
                    misses += 1
                can_miss = False
                break
            # NOTE(review): this break skips candidates that end before the
            # entity starts and stops the scan — possibly intended as a
            # `continue`; confirm against the evaluation spec
            if candidate.end < entity.start:
                break
            can_miss = False
            if candidate.start != entity.start:
                if candidate.end == entity.end:
                    # same end, different start
                    if candidate.type != entity.type:
                        wrong_type["right-aligns"] = wrong_type.get("right-aligns", 0) + 1
                    right_aligns += 1
                else:
                    # different start and end but still overlapping
                    if candidate.type != entity.type:
                        wrong_type["overlaps"] = wrong_type.get("overlaps", 0) + 1
                    overlaps += 1
            else:
                if candidate.end == entity.end:
                    # identical span
                    if candidate.type != entity.type:
                        wrong_type["matches"] = wrong_type.get("matches", 0) + 1
                    matches += 1
                else:
                    # same start, different end
                    if candidate.type != entity.type:
                        wrong_type["left-aligns"] = wrong_type.get("left-aligns", 0) + 1
                    left_aligns += 1
        if can_miss:
            misses += 1
    return respond_with({"matches": matches, "left-aligns": left_aligns, "right-aligns": right_aligns, "overlaps": overlaps, "misses": misses, "wrong-type": wrong_type})
def login():
    """Authenticate a user from a JSON {username, password} payload.

    On success the user is logged in (persistent session) and their attributes
    are returned with the token blanked out. On failure an explicit 401 is
    returned; DB errors reset the connection and yield a 500.
    """
    if get_connection() is None:
        try_reconnecting()
    req = request.get_json()
    if req and 'username' in req and 'password' in req:
        try:
            user = load_user(req['username'])
            if user and req['password'] == user.token:
                login_user(user, remember=True)
                user.token = None  # never echo the secret back to the client
                return respond_with(user.__dict__)
        # BUG FIX: legacy `except Exception, e` is a syntax error on Python 3
        except Exception as e:
            reset_connection()
            return str(e) + " Please try again later.", 500
    # BUG FIX: previously fell through returning None, which crashes Flask
    return 'Invalid credentials provided.', 401
def manage_task_type(type_id):
    """CRUD endpoint for a single LTN_DEVELOP.TASK_TYPES row.

    GET returns the joined type record (or 404), PUT upserts, DELETE removes.
    """
    cursor = get_connection().cursor()
    method = request.method
    if method == 'GET':
        cursor.execute(
            'SELECT CODE, NAME, GROUP_ID, "GROUP", "LABEL", t.ID, tt.ID '
            'FROM LTN_DEVELOP.TYPES t '
            'JOIN LTN_DEVELOP.TASK_TYPES tt ON t.ID = tt.TYPE_ID '
            'WHERE tt.ID = ?', (type_id, ))
        record = cursor.fetchone()
        if record is None:
            return 'NOT FOUND', 404
        keys = ("code", "name", "groupId", "group", "label", "type_id", "id")
        return respond_with(dict(zip(keys, record)))
    if method == 'PUT':
        payload = request.get_json()
        type_info = payload.get('type')
        cursor.execute('SELECT ID FROM LTN_DEVELOP.TASK_TYPES WHERE ID = ?',
                       (type_id, ))
        if cursor.fetchone():
            cursor.execute(
                'UPDATE LTN_DEVELOP.TASK_TYPES SET ID = ?, LABEL = ?, TYPE_ID = ? '
                'WHERE ID = ?',
                (type_info.get('id'), type_info.get('label'),
                 type_info.get('type_id'), type_id))
            get_connection().commit()
            return 'UPDATED', 200
        # not present yet: PUT doubles as create
        cursor.execute(
            'INSERT INTO LTN_DEVELOP.TASK_TYPES (LABEL, TASK_ID, TYPE_ID, RELATION) '
            'VALUES (?, ?, ?, ?)',
            (type_info.get('label'), payload.get('task'),
             type_info.get('type_id'), payload.get('relation')))
        get_connection().commit()
        return 'CREATED', 200
    if method == 'DELETE':
        cursor.execute('DELETE FROM LTN_DEVELOP.TASK_TYPES WHERE ID = ?',
                       (type_id, ))
        get_connection().commit()
        return 'DELETED', 200
def get_tasks():
    """List all tasks (id, name, domain, config) with author id and name."""
    cursor = get_connection().cursor()
    cursor.execute(
        'SELECT t.id, t.name, t.domain, t.config, t.author, u.name '
        'FROM LTN_DEVELOP.TASKS t LEFT OUTER JOIN LTN_DEVELOP.USERS u ON u.id = t.author ORDER BY t.id'
    )
    tasks = []
    for tid, name, domain, config, author_id, author_name in cursor.fetchall():
        tasks.append({'task_id': tid,
                      'task_name': name,
                      'task_domain': domain,
                      'task_config': config,
                      'user_id': author_id,
                      'user_name': author_name})
    return respond_with(tasks)
def predict():
    """Run entity and/or relation prediction for a document.

    Expects JSON with task_id, document_id, optional jobs, user_id and
    current_state. The prediction user's previous copy of the document is
    dropped, predictions are stored under a dedicated prediction user, and the
    freshly predicted document is returned.
    """
    data = request.get_json()
    task_id = data['task_id']
    jobs = data.get('jobs', [PREDICT_ENTITIES])
    document_id = data['document_id']
    user_id = data.get('user_id', current_user.get_id())
    current_prediction_user = prediction_user_for_user(user_id)
    prediction_user_doc_id = load_user_doc_id(document_id, current_prediction_user)
    # start from a clean slate for the prediction user
    delete_user_document(prediction_user_doc_id)
    # BUG FIX: json.loads(None) raises TypeError, so the fallback to
    # load_document below was unreachable when no current_state was sent
    raw_state = data.get('current_state')
    document_data = json.loads(raw_state) if raw_state is not None else None
    if document_data is None:
        document_data = load_document(document_id, user_id)
    else:
        # the current status has to be saved first in order to disambiguate the ids of the annotations
        user_doc_id = load_user_doc_id(document_id, current_user.get_id())
        successful = save_document(document_data, user_doc_id, document_id,
                                   current_user.get_id(), task_id)
        if not successful:
            return "Could not save the document", 500
    if PREDICT_ENTITIES in jobs:
        cursor = get_connection().cursor()
        cursor.execute(
            'INSERT INTO "LTN_DEVELOP"."USER_DOCUMENTS" '
            'VALUES (?, ?, ?, 0, current_timestamp, current_timestamp)', (
                prediction_user_doc_id,
                current_prediction_user,
                document_id,
            ))
        cursor.close()
        get_connection().commit()
        predict_entities(document_id, task_id, prediction_user_doc_id)
    if PREDICT_RELATIONS in jobs:
        if PREDICT_ENTITIES not in jobs:
            # relations-only run: persist the given entities first
            save_document(document_data, prediction_user_doc_id, document_id,
                          current_prediction_user, task_id, False)
        predicted_pairs = predict_relations(prediction_user_doc_id, task_id)
        if PREDICT_ENTITIES not in jobs:
            remove_entities_without_relations(predicted_pairs, document_data,
                                              prediction_user_doc_id)
    document_data = load_document(document_id, current_user.get_id(), True)
    return respond_with(document_data)
def get_document_details(document_id):
    """Summarise each visible user copy of a document.

    For every USER_DOCUMENTS row that is public or owned by the current user,
    reports the owner, entity and relation counts, visibility, and whether the
    copy belongs to the requesting user.
    """
    cursor = get_connection().cursor()
    user_id = current_user.get_id()
    cursor.execute(
        'SELECT d.id, MIN(d.user_id), MIN(u.name), COUNT(DISTINCT e.id), COUNT(distinct p.id), MIN(d.visibility) '
        'FROM LTN_DEVELOP.USER_DOCUMENTS d '
        'JOIN LTN_DEVELOP.USERS u ON u.id = d.user_id '
        'LEFT OUTER JOIN LTN_DEVELOP.ENTITIES e ON e.user_doc_id = d.id '
        'LEFT OUTER JOIN LTN_DEVELOP.PAIRS p ON p.user_doc_id = d.id AND p.ddi = 1 '
        'WHERE d.document_id = ? AND (d.visibility = 1 OR d.user_id = ?) '
        'GROUP BY d.id', (document_id, user_id))
    user_documents = [{
        'id': doc_id,
        'user_id': owner_id,
        'user_name': owner_name,
        'entities': entity_count,
        'pairs': pair_count,
        'visible': bool(visibility),
        'from_current_user': owner_id == user_id
    } for doc_id, owner_id, owner_name, entity_count, pair_count, visibility
      in cursor.fetchall()]
    return respond_with(user_documents)
def predict():
    """Run entity and/or relation prediction for a document and return the
    freshly predicted document for the current user.

    Expects JSON with task_id, document_id and optionally jobs, user_id and
    current_state. Predictions are stored under a dedicated prediction user.
    """
    data = request.get_json()
    task_id = data['task_id']
    jobs = data.get('jobs', [PREDICT_ENTITIES])
    document_id = data['document_id']
    user_id = data.get('user_id', current_user.get_id())
    current_prediction_user = prediction_user_for_user(user_id)
    prediction_user_doc_id = load_user_doc_id(document_id, current_prediction_user)
    # drop the prediction user's previous copy before predicting anew
    delete_user_document(prediction_user_doc_id)
    # NOTE(review): json.loads raises TypeError when 'current_state' is absent
    # (json.loads(None)), so the `is None` fallback below looks unreachable —
    # confirm whether callers always send current_state
    document_data = json.loads(data.get('current_state', None))
    if document_data is None:
        document_data = load_document(document_id, user_id)
    else:
        # the current status has to be saved first in order to disambiguate the ids of the annotations
        user_doc_id = load_user_doc_id(document_id, current_user.get_id())
        successful = save_document(document_data, user_doc_id, document_id, current_user.get_id(), task_id)
        if not successful:
            return "Could not save the document", 500
    if PREDICT_ENTITIES in jobs:
        # register the prediction user's document row, then predict entities
        cursor = get_connection().cursor()
        cursor.execute('INSERT INTO "LTN_DEVELOP"."USER_DOCUMENTS" '
                       'VALUES (?, ?, ?, 0, current_timestamp, current_timestamp)',
                       (prediction_user_doc_id, current_prediction_user, document_id,))
        cursor.close()
        get_connection().commit()
        predict_entities(document_id, task_id, prediction_user_doc_id)
    if PREDICT_RELATIONS in jobs:
        if PREDICT_ENTITIES not in jobs:
            # relations-only run: persist the supplied entities first
            save_document(document_data, prediction_user_doc_id, document_id, current_prediction_user, task_id, False)
        predicted_pairs = predict_relations(prediction_user_doc_id, task_id)
        if PREDICT_ENTITIES not in jobs:
            remove_entities_without_relations(predicted_pairs, document_data, prediction_user_doc_id)
    document_data = load_document(document_id, current_user.get_id(), True)
    return respond_with(document_data)
def get_task_entity_types(task_id):
    """List the entity (non-relation) annotation types configured for a task."""
    entity_types = get_task_types(task_id, relation=False)
    return respond_with(entity_types)
def serve_base_types():
    """Expose the globally available base annotation types as JSON."""
    base_types = get_base_types()
    return respond_with(base_types)
def manage_task(task_id):
    """Handle GET/POST/DELETE for one task.

    GET returns the task plus per-document counts of visible user documents;
    POST creates or updates via stored procedure; DELETE removes the task.
    """
    cursor = get_connection().cursor()
    if request.method == 'GET':
        cursor.execute(
            'SELECT t.id, t.name, t.domain, t.author, u.name '
            'FROM LTN_DEVELOP.TASKS t LEFT OUTER JOIN LTN_DEVELOP.USERS u ON u.id = t.author '
            'WHERE t.id = ?', (task_id, ))
        result = cursor.fetchone()
        if result is None:
            # BUG FIX: indexing a None result used to raise TypeError for
            # unknown task ids
            return 'NOT FOUND', 404
        cursor.execute(
            'SELECT d.id, count(ud.id) '
            'FROM LTN_DEVELOP.TASKS t '
            'JOIN LTN_DEVELOP.DOCUMENTS d ON d.task = t.id '
            'LEFT OUTER JOIN LTN_DEVELOP.USER_DOCUMENTS ud ON ud.document_id = d.id '
            'AND (ud.visibility = 1 OR ud.user_id = ?) '
            'WHERE t.id = ? '
            'GROUP BY d.id ORDER BY d.id ASC', (current_user.get_id(), task_id))
        documents = [{
            'document_id': row[0],
            'user_document_count': row[1]
        } for row in cursor.fetchall()]
        return respond_with({
            'task_id': result[0],
            'task_name': result[1],
            'task_domain': result[2],
            'user_id': result[3],
            'user_name': result[4],
            'documents': documents
        })
    elif request.method == 'POST':
        req = request.get_json()
        # an existing task_id selects the update procedure, otherwise add
        if req.get('task_id') is not None:
            sql_to_prepare = 'CALL LTN_DEVELOP.update_task (?, ?, ?, ?, ?)'
        else:
            sql_to_prepare = 'CALL LTN_DEVELOP.add_task (?, ?, ?, ?, ?)'
        params = {
            'TASK_ID': req.get('task_id'),
            'TASK_NAME': req.get('task_name'),
            'TABLE_NAME': req.get('task_domain'),
            'ER_ANALYSIS_CONFIG': req.get('task_config'),
            'NEW_AUTHOR': req.get('user_id')
        }
        if params.get('TABLE_NAME') is None:
            generate_table_name(params)
        if params.get('NEW_AUTHOR') is None:
            params['NEW_AUTHOR'] = current_user.get_id()
        psid = cursor.prepare(sql_to_prepare)
        ps = cursor.get_prepared_statement(psid)
        try:
            cursor.execute_prepared(ps, [params])
            get_connection().commit()
        except Exception:
            # BUG FIX: was a bare `except:`, which also caught SystemExit and
            # KeyboardInterrupt; the driver's rows-affected warning is ignored
            pass
        return 'OK', 200
    elif request.method == 'DELETE':
        sql_to_prepare = 'CALL LTN_DEVELOP.delete_task (?)'
        params = {'TASK_ID': task_id}
        psid = cursor.prepare(sql_to_prepare)
        ps = cursor.get_prepared_statement(psid)
        try:
            cursor.execute_prepared(ps, [params])
            get_connection().commit()
        except Exception:
            pass  # rows-affected warning, see POST branch
        return 'OK', 200
def get_user(user_id):
    """Look up one user by id and return their attributes, minus the token."""
    user = load_user(user_id)
    if user:
        user.token = None  # never leak the auth token to clients
        return respond_with(user.__dict__)
    return "User not found", 404
def get_users():
    """Return every known user as a list of attribute dicts."""
    cursor = get_connection().cursor()
    users = User.all(cursor)
    cursor.close()
    # BUG FIX: map() yields a lazy iterator on Python 3, which JSON
    # serialisers reject; a list comprehension is identical on Python 2
    # and correct on Python 3
    return respond_with([user.__dict__ for user in users])
def get_current_user():
    """Return the logged-in user's attributes as JSON."""
    return respond_with(vars(current_user))
def get_task_relation_types(task_id):
    """List the relation annotation types configured for a task."""
    relation_types = get_task_types(task_id, relation=True)
    return respond_with(relation_types)
def return_entities():
    """Score user1's entity annotations against user2's on one document.

    user2's entities serve as the gold standard. Each gold entity is counted
    as exactly one of: match (identical span), left-align (same start),
    right-align (same end), overlap, or miss; per-category type mismatches
    accumulate in ``wrong_type``.
    """
    req = request.get_json()
    document_id = req['document_id']
    user1 = req['user1']  # predictions
    user2 = req['user2']  # gold standard
    cursor = get_connection().cursor()
    # sort both sides by start offset to enable a single sweep
    predictions = sorted(get_entities_for_user_document(
        cursor, document_id, user1),
                         key=lambda x: x.start)
    gold_standard = sorted(get_entities_for_user_document(
        cursor, document_id, user2),
                           key=lambda x: x.start)
    p = 0  # first prediction that could still overlap the current gold entity
    matches, left_aligns, right_aligns, overlaps, misses, wrong_type = 0, 0, 0, 0, 0, {}
    for entity in gold_standard:
        if len(predictions) == 0:
            misses += 1
            continue
        # skip predictions that end before the gold entity begins
        while predictions[p].end < entity.start:
            if p == len(predictions) - 1:
                break
            p += 1
        can_miss = True  # cleared once any candidate touches this entity
        for candidate in predictions[p:]:
            if candidate.start > entity.end:
                # candidate starts after the entity ends: no further overlaps
                if can_miss:
                    misses += 1
                can_miss = False
                break
            # NOTE(review): breaking when a candidate ends before the entity
            # starts stops the scan early — possibly meant to be `continue`;
            # verify against the evaluation spec
            if candidate.end < entity.start:
                break
            can_miss = False
            if candidate.start != entity.start:
                if candidate.end == entity.end:
                    # shared end only
                    if candidate.type != entity.type:
                        wrong_type["right-aligns"] = wrong_type.get(
                            "right-aligns", 0) + 1
                    right_aligns += 1
                else:
                    # overlapping but neither boundary shared
                    if candidate.type != entity.type:
                        wrong_type["overlaps"] = wrong_type.get("overlaps", 0) + 1
                    overlaps += 1
            else:
                if candidate.end == entity.end:
                    # exact span match
                    if candidate.type != entity.type:
                        wrong_type["matches"] = wrong_type.get("matches", 0) + 1
                    matches += 1
                else:
                    # shared start only
                    if candidate.type != entity.type:
                        wrong_type["left-aligns"] = wrong_type.get(
                            "left-aligns", 0) + 1
                    left_aligns += 1
        if can_miss:
            misses += 1
    return respond_with({
        "matches": matches,
        "left-aligns": left_aligns,
        "right-aligns": right_aligns,
        "overlaps": overlaps,
        "misses": misses,
        "wrong-type": wrong_type
    })
def fetch_pubmed_abstract(pubmed_id):
    """Fetch and return the abstract for a PubMed article id."""
    try:
        fetcher = PubMedFetcher(cachedir=".cache/")
        abstract = fetcher.article_by_pmid(pubmed_id).abstract
        return respond_with(abstract)
    except InvalidPMID:
        # NOTE(review): a 4xx status would arguably suit a bad client-supplied
        # id better than 500; kept as-is to preserve the API contract
        return 'Invalid PubmedID', 500