def create_query_execution(query, engine_id, data_cell_id=None, originator=None):
    """Create a query execution record and schedule it to run asynchronously.

    Args:
        query: the query text to execute.
        engine_id: id of the query engine to run against (permission-checked).
        data_cell_id: optional data-doc cell to attach this execution to.
        originator: optional socket id of the initiating client, forwarded in
            the websocket broadcast so the sender can identify its own event.

    Returns:
        The created query execution serialized via ``to_dict()``.

    Raises:
        Exception: re-raises any failure from scheduling the Celery task
            (e.g. a Redis ConnectionError), after recording the error on the
            execution and marking it as ERROR.
    """
    with DBSession() as session:
        verify_query_engine_permission(engine_id, session=session)
        uid = current_user.id
        query_execution = logic.create_query_execution(
            query=query, engine_id=engine_id, uid=uid, session=session
        )

        data_doc = None
        if data_cell_id:
            datadoc_logic.append_query_executions_to_data_cell(
                data_cell_id, [query_execution.id], session=session
            )
            data_cell = datadoc_logic.get_data_cell_by_id(data_cell_id, session=session)
            data_doc = data_cell.doc

        try:
            run_query_task.apply_async(args=[query_execution.id])
            query_execution_dict = query_execution.to_dict()

            if data_doc:
                # Notify everyone viewing the doc that a new execution started.
                socketio.emit(
                    "data_doc_query_execution",
                    (originator, query_execution_dict, data_cell_id),
                    namespace="/datadoc",
                    room=data_doc.id,
                    broadcast=True,
                )
            return query_execution_dict
        except Exception as e:
            # We might encounter ConnectionError caused by
            # Redis connection failing
            logic.create_query_execution_error(
                query_execution.id,
                error_type=None,
                error_message_extracted="Encountered Error connecting to Redis",
                error_message=str(e),
                commit=False,
                session=session,
            )
            query_execution.status = QueryExecutionStatus.ERROR
            session.commit()
            # Bare raise preserves the original traceback (vs. `raise e`).
            raise
def get_datadoc_ids_by_query_execution(query_execution_id):
    """Find the data doc / cell a query execution belongs to.

    Args:
        query_execution_id: id of the query execution (permission-checked).

    Returns:
        ``None`` when the execution is not attached to any cell; otherwise a
        dict with ``doc_id``, ``cell_id`` and ``cell_title`` (the title is
        ``None`` unless the current user can read the doc).
    """
    with DBSession() as session:
        verify_query_execution_permission(query_execution_id, session=session)

        # Take the first (doc_id, cell_id) pair, if any exists.
        pairs = logic.get_datadoc_id_from_query_execution_id(
            query_execution_id, session=session
        )
        pair_id = next(iter(pairs), None)
        if pair_id is None:
            return None

        doc_id, cell_id = pair_id

        # Only expose the cell title to users who can read the doc.
        cell_title = None
        if user_can_read(doc_id, current_user.id, session=session):
            cell_info = datadoc_logic.get_data_cell_by_id(cell_id, session=session)
            if cell_info:
                cell_title = cell_info.meta.get("title")

        return {"doc_id": doc_id, "cell_id": cell_id, "cell_title": cell_title}
def paste_data_cell(cell_id: int, cut: bool, doc_id: int, index: int, sid="", session=None):
    """Paste a data cell into a doc at the given index.

    With ``cut`` the cell is moved (removed from its original doc); otherwise
    it is copied, including its query history. Emits websocket events so all
    viewers of the affected doc(s) see the change, and finally emits
    ``data_cell_pasted`` back to the sender to resolve its promise.

    Args:
        cell_id: id of the cell being pasted (must exist).
        cut: move the cell when True, copy it when False.
        doc_id: destination doc id (current user must be able to write it).
        index: position in the destination doc to insert at.
        sid: socket id of the initiating client, echoed in emitted events.
        session: optional DB session passed through to the logic layer.
    """
    data_cell = logic.get_data_cell_by_id(cell_id, session=session)
    assert data_cell is not None, "Data cell does not exist"
    data_doc = logic.get_data_doc_by_id(doc_id, session=session)
    old_data_doc = data_cell.doc
    same_doc = old_data_doc.id == doc_id

    # Source and destination docs must live in the same environment,
    # and the user must have access to that environment.
    assert (
        old_data_doc.environment_id == data_doc.environment_id
    ), "Must be in the same environment"
    verify_environment_permission([data_doc.environment_id])

    # Write access to the destination doc is always required.
    assert_can_write(doc_id, session=session)
    if not same_doc:
        if cut:
            # Moving a cell out requires write access to the original doc.
            assert_can_write(old_data_doc.id, session=session)
        else:
            # Copying only requires read access to the original doc.
            assert_can_read(old_data_doc.id, session=session)

    if cut:
        old_cell_index = logic.get_data_doc_data_cell(
            cell_id, session=session
        ).cell_order
        logic.move_data_doc_cell_to_doc(cell_id, doc_id, index, session=session)

        if same_doc:
            # Removing the cell first shifts later indices down by one.
            # See more details in move_data_doc_cell_to_doc.
            if old_cell_index < index:
                index -= 1
            socketio.emit(
                "data_cell_moved",
                # sid, from_index, to_index
                (sid, old_cell_index, index),
                namespace=DATA_DOC_NAMESPACE,
                room=doc_id,
                broadcast=True,
            )
        else:
            # Cross-doc move: insert event for the destination doc,
            # delete event for the source doc.
            socketio.emit(
                "data_cell_inserted",
                (sid, index, data_cell.to_dict()),
                namespace=DATA_DOC_NAMESPACE,
                room=doc_id,
                broadcast=True,
            )
            socketio.emit(
                "data_cell_deleted",
                (sid, cell_id),
                namespace=DATA_DOC_NAMESPACE,
                room=old_data_doc.id,
                broadcast=True,
            )
    else:
        # Copy: create a fresh cell with the same type/content/meta...
        new_cell_dict = insert_data_cell(
            doc_id,
            index,
            data_cell.cell_type.name,
            data_cell.context,
            data_cell.meta,
            sid,
            session=session,
        )
        # ...and carry over all query history.
        logic.copy_cell_history(cell_id, new_cell_dict["id"], session=session)

    # To resolve the sender's promise
    socketio.emit(
        "data_cell_pasted",
        (sid),
        namespace=DATA_DOC_NAMESPACE,
        room=doc_id,
        broadcast=False,
    )