def create_datadoc_schedule(
    id,
    cron,
    kwargs,
):
    """Create a scheduled task that runs the data doc on a cron cadence.

    Validates the schedule config and cron expression first, then checks
    that the caller can write the doc and access its environment.
    """
    is_valid, invalid_reason = validate_datadoc_schedule_config(kwargs)
    api_assert(is_valid, invalid_reason)
    api_assert(validate_cron(cron), "Invalid cron expression")

    task_name = schedule_logic.get_data_doc_schedule_name(id)
    with DBSession() as session:
        assert_can_write(id, session=session)
        doc = logic.get_data_doc_by_id(id, session=session)
        verify_environment_permission([doc.environment_id])

        # The stored task kwargs always carry the acting user and target doc.
        task_kwargs = dict(kwargs)
        task_kwargs["user_id"] = current_user.id
        task_kwargs["doc_id"] = id

        return schedule_logic.create_task_schedule(
            task_name,
            "tasks.run_datadoc.run_datadoc",
            cron=cron,
            kwargs=task_kwargs,
            task_type="user",
            session=session,
        )
def update_datadoc_editor(
    id,
    write=None,
    read=None,
    originator=None,  # Used for websocket to identify sender, optional
):
    """Update the read/write permission flags of a data doc editor entry.

    Broadcasts the updated editor to everyone viewing the doc and returns
    its dict form; returns None when no editor results from the update.
    """
    with DBSession() as session:
        existing = logic.get_data_doc_editor_by_id(id, session=session)
        if existing:
            # Only someone with write access to the doc may change editors.
            assert_can_write(existing.data_doc_id, session=session)

        updated = logic.update_data_doc_editor(id, read, write, session=session)
        if not updated:
            return None

        editor_dict = updated.to_dict()
        socketio.emit(
            "data_doc_editor",
            (
                originator,
                editor_dict["data_doc_id"],
                editor_dict["uid"],
                editor_dict,
            ),
            namespace="/datadoc",
            room=editor_dict["data_doc_id"],
            broadcast=True,
        )
        return editor_dict
def update_datadoc_schedule(id, cron=None, enabled=None, kwargs=None):
    """Modify the cron, enabled flag, and/or kwargs of a doc's schedule.

    Only the fields that are explicitly provided (not None) are updated.
    Raises via api_assert when the schedule does not exist or inputs are
    invalid.
    """
    if kwargs is not None:
        is_valid, invalid_reason = validate_datadoc_schedule_config(kwargs)
        api_assert(is_valid, invalid_reason)
    if cron is not None:
        api_assert(validate_cron(cron), "Invalid cron expression")

    task_name = schedule_logic.get_data_doc_schedule_name(id)
    with DBSession() as session:
        assert_can_write(id, session=session)
        schedule = schedule_logic.get_task_schedule_by_name(task_name, session=session)
        api_assert(schedule, "Schedule does not exist")
        verify_data_doc_permission(id, session=session)

        # Pass along only the fields the caller actually supplied.
        changes = {}
        if cron is not None:
            changes["cron"] = cron
        if enabled is not None:
            changes["enabled"] = enabled
        if kwargs is not None:
            changes["kwargs"] = {
                **kwargs,
                "user_id": current_user.id,
                "doc_id": id,
            }

        return schedule_logic.update_task_schedule(
            schedule.id,
            session=session,
            **changes,
        )
def insert_data_cell(
    doc_id, index, cell_type, context=None, meta=None, sid="", session=None
):
    """Create a new cell and splice it into a data doc at the given index.

    Broadcasts a "data_cell_inserted" event to viewers of the doc and
    returns the new cell as a dict.
    """
    assert_can_write(doc_id, session=session)
    verify_data_doc_permission(doc_id, session=session)

    # Cell creation is uncommitted here; insert_data_doc_cell finalizes it.
    new_cell = logic.create_data_cell(
        cell_type=cell_type,
        context=context,
        meta=meta,
        commit=False,
        session=session,
    )
    logic.insert_data_doc_cell(
        data_doc_id=doc_id, cell_id=new_cell.id, index=index, session=session
    )

    cell_dict = new_cell.to_dict()
    socketio.emit(
        "data_cell_inserted",
        (sid, index, cell_dict),
        namespace=DATA_DOC_NAMESPACE,
        room=doc_id,
        broadcast=True,
    )
    return cell_dict
def run_data_doc(id):
    """Trigger an immediate run of a data doc's scheduled task.

    Requires write access; raises via api_assert when no schedule exists.
    """
    task_name = schedule_logic.get_data_doc_schedule_name(id)
    with DBSession() as session:
        assert_can_write(id, session=session)
        verify_data_doc_permission(id, session=session)

        schedule = schedule_logic.get_task_schedule_by_name(task_name, session=session)
        api_assert(schedule, "Schedule does not exist")
        run_and_log_scheduled_task(schedule.id, session=session)
def delete_datadoc_schedule(id):
    """Remove the schedule attached to a data doc, if one exists.

    A doc without a schedule is a silent no-op rather than an error.
    """
    task_name = schedule_logic.get_data_doc_schedule_name(id)
    with DBSession() as session:
        assert_can_write(id, session=session)
        verify_data_doc_permission(id, session=session)

        schedule = schedule_logic.get_task_schedule_by_name(task_name, session=session)
        if schedule:
            schedule_logic.delete_task_schedule(schedule.id, session=session)
def remove_datadoc_access_request(doc_id, uid, originator=None):
    """Dismiss a pending access request for `uid` and notify doc viewers.

    The None payload in the emitted event tells clients the request is gone.
    """
    assert_can_write(doc_id)
    logic.remove_datadoc_access_request(doc_id=doc_id, uid=uid)
    socketio.emit(
        "data_doc_access_request",
        (originator, doc_id, uid, None),
        namespace="/datadoc",
        room=doc_id,
        broadcast=True,
    )
def add_datadoc_editor(
    doc_id,
    uid,
    read=None,
    write=None,
    originator=None,  # Used for websocket to identify sender, optional
):
    """Grant a user editor access to a data doc.

    Creates the editor row, clears any pending access request from the
    same user, commits, refreshes Elasticsearch, broadcasts the change to
    doc viewers, and emails the new editor. Returns the editor as a dict.
    """
    with DBSession() as session:
        assert_can_write(doc_id, session=session)
        # Created uncommitted so the editor row and the access-request
        # removal below land in a single commit.
        editor = logic.create_data_doc_editor(
            data_doc_id=doc_id, uid=uid, read=read, write=write, commit=False
        )
        editor_dict = editor.to_dict()

        # If the user had requested access, granting it resolves the request.
        access_request = logic.get_data_doc_access_request_by_doc_id(
            doc_id=doc_id, uid=uid
        )
        if access_request:
            logic.remove_datadoc_access_request(doc_id=doc_id, uid=uid, commit=False)

        session.commit()

        # Update queries in elasticsearch to reflect new permissions
        logic.update_es_queries_by_datadoc_id(doc_id, session=session)

        # Tell viewers the pending request is resolved (None payload)...
        if access_request:
            socketio.emit(
                "data_doc_access_request",
                (originator, doc_id, uid, None),
                namespace="/datadoc",
                room=doc_id,
                broadcast=True,
            )

        # ...and that a new editor entry exists.
        socketio.emit(
            "data_doc_editor",
            (originator, doc_id, uid, editor_dict),
            namespace="/datadoc",
            room=doc_id,
            broadcast=True,
        )

        logic.update_es_data_doc_by_id(doc_id)
        send_add_datadoc_editor_email(doc_id, uid, read, write)
        return editor_dict
def delete_datadoc_editor(
    id,
    originator=None,  # Used for websocket to identify sender, optional
):
    """Revoke an editor's access to a data doc and broadcast the removal.

    Silently does nothing when the editor entry does not exist.
    """
    with DBSession() as session:
        editor = logic.get_data_doc_editor_by_id(id, session=session)
        if not editor:
            return

        removed = editor.to_dict()
        assert_can_write(editor.data_doc_id, session=session)
        logic.delete_data_doc_editor(
            id=id, doc_id=editor.data_doc_id, session=session
        )
        # A None payload signals to clients that the editor entry was deleted.
        socketio.emit(
            "data_doc_editor",
            (originator, removed["data_doc_id"], removed["uid"], None),
            namespace="/datadoc",
            room=removed["data_doc_id"],
            broadcast=True,
        )
def update_data_cell(cell_id, fields, sid="", session=None):
    """Apply field updates to a data cell and broadcast the new state.

    Permission is checked against the doc that owns the cell. Returns the
    updated cell as a dict.
    """
    parent_doc = logic.get_data_doc_by_data_cell_id(cell_id, session=session)
    assert_can_write(parent_doc.id, session=session)
    verify_environment_permission([parent_doc.environment_id])

    cell = logic.update_data_cell(
        id=cell_id,
        session=session,
        **fields,
    )
    cell_dict = cell.to_dict()
    socketio.emit(
        "data_cell_updated",
        (sid, cell_dict),
        namespace=DATA_DOC_NAMESPACE,
        room=parent_doc.id,
        broadcast=True,
    )
    return cell_dict
def delete_data_cell(doc_id, cell_id, sid="", session=None):
    """Detach and delete a cell from a data doc, notifying viewers."""
    assert_can_write(doc_id, session=session)
    verify_data_doc_permission(doc_id, session=session)

    logic.delete_data_doc_cell(
        data_doc_id=doc_id, data_cell_id=int(cell_id), session=session
    )
    socketio.emit(
        "data_cell_deleted",
        (sid, cell_id),
        namespace=DATA_DOC_NAMESPACE,
        room=doc_id,
        broadcast=True,
    )
    return True
def update_datadoc(doc_id, fields, sid="", session=None):
    """Update doc-level fields and broadcast the refreshed doc.

    Returns the updated doc as a dict.
    """
    # Check to see if author has permission
    assert_can_write(doc_id, session=session)
    verify_data_doc_permission(doc_id, session=session)

    updated_doc = logic.update_data_doc(
        id=doc_id,
        session=session,
        **fields,
    )
    updated_dict = updated_doc.to_dict()
    socketio.emit(
        "data_doc_updated",
        (sid, updated_dict),
        namespace=DATA_DOC_NAMESPACE,
        room=doc_id,
        broadcast=True,
    )
    return updated_dict
def move_data_cell(doc_id, from_index, to_index, sid="", session=None):
    """Reorder a cell within a data doc and broadcast the move."""
    assert_can_write(doc_id, session=session)
    verify_data_doc_permission(doc_id, session=session)

    logic.move_data_doc_cell(
        data_doc_id=doc_id,
        from_index=int(from_index),
        to_index=int(to_index),
        session=session,
    )
    socketio.emit(
        "data_cell_moved",
        (sid, from_index, to_index),
        namespace=DATA_DOC_NAMESPACE,
        room=doc_id,
        broadcast=True,
    )
    # Should we return data instead?
    return True
def save_and_export_dag(id, exporter_name):
    """Export a data doc's DAG via the named exporter.

    Write access to the doc is required; the exporter's result is
    returned unchanged.
    """
    assert_can_write(id)
    return export_dag(data_doc_id=id, dag_exporter_name=exporter_name)
def paste_data_cell(
    cell_id: int, cut: bool, doc_id: int, index: int, sid="", session=None
):
    """Paste a cell into a doc at `index`, either moving (cut) or copying it.

    Both docs must live in the same environment. A cut within the same doc
    becomes a move; a cut across docs emits insert+delete events; a copy
    creates a new cell and clones its query history. Always finishes with a
    "data_cell_pasted" ack to the sender only.
    """
    data_cell = logic.get_data_cell_by_id(cell_id, session=session)
    assert data_cell is not None, "Data cell does not exist"

    data_doc = logic.get_data_doc_by_id(doc_id, session=session)
    old_data_doc = data_cell.doc
    same_doc = old_data_doc.id == doc_id
    # Make sure they are in the same environment and have access
    assert (
        old_data_doc.environment_id == data_doc.environment_id
    ), "Must be in the same environment"
    verify_environment_permission([data_doc.environment_id])

    # Users need to be able to write in the doc copied to
    assert_can_write(doc_id, session=session)
    if not same_doc:
        if cut:
            # To cut the user need to be able to write the original doc
            assert_can_write(old_data_doc.id, session=session)
        else:
            # To copy the user need to be able to read the original doc
            assert_can_read(old_data_doc.id, session=session)

    if cut:
        old_cell_index = logic.get_data_doc_data_cell(
            cell_id, session=session
        ).cell_order
        logic.move_data_doc_cell_to_doc(cell_id, doc_id, index, session=session)
        if same_doc:
            # Account for shift in original index
            # See more details in move_data_doc_cell_to_doc
            if old_cell_index < index:
                index -= 1
            socketio.emit(
                "data_cell_moved",
                # sid, from_index, to_index
                (
                    sid,
                    old_cell_index,
                    index,
                ),
                namespace=DATA_DOC_NAMESPACE,
                room=doc_id,
                broadcast=True,
            )
        else:
            # Cross-doc cut: the destination sees an insert...
            socketio.emit(
                "data_cell_inserted",
                (
                    sid,
                    index,
                    data_cell.to_dict(),
                ),
                namespace=DATA_DOC_NAMESPACE,
                room=doc_id,
                broadcast=True,
            )
            # ...and the source doc sees a delete.
            socketio.emit(
                "data_cell_deleted",
                (
                    sid,
                    cell_id,
                ),
                namespace=DATA_DOC_NAMESPACE,
                room=old_data_doc.id,
                broadcast=True,
            )
    else:  # Copy
        # insert_data_cell handles the "data_cell_inserted" broadcast.
        new_cell_dict = insert_data_cell(
            doc_id,
            index,
            data_cell.cell_type.name,
            data_cell.context,
            data_cell.meta,
            sid,
            session=session,
        )
        # Copy all query history over
        logic.copy_cell_history(cell_id, new_cell_dict["id"], session=session)

    # To resolve the sender's promise
    socketio.emit(
        "data_cell_pasted",
        (sid),
        namespace=DATA_DOC_NAMESPACE,
        room=doc_id,
        broadcast=False,
    )
def create_or_update_dag_export(id, dag, meta):
    """Persist the DAG export payload for a data doc.

    Write access to the doc is required; delegates storage to the logic
    layer and returns its result.
    """
    assert_can_write(id)
    return logic.create_or_update_dag_export(data_doc_id=id, dag=dag, meta=meta)
def get_datadoc_access_requests(doc_id):
    """Return pending access requests for a doc; only writers may view them."""
    assert_can_write(doc_id)
    return logic.get_data_doc_access_requests_by_doc_id(doc_id)