def get_preview():
    """Get files preview

    Returns
    -------
    json
        previewFiles: preview of selected files
        error: True if error, else False
        errorMessage: the error message of error, else an empty string
    """
    data = request.get_json()

    try:
        handler = FilesHandler(current_app, session)
        handler.handle_files(data['filesId'])

        # Build each preview after computing it (set_preview has side effects,
        # so a comprehension alone would not be equivalent)
        previews = []
        for selected_file in handler.files:
            selected_file.set_preview()
            previews.append(selected_file.get_preview())
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        return jsonify({
            'previewFiles': [],
            'error': True,
            'errorMessage': str(e)
        }), 500

    return jsonify({
        'previewFiles': previews,
        'error': False,
        'errorMessage': ''
    })
def integrate():
    """Integrate a file

    Returns
    -------
    json
        task_id: celery task id
        error: True if error, else False
        errorMessage: the error message of error, else an empty string
    """
    data = request.get_json()

    # Validate input early (consistent with edit_file); avoids a KeyError
    # and a generic 500 when the client omits fileId
    if not (data and data.get("fileId")):
        return jsonify({
            'error': True,
            'errorMessage': "Missing fileId parameter",
            'task_id': ''
        }), 400

    session_dict = {'user': session['user']}
    task = None

    try:
        files_handler = FilesHandler(current_app, session, host_url=request.host_url)
        files_handler.handle_files([data["fileId"], ])

        for file in files_handler.files:
            # Normalize optional parameters: absent or falsy values become None
            # (.get() prevents a KeyError when the client omits the key)
            data["externalEndpoint"] = data.get("externalEndpoint") or None
            data["customUri"] = data.get("customUri") or None

            dataset_info = {
                "celery_id": None,
                "file_id": file.id,
                "name": file.human_name,
                "graph_name": file.file_graph,
                # Only admins may publish datasets publicly
                "public": data.get("public") if session["user"]["admin"] else False
            }

            dataset = Dataset(current_app, session, dataset_info)
            dataset.save_in_db()
            data["dataset_id"] = dataset.id

            task = current_app.celery.send_task('integrate', (session_dict, data, request.host_url))
            dataset.update_celery(task.id)

    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        return jsonify({
            'error': True,
            'errorMessage': str(e),
            'task_id': ''
        }), 500

    return jsonify({
        'error': False,
        'errorMessage': '',
        'task_id': task.id if task else ''
    })
def integrate_file(self, info, public=False):
    """Integrate a file into the triplestore and record its dataset.

    Parameters
    ----------
    info : dict
        File description: "id", plus the type-specific integration options
        ("columns_type", "entities" or "entity_name")
    public : bool, optional
        Integrate as public (True) or private (False) data

    Returns
    -------
    dict
        timestamp, start and end of the integrated dataset

    Raises
    ------
    ValueError
        If info["id"] matches no file
    """
    files_handler = FilesHandler(self.app, self.session)
    files_handler.handle_files([info["id"], ])

    # Fail fast with a clear message; otherwise the final return would
    # raise a confusing NameError on `file`/`dataset` when no file matched
    if not files_handler.files:
        raise ValueError("No file with id {}".format(info["id"]))

    for file in files_handler.files:
        dataset_info = {
            "celery_id": "000000000",
            "file_id": file.id,
            "name": file.name,
            "graph_name": file.file_graph,
            "public": public
        }

        dataset = Dataset(self.app, self.session, dataset_info)
        dataset.save_in_db()

        # Dispatch on file type; each integration expects its own options
        if file.type == "csv/tsv":
            file.integrate(dataset.id, info["columns_type"], public=public)
        elif file.type == "gff/gff3":
            file.integrate(dataset.id, info["entities"], public=public)
        elif file.type == "bed":
            file.integrate(dataset.id, info["entity_name"], public=public)

        # done
        dataset.update_in_db("success")
        dataset.set_info_from_db()

    return {
        "timestamp": file.timestamp,
        "start": dataset.start,
        "end": dataset.end
    }
def edit_file():
    """Edit file name

    Returns
    -------
    json
        files: list of all files of current user
        error: True if error, else False
        errorMessage: the error message of error, else an empty string
    """
    data = request.get_json()
    current_app.logger.debug(data)

    # Guard clause: both the file id and the new name are required
    if not data or not data.get("id") or not data.get("newName"):
        return jsonify({
            'files': [],
            'diskSpace': 0,
            'error': True,
            'errorMessage': "Missing parameters"
        }), 400

    target_ids = [data["id"]]
    renamed = data["newName"]

    try:
        handler = FilesHandler(current_app, session)
        handler.handle_files(target_ids)

        for target in handler.files:
            target.edit_name_in_db(renamed)

        files = handler.get_files_infos()
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        return jsonify({
            'files': [],
            'diskSpace': 0,
            'error': True,
            'errorMessage': str(e)
        }), 500

    return jsonify({
        'files': files,
        'diskSpace': handler.get_size_occupied_by_user(),
        'error': False,
        'errorMessage': ''
    })
def integrate():
    """Integrate a file

    Returns
    -------
    json
        datasets_id: dataset ids
        error: True if error, else False
        errorMessage: the error message of error, else an empty string
    """
    data = request.get_json()

    # Guard clause: a file id is mandatory
    if not data or not data.get("fileId"):
        return jsonify({
            'error': True,
            'errorMessage': "Missing fileId parameter",
            'dataset_ids': None
        }), 400

    session_dict = {'user': session['user']}
    task = None
    dataset_ids = []

    try:
        handler = FilesHandler(current_app, session, host_url=request.host_url)
        handler.handle_files([data["fileId"], ])

        for file in handler.files:
            # An external endpoint only makes sense for RDF files;
            # a custom URI only for non-RDF files. Null out anything else.
            if not (data.get("externalEndpoint") and isinstance(file, RdfFile)):
                data["externalEndpoint"] = None
            if not (data.get("customUri") and not isinstance(file, RdfFile)):
                data["customUri"] = None

            # Only admins may publish datasets publicly
            is_public = data.get("public") if session["user"]["admin"] else False

            dataset = Dataset(current_app, session, {
                "celery_id": None,
                "file_id": file.id,
                "name": file.human_name,
                "graph_name": file.file_graph,
                "public": is_public
            })
            dataset.save_in_db()

            data["dataset_id"] = dataset.id
            dataset_ids.append(dataset.id)

            task = current_app.celery.send_task(
                'integrate', (session_dict, data, request.host_url))
            dataset.update_celery(task.id)

    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        return jsonify({
            'error': True,
            'errorMessage': str(e),
            'dataset_ids': None
        }), 500

    return jsonify({
        'error': False,
        'errorMessage': '',
        'dataset_ids': dataset_ids
    })
def integrate(self, session, data, host_url):
    """Integrate a file into the triplestore

    Parameters
    ----------
    session : dict
        AskOmics session
    data : dict
        fileId: file to integrate
        public: integrate as public or private data
    host_url : string
        AskOmics host url

    Returns
    -------
    dict
        error: True if error, else False
        errorMessage: the error message of error, else an empty string

    Raises
    ------
    Exception
        Re-raised integration error, after the dataset is flagged as
        failed and the file is rolled back, so celery marks the task failed
    """
    files_handler = FilesHandler(app, session, host_url=host_url, external_endpoint=data["externalEndpoint"], custom_uri=data["customUri"])
    files_handler.handle_files([data["fileId"], ])

    # Only admins may integrate public data
    public = data["public"] if session["user"]["admin"] else False

    for file in files_handler.files:
        dataset = None
        try:
            dataset_info = {
                "celery_id": self.request.id,
                "id": data["dataset_id"],
                "file_id": file.id,
                "name": file.human_name,
                "graph_name": file.file_graph,
                "public": public
            }

            dataset = Dataset(app, session, dataset_info)
            dataset.update_in_db("started", update_date=True, update_graph=True)

            # Dispatch on file type; each integration expects its own options
            if file.type == "csv/tsv":
                file.integrate(data["dataset_id"], data['columns_type'], data['header_names'], public=public)
            elif file.type == "gff/gff3":
                file.integrate(data["dataset_id"], data["entities"], public=public)
            elif file.type == "turtle":
                file.integrate(public=public)
            elif file.type == "bed":
                file.integrate(data["dataset_id"], data["entity_name"], public=public)

            # done
            dataset.update_in_db("success", ntriples=file.ntriples)
        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            trace = traceback.format_exc()
            # Only record the failure if the dataset row was created;
            # otherwise a NameError here would mask the original exception
            if dataset is not None:
                dataset.update_in_db("failure", error=True, error_message=str(e), traceback=trace)
            # Rollback
            file.rollback()
            # Re-raise so celery records the failure (the old unreachable
            # `return` after this raise was dead code and has been removed)
            raise e

    return {'error': False, 'errorMessage': ''}