def entity_upload(uploaded_file, solution_id, config):
    """Save an uploaded entity XML file and trigger its import.

    :param uploaded_file: file object received from the client
    :param solution_id: session solution id
    :param config: configuration forwarded to entity_save
    :return: response dict from entity_save, or a failure dict with
        'status' and 'msg'
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        file_data = save_to_folder(solution_id, uploaded_file, MOUNT_PATH,
                                   "domainObjects", "uploads", flag=True)
        # Guard clause: bail out early when the file could not be saved.
        if file_data['status'] != "success":
            # TODO raise specific exception
            return {"status": "failure", "msg": "Failed to upload file"}
        payload = {
            "type": "xml",
            "file_path": file_data["data"]["file_path"],
            "saveType": "import",
        }
        return entity_save(solution_id, payload, config)
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {'status': 'failure', 'msg': 'Error ' + str(e)}
    finally:
        context.end_span()
def ingest_file(request, collection, aws_bucket, aws_path):
    """Ingest uploaded documents, either from multipart files or via SFTP paths.

    Multipart POSTs save each file locally and trigger the FILE_SOURCE
    pipeline; JSON POSTs resolve each listed file from the SFTP mount and
    post it to the ingest flow endpoint.

    :param request: Django request (multipart files or JSON body with "files")
    :param collection: Mongo collection name (currently unused; kept for
        interface compatibility)
    :param aws_bucket: S3 bucket name (currently unused)
    :param aws_path: S3 path prefix (currently unused)
    :return: dict with 'status' and 'msg'
    """
    response = {"status": "failure"}
    if request.method == 'POST' and len(request.FILES) != 0:
        files_list = read_multiple_files(request)
        for file_key, file_value in files_list.items():
            # Save the file to the media folder.
            solution_id = get_solution_from_session(request)
            file_data = save_to_folder(solution_id, file_value[0], MOUNT_PATH,
                                       "documents", "uploads", flag=True)
            if file_data["status"] == "success":
                pipeline_name = PIPELINE_VARIABLES["FILE_SOURCE"]
                job_payload = {
                    "data": {
                        "file_path": file_data["data"]["relative_path"],
                        "pipeline_name": pipeline_name,
                        "request_type": SERVICE_REQUEST_TYPES["INGEST_DOCUMENT"],
                    },
                    "solution_id": solution_id,
                }
                resp = post(API_GATEWAY_POST_JOB_URI + PIPELINE["TRIGGER_PIPELINE"],
                            job_payload)
                if resp['status'] != 'success':
                    response["msg"] = resp["msg"]
                    return response
            else:
                response['msg'] = "Error while saving the file"
                return response
    elif request.method == "POST":
        solution_id = get_solution_from_session(request)
        payload = json.loads(request.body.decode())
        if "files" in payload and payload["files"]:
            for file in payload["files"]:
                sftp_data = get_mountpath_fromsftp(solution_id, file)
                # Use a distinct name so the request payload being iterated
                # is not clobbered mid-loop.
                job_payload = {"data": {"file_path": sftp_data["relative_path"]},
                               "solution_id": solution_id}
                resp = post(API_GATEWAY_POST_JOB_URI + DOCUMENT_ENDPOINT["ingest_flow"],
                            job_payload)
                if resp['status'] != 'success':
                    response["msg"] = resp["msg"]
                    return response
        else:
            response["msg"] = "No files attached to the request"
            return response
    response['status'] = "success"
    response['msg'] = "File(s) uploaded Successfully"
    return response
def upload_binary(uploaded_file, solution_id, payload):
    """Upload a binary file and register it through the learning-config job.

    :param uploaded_file: File to be uploaded (falsy to resolve via SFTP)
    :param solution_id: Session solution id
    :param payload: request payload
    :return: response
    """
    result = {"status": "failure"}
    if uploaded_file:
        save_result = save_to_folder(solution_id, uploaded_file, MOUNT_PATH,
                                     "binaries", "uploads", flag=True)
    else:
        # No direct upload: resolve the file from the SFTP mount instead.
        save_result = {"status": "success", "data": {}}
        sftp_data = get_mountpath_fromsftp(solution_id, payload["files"][0])
        save_result["data"]["file_path"] = sftp_data["file_path"]

    if save_result["status"] != "success":
        result["msg"] = "Internal error occurred in saving file"
        return result

    file_data = save_result["data"]
    binaries = [{
        "name": payload["file_name"],
        "description": payload["description"],
        "value": file_data["file_path"],
    }]
    job_data = {
        "solution_id": solution_id,
        "data": {"binaries": binaries},
        "metadata": {},
    }
    upload_result = post_job(LEARNING_CONFIG["upload_binary"], job_data)
    if 'job_id' in upload_result:
        result["job_id"] = upload_result["job_id"]

    if is_request_timeout(upload_result):
        result["msg"] = "Request timed out"
        return result

    status, msg = get_response(upload_result)
    if status:
        result["status"] = "success"
        result["msg"] = "File uploaded successfully"
    else:
        # Surface the backend's error message when its shape allows it.
        if 'message' in msg and 'error_message' in msg['message']:
            result["error"] = msg['message']['error_message']
        else:
            result["error"] = 'Some error occurred while uploading the binary file'
        result["msg"] = "Error while uploading file"
    return result
def upload_dataset(uploaded_file, solution_id, payload):
    """Upload a dataset file and register it through the learning-config job.

    :param uploaded_file: File to be uploaded (falsy to resolve via SFTP)
    :param solution_id: Session solution id
    :param payload: request payload with file_name, description, format
    :return: response dict with 'status', 'msg' and optional 'job_id'/'error'
    """
    result = {"status": "failure"}
    if uploaded_file:
        save_result = save_to_folder(solution_id, uploaded_file, MOUNT_PATH,
                                     "datasets", "uploads", flag=True)
    else:
        # No direct upload: resolve the file from the SFTP mount instead.
        save_result = {"status": "success", "data": {}}
        sftp_data = get_mountpath_fromsftp(solution_id, payload["files"][0])
        save_result["data"]["file_path"] = sftp_data["file_path"]

    if save_result["status"] != "success":
        result["msg"] = "Internal error occurred in saving file"
        return result

    file_data = save_result["data"]
    dataset = {
        "name": payload["file_name"],
        "description": payload["description"],
        "data_format": payload["format"],
        "value": file_data["file_path"],
    }
    job_data = {
        "solution_id": solution_id,
        "data": {"dataset": dataset},
        "metadata": {},
    }
    upload_result = post_job(LEARNING_CONFIG["upload"], job_data)
    if 'job_id' in upload_result:
        result["job_id"] = upload_result["job_id"]

    if is_request_timeout(upload_result):
        result["msg"] = "Request timed out"
        return result

    status, msg = get_response(upload_result)
    if status:
        result["status"] = "success"
        result["msg"] = "File uploaded successfully"
    else:
        result["error"] = msg
        result["msg"] = "Error while uploading file"
    return result
def create_new_resource(request, solution_id):
    """Save an uploaded resource file and persist its metadata.

    :param request: Django request carrying POST fields and a 'file' upload
    :param solution_id: session solution id
    :return: dict with 'status' and 'msg'
    """
    response = dict(status="failure")
    # Flatten the POST multi-value dict to single string values.
    payload_dict = {str(key): str(values[0])
                    for key, values in dict(request.POST).items()}
    uploaded_file = request.FILES['file']
    result = save_to_folder(solution_id, uploaded_file, MOUNT_PATH,
                            "resources", "uploads", flag=True)
    # Fail fast when the file was never written; the original ignored this
    # status and saved resource metadata for a missing file.
    if result["status"] != "success":
        response["msg"] = result.get("msg", "Error while saving the file")
        return response
    data = result["data"]
    data.update(payload_dict)
    result = save_resource_file(data, solution_id)
    if result["status"] == "success":
        response["status"] = "success"
        response["msg"] = "Files uploaded successfully"
    else:
        response["msg"] = result["msg"]
    return response
def ingest_template_request(request):
    """Handle a template ingestion POST, from a multipart upload or a JSON body.

    :param request: Django request
    :return: JsonResponse wrapping the ingest_template result (None for
        non-POST methods, as before)
    """
    if request.method == "POST":
        solution_id = get_solution_from_session(request)
        if len(request.FILES) != 0:
            # Multipart upload: save the file, then ingest by relative path.
            payload = request.POST
            file_data = save_to_folder(solution_id, request.FILES["file"],
                                       MOUNT_PATH, "templates", "uploads",
                                       flag=True)
            if file_data["data"]:
                file_path = file_data["data"]["relative_path"]
            else:
                file_path = ""
            return JsonResponse(
                ingest_template(solution_id, payload, file_path=file_path))
        # JSON body: ingest directly from the decoded payload.
        payload = json.loads(request.body.decode())
        return JsonResponse(ingest_template(solution_id, payload))
def template_train_upload_post(request):
    """Save training sample files for a template and post a training-upload job.

    :param request: Django request carrying files and a JSON "data" field
        containing the target template_id
    :return: dict with 'status', 'msg' and, when available, 'job_id',
        'data'/'error'/'traceback'
    """
    job_id = None
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        solution_id = get_solution_from_session(request)
        # Validate with explicit raises instead of bare `assert`, which is
        # stripped under `python -O` and would silently skip validation.
        # AssertionError keeps the dedicated handler below reachable.
        if len(request.FILES) == 0:
            raise AssertionError("no files attached to the request")
        # Default to "{}" (not "0") so a missing "data" field yields a clear
        # validation error instead of a TypeError on `in`.
        data = json.loads(request.POST.get("data", "{}"))
        if not isinstance(data, dict) or "template_id" not in data:
            raise AssertionError("'template_id' missing from request data")
        files = read_multiple_files(request)
        data = {"template_id": data["template_id"]}
        file_paths = []
        # Save every uploaded file under the template samples folder.
        for file_key, file_value in files.items():
            file_data = save_to_folder(solution_id, file_value[0], MOUNT_PATH,
                                       "templates", "samples", flag=True)
            if file_data['status'] != "success":
                raise AssertionError("failed to save uploaded file")
            file_paths.append(file_data["data"]["relative_path"])
        data["file_path"] = file_paths
        # Post the training-upload job and interpret its outcome.
        payload = {"solution_id": solution_id, "data": data}
        response = post_job(TEMPLATE_TRAIN_UPLOAD_ENDPOINT, payload)
        if 'job_id' in response:
            job_id = response["job_id"]
        if is_request_timeout(response):
            return {"status": "failure", "msg": "request timeout",
                    'error': response, 'job_id': job_id}
        status, result = get_response(response)
        if status:
            resp = get_nested_value(response, "result.result.metadata")
            return {"status": "success", "msg": "files uploaded successfully",
                    'data': resp, 'job_id': job_id}
        return {"status": "failure", "msg": "failed to upload",
                'error': result, 'job_id': job_id}
    except AssertionError as e:
        tb = traceback.format_exc()
        context.log(message=str(e), obj={"tb": tb})
        resp = {"status": "failure", "msg": "Assertion failed, " + str(e),
                "traceback": str(tb)}
        if job_id:
            resp['job_id'] = job_id
        return resp
    except Exception as e:
        tb = traceback.format_exc()
        context.log(message=str(e), obj={"tb": tb})
        resp = {"status": "failure", "msg": "unknown error, " + str(e),
                "traceback": str(tb)}
        if job_id:
            resp['job_id'] = job_id
        return resp
    finally:
        context.end_span()