def update_email_details(solution_id,payload):
    """Push an email-source configuration to the platform and sync Mongo.

    Posts the configuration as a job; on job success, updates the local
    email source record and (re)creates the email template for the solution.

    :param solution_id: solution the email source belongs to
    :param payload: email configuration dict (shape defined by caller -- TODO confirm)
    :return: dict with 'status' plus 'msg'/'error'; includes 'job_id' when one was issued
    """
    job_id = None
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        # Job payload: outer envelope plus the email source configuration itself.
        data = dict(solution_id = solution_id, data=dict(source_type="email",service_name="source",solution_id=solution_id, configuration=payload))
        response = post_job(CONFIGURE_EMAIL,data)
        if 'job_id' in response:
            job_id = response["job_id"]
        if not is_request_timeout(response):
            status, result = get_response(response)
            if status:
                # Persist the configuration locally only after the job succeeded.
                MongoDbConn.update(SOURCE_COLLECTION, where_clause=dict(solution_id=solution_id,source_type="email"), query=data["data"])
                temp_result = create_email_template(solution_id,payload)
                if temp_result["status"] == "success":
                    return {"status": "success", "msg": "Updated email details.", 'job_id': job_id}
                else:
                    # Propagate the template-creation failure dict as-is.
                    return temp_result
            else:
                return {'status': 'failure', 'msg': 'Error in updating emails', 'error': result, 'job_id':job_id}
        else:
            return {'status': 'failure', 'msg': 'Request timeout', "error": response, 'job_id':job_id}
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        if job_id:
            return {"status": "failure", "msg": "Failed to update details.", 'job_id':job_id}
        else:
            return {"status": "failure", "msg": "Failed to update details."}
    finally:
        # end_span runs on every exit path, including the returns above.
        context.end_span()
def update_solutions(req_data):
    """Update the hOCR type on an existing, non-deleted solution record.

    :param req_data: dict carrying 'solution_id' and 'hocr_type'
    :return: dict with 'status' and 'msg'
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        query = {
            'is_deleted': False,
            'solution_id': req_data["solution_id"]
        }
        projection = {'_id': 0}
        solns = MongoDbConn.find_one(SOLUTION_COLLECTION, query, projection=projection)
        if solns:
            solns['hocr_type'] = req_data["hocr_type"]
            MongoDbConn.update(SOLUTION_COLLECTION, query, solns)
            return {'status': 'success', 'msg': 'updated Solutions list'}
        else:
            # No matching solution is reported as success by design (no-op).
            return {'status': 'success', 'msg': 'No solutions exists'}
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        # Fixed typo in user-facing message ("occerd" -> "occurred").
        return {'status': 'failure', 'msg': 'exception occurred : ' + str(e)}
    finally:
        context.end_span()
def delete_source(self, solution_id, source_id):
    """ This function will delete the particular source and return the dictionary as response :param solution_id: Session Solution Id :param source_id: Sources Id :return: Dictionary as response """
    try:
        # NOTE(review): get_source normally returns (record, query, status_code),
        # but its precondition-failure path returns a dict -- unpacking that here
        # would bind the dict's keys instead. Verify get_source's contract.
        source_rec, query, status_code = self.get_source(
            solution_id, source_id)
        if not source_rec or \
                status_code == STATUS_CODES['PRECONDITION_FAILED']:
            return {
                'status': 'failure',
                'status_code': STATUS_CODES['NO_CONTENT'],
                'msg': 'Source/s not available to delete.'
            }
        # Soft delete: flag the record rather than removing the document.
        source_rec['is_deleted'] = True
        source_rec['updated_ts'] = datetime.utcnow().isoformat()
        MongoDbConn.update(SOURCES_COLLECTION, query, source_rec)
        return {
            'status': 'success',
            'status_code': STATUS_CODES['OK'],
            'msg': 'Source/s has been deleted successfully.'
        }
    except Exception as e:
        self.context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {
            'status': 'failure',
            'status_code': STATUS_CODES['INTERNAL_SERVER_ERROR'],
            'msg': 'Failed to delete the source/s.'
        }
def process_workflow_files(request):
    """List, read, or update Camunda workflow (BPMN) resource files.

    GET: return all 'camunda_workflow' resource records.
    POST with 'file_path': return the XML contents of that file.
    POST with 'resource_id': overwrite the resource's file on disk with the
    provided 'xml_string'.
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        if request.method == "GET":
            result = MongoDbConn.find(RESOURCES_COLLECTION,dict(type="camunda_workflow"))
            workflow_files = list()
            for file in result:
                # Drop Mongo's ObjectId so the payload is JSON-serializable.
                file.pop("_id")
                workflow_files.append(file)
            return {"data":workflow_files,"status":"success"}
        elif request.method == "POST":
            payload = json.loads(request.body.decode())
            if "file_path" in payload:
                with open(payload["file_path"]) as fp:
                    xml_string = fp.read()
                return {"status": "success","xml_string" : xml_string}
            if "resource_id" in payload:
                file = MongoDbConn.find_one(RESOURCES_COLLECTION,dict(type="camunda_workflow",resource_id=payload["resource_id"]))
                if file is not None:
                    # Rewrite the file in place; truncate() discards any
                    # remainder when the new XML is shorter than the old.
                    with open(file["file_path"], 'r+') as f:
                        f.read()
                        f.seek(0)
                        f.write(payload["xml_string"])
                        f.truncate()
                    return {"status":"success","msg":"Workflow updated successfully"}
            return {"status":"failure","msg":"Workflow update failed"}
    # TODO raise specific exception
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {"status": "failure", "msg": str(e)}
    finally:
        context.end_span()
def store_entity_definitions(file_path, solution_id):
    """Parse entity definitions from an ontology file and upsert them.

    Each definition is tagged with the solution id and upserted into the
    entity-definitions collection, keyed by (entity_name, solution_id).

    :param file_path: path to the ontology (owl/rdf) file
    :param solution_id: solution to attach the definitions to
    :return: dict with 'status' and 'msg'
    """
    response = dict()
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        definitions = get_entity_definitions_from_ontology(file_path)
        for definition in definitions:
            definition['solution_id'] = solution_id
            # Entries without an entity name cannot be keyed, so skip them.
            if "entity_name" in definition:
                selector = {
                    "entity_name": definition["entity_name"],
                    "solution_id": definition["solution_id"]
                }
                MongoDbConn.update(ENTITY_DEFN_COLL,
                                   where_clause=selector,
                                   query=definition)
        response['status'] = 'success'
        response['msg'] = 'Entity definitions successfully updated/created'
    # TODO raise specific exception
    except Exception as exc:
        context.log(message=str(exc), obj={"tb": traceback.format_exc()})
        response['status'] = 'failure'
        response['msg'] = "Failed to update Entity Definitions " + str(exc)
    context.end_span()
    return response
def entity_delete(payload, solution_id, config, validated=False):
    """Delete one or more domain entities (and their sub-entities) via a job.

    When *validated* is False, *payload* is a dict carrying 'entity_name'
    and the delete is validated first; when True, *payload* is already a
    list of entity names.

    :return: dict with 'status', 'msg' and (when a job was issued) 'job_id'
    """
    job_id = None
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        if not validated:
            domain_object = payload["entity_name"]
            valid = validate_entity_delete(solution_id, domain_object)
            if not valid:
                return invalid_edit_msg()
            entities_list = [domain_object]
        else:
            entities_list = payload
        for entities in entities_list:
            # Start from a copy so get_all_sub_entities can extend it with
            # this entity's sub-entities without mutating entities_list.
            complete_list = deepcopy(entities_list)
            get_all_sub_entities(entities, solution_id, complete_list)
            data = {
                "solution_id": solution_id,
                "data": {
                    "filter_obj": complete_list
                }
            }
            response = post_job(config["DELETE"], data, timeout=100)
            if 'job_id' in response:
                job_id = response['job_id']
            if not is_request_timeout(response):
                status, result = get_response(response)
                if status:
                    # Mirror the remote delete locally for every entity removed.
                    for ent in complete_list:
                        query = {"entity_name": ent, "solution_id": solution_id}
                        MongoDbConn.remove(ENTITY_COLLECTION, query)
                else:
                    return {
                        'status': 'failure',
                        'msg': 'Failed to remove',
                        'error': result,
                        'job_id': job_id
                    }
            else:
                return {
                    'status': 'failure',
                    'msg': 'Request timeout',
                    "error": response,
                    'job_id': job_id
                }
        return {
            'status': 'success',
            'msg': 'Successfully removed',
            'job_id': job_id
        }
    # TODO raise specific exception
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        if job_id:
            return {"status": "failure", "msg": str(e), 'job_id': job_id}
        else:
            return {"status": "failure", "msg": str(e)}
    finally:
        context.end_span()
def seed_data(file_name, collection_name):
    """Reset a collection from a JSON seed file.

    Empties *collection_name*, then inserts every item from the JSON array
    stored at SCRIPTS_ROOT/*file_name*. Errors are printed, not raised
    (best-effort seeding).

    :param file_name: seed file name relative to SCRIPTS_ROOT
    :param collection_name: target Mongo collection
    """
    try:
        MongoDbConn.remove(collection_name, {})
        # Context manager closes the file handle (the original leaked it).
        with open(os.path.join(SCRIPTS_ROOT, file_name)) as seed_file:
            data = json.load(seed_file)
        for item in data:
            MongoDbConn.insert(collection_name, item)
    except Exception as e:
        print(str(e))
def test_get_template():
    """get_template should return exactly the one template seeded here."""
    selector = dict(solution_id=test_solution,
                    is_deleted=False,
                    template_id=test_template_obj["doc_id"])
    MongoDbConn.update(TEMPLATE_COLLECTION, selector,
                       {"template": json.dumps(test_template_obj)})
    result = get_template(test_solution)
    assert len(result["data"]) == 1
def delete_training_set_services(rec_id):
    """Remove all training-set service records for the given training-set id.

    Failures are logged via the tracer context and swallowed (best-effort).

    :param rec_id: training set id ('trset_id') whose services are removed
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        selector = {"trset_id": rec_id}
        MongoDbConn.remove(TRAINING_SET_SERVICES_COLLECTION, selector)
    # TODO raise specific exception
    except Exception as exc:
        context.log(message=str(exc), obj={"tb": traceback.format_exc()})
    finally:
        context.end_span()
def upload_training_set(request):
    """CRUD entry point for training-set resources.

    POST with files: store the upload under /media, push it to S3, register
    it with the platform, and insert a training-set record (owl/rdf uploads
    additionally seed entity definitions).
    POST with JSON body: update an existing training-set record.
    GET: purge stale files and return the training-set listing.
    DELETE: delete the training set named in the JSON body.
    """
    if request.method == 'POST' and request.FILES is not None and len(request.FILES)>0:
        files = request.FILES
        uploaded_file = files['file']
        unique_folder = "/media/" + str(uuid4())
        # Saving File to media folder.
        fs = FileSystemStorage(location=ROOT + unique_folder)
        filename = fs.save(uploaded_file.name, uploaded_file)
        # Undo URL-encoding of spaces introduced by the storage layer.
        uploaded_file_url = str(unique_folder + "/" + filename).replace("%20", " ")
        extn = str(str(filename).rsplit(".", 1)[1])
        # Ontology uploads also seed entity definitions for the solution.
        if extn == 'owl' or extn == "rdf":
            store_entity_definitions(str(os.getcwd()) + uploaded_file_url, get_solution_from_session(request))
        # Adding file to bucket
        resp = post_s3(str(filename), ROOT + uploaded_file_url, AMAZON_AWS_BUCKET, AMAZON_AWS_KEY_PATH)
        # Sending file info to platform
        resp_api = post_to_ms(str(filename), resp['url'], extn, get_solution_from_session(request))
        # Formatting data for insert
        data = create_data(dict(request.POST), filename, resp['url'], resp['key'], extn, get_solution_from_session(request))
        data['created_ts'] = datetime.now()
        # Published only when the platform accepted the file.
        data['is_published'] = False
        if resp_api['status'] == 'success' and resp_api['status_code'] == 200:
            data['is_published'] = True
        # Every known platform service starts disabled for a new upload.
        services = get_file_contents('platform_services.json')
        if services is not None:
            platform_services = dict()
            for key in services.keys():
                platform_services[key] = {'enabled': False}
            data['services'] = platform_services
        MongoDbConn.insert(TRAINING_SET_COLLECTION, data)
        # create_training_data_services(data)
        return {'status': 'success', 'msg': 'Resource uploaded to library'}
    elif request.method == 'POST':
        payload = json.loads(request.body.decode())
        data = payload['data']
        status = False
        # Only records that already exist (have a Mongo _id) can be updated.
        if '_id' in data.keys():
            status = update_training_data(data)
        if status:
            return {'status': 'success', 'status_code': 200, 'msg': 'Updated training set'}
        else:
            return {'status': 'failure', 'status_code': 500, 'msg': 'Failed in updated training set'}
    elif request.method == 'GET':
        delete_files()
        return training_set_get(TRAINING_SET_COLLECTION, dict(),
                                get_solution_from_session(request))
    elif request.method == 'DELETE':
        payload = json.loads(request.body.decode())
        return training_set_delete(get_solution_from_session(request), payload)
def update_queue_status(data, state, status, update_reqd=False):
    """Set the status of one life-cycle state on a document record.

    Mutates *data* in place; when *update_reqd* is True the new life-cycle
    and a 'processing' doc_state are also written back to Mongo.

    :param data: document dict, expected to carry 'life_cycle' and 'doc_id'
    :param state: life-cycle stage name to update
    :param status: new status value for the stage
    :param update_reqd: persist the change to DOCUMENTS_COLLECTION when True
    :return: the (possibly mutated) *data* dict
    """
    if "life_cycle" in data and state in data["life_cycle"]:
        stage = data["life_cycle"][state]
        stage["status"] = status
        # Closing a stage stamps the close time, except for terminal states.
        if status == "Closed" and state not in ("processed", "reviewed"):
            stage["closed_ts"] = datetime.now()
        changes = {
            "doc_state": "processing",
            "life_cycle": data["life_cycle"],
        }
        if update_reqd:
            MongoDbConn.update(DOCUMENTS_COLLECTION,
                               {"doc_id": data["doc_id"]},
                               changes)
    return data
def update_s3_bucket(request):
    """GET returns the configured S3 claims bucket; POST replaces it.

    :param request: HTTP request; POST body must carry 's3_claims_bucket'
    :return: dict with 'status' and 'msg' (POST), or get_s3_bucket() (GET)
    """
    if request.method == 'POST':
        payload = json.loads(request.body.decode())
        try:
            MongoDbConn.update(CONFIG_COLLECTION,
                               where_clause={},
                               query={"s3_claims_bucket": payload["s3_claims_bucket"]})
            resp = {"status": "success", "msg": "updated bucket."}
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        except Exception:
            resp = {"status": "failure", "msg": "failed to update bucket."}
        return resp
    elif request.method == "GET":
        return get_s3_bucket()
def change_doc_state(request):
    """Move a document to a new state, clearing stale entity feedback first.

    For any target state other than 'processed', any existing entity
    feedback on the document is reset before the state change is posted.

    :param request: HTTP request; JSON body (or form POST) with 'doc_state'
        and, for non-'processed' states, 'doc_id'
    :return: result of post_doc_state_change
    """
    solution_id = get_solution_from_session(request)
    try:
        payload = json.loads(request.body.decode())
    # Narrowed from a bare `except:`; fall back to form data when the
    # body is not valid JSON.
    except Exception:
        payload = request.POST
    if payload["doc_state"] != "processed":
        doc_id = payload["doc_id"]
        query = dict(doc_id=doc_id, solution_id=solution_id)
        document = MongoDbConn.find_one(DOCUMENTS_COLLECTION, query)
        # Guard against a missing document: `"x" in None` raised TypeError
        # in the original.
        if document and "entity_feedback" in document and document["entity_feedback"]:
            MongoDbConn.update(DOCUMENTS_COLLECTION, query, {"entity_feedback": None})
    return post_doc_state_change(payload, solution_id, reset_cycle=True)
def process_jobs():
    """Pick up one incomplete job, run it, upsert results, mark it complete.

    Posts the job to its endpoint, extracts entities from the response via
    the job's configured key, upserts each entity into the job's target
    collection (keyed by solution_id + the job's unique keys), then flags
    the job complete. Errors are printed, not raised.
    """
    try:
        job = MongoDbConn.find_one(JOB_COLLECTION, {"is_complete": False})
        if job is None:
            # Nothing pending. The original crashed on job["name"] here and
            # relied on the broad except to hide it.
            return
        print("Running " + job["name"] + " job.")
        response = post_job(job["endpoint"], job["data"])
        result = get_nested_value(response, job["key"])
        if result and len(result) > 0:
            for entity in result:
                query = {"solution_id": job["solution_id"]}
                query.update({key: entity[key] for key in job["unique_keys"]})
                MongoDbConn.update(job['collection'], query, entity)
        MongoDbConn.update(JOB_COLLECTION, {"_id": job["_id"]}, {"is_complete": True})
    except Exception as e:
        print(str(e))
def update_intent_review(request):
    """Persist reviewer-updated intent elements onto a document record.

    :param request: HTTP request whose JSON body carries 'doc_id' and 'elements'
    :return: dict with 'status' and 'msg'
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        body = json.loads(request.body.decode())
        MongoDbConn.update(DOCUMENTS_COLLECTION,
                           {"doc_id": body["doc_id"]},
                           {"elements_updated": body["elements"]})
        return {"status": "success", "msg": "Intent updated successfully"}
    # TODO raise specific exception
    except Exception as exc:
        context.log(message=str(exc), obj={"tb": traceback.format_exc()})
        return {"status": "failure", "msg": str(exc)}
    finally:
        context.end_span()
def process_action_delete(result_id):
    """Soft-delete a trained model record by its Mongo object id.

    :param result_id: string form of the model record's ObjectId
    :return: dict with 'status' and 'msg'
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        soft_delete = {'is_deleted': True, 'deleted_ts': datetime.now()}
        MongoDbConn.update(TRAINING_SET_MODELS_COLLECTION,
                           where_clause={"_id": ObjectId(result_id)},
                           query=soft_delete)
        return {'status': 'success', 'msg': 'Model has been deleted'}
    # TODO raise specific exception
    except Exception as exc:
        context.log(message=str(exc), obj={"tb": traceback.format_exc()})
        return {"status": "failure", "msg": "failed to delete the model"}
    finally:
        context.end_span()
def update_sources(self, payload, solution_id, source_id):
    """ This function will save the sources in the MongoDB and return the dictionary as response :param payload: Request payload :param solution_id: session Solution Id :param source_id: Id of the Source which needs to be updated :return: dictionary as response """
    try:
        source_rec, query, status_code = self.get_source(
            solution_id, source_id)
        if not source_rec or \
                status_code == STATUS_CODES['PRECONDITION_FAILED']:
            return {
                'status': 'failure',
                'status_code': STATUS_CODES['NO_CONTENT'],
                'msg': 'Source/s not available to update.'
            }
        payload['updated_ts'] = datetime.utcnow().isoformat()
        # Stamp every nested trigger/schedule with its own update time.
        for group in ('triggers', 'schedules'):
            if group in payload:
                for item in payload[group]:
                    item['updated_ts'] = datetime.utcnow().isoformat()
        MongoDbConn.update(SOURCES_COLLECTION, query, payload)
        return {
            'status': 'success',
            'status_code': STATUS_CODES['OK'],
            'msg': 'Source ' + payload['name'] + ' updated successfully in this solution.'
        }
    except Exception as exc:
        self.context.log(message=str(exc), obj={"tb": traceback.format_exc()})
        return {
            'status': 'failure',
            'status_code': STATUS_CODES['INTERNAL_SERVER_ERROR'],
            'msg': 'Failed to save the source/s information.'
        }
def process_action_retrain(solution_id, payload, model, result_id):
    """Retrain a model with new training data.

    Writes the payload to a version-numbered JSON file, uploads it to S3,
    submits a train-model job, and on success appends the new model
    reference to the record and re-enables it.

    :param solution_id: session solution id
    :param payload: dict carrying the training 'data'
    :param model: model record (uses 'name', 'type', 'model_ref')
    :param result_id: string form of the model record's ObjectId
    :return: dict with 'status' and 'msg'
    """
    # Version the training file by the number of existing model references.
    file_name = model['name'] + '_' + str(len(model['model_ref']) + 1) + '.json'
    with open(file_name, 'w') as outfile:
        json.dump(payload['data'], outfile)
    s3_resp = post_s3(str(file_name), ROOT + '/' + str(file_name), AMAZON_AWS_BUCKET, AMAZON_AWS_KEY_PATH)
    if s3_resp['status'] == 'success':
        # The local copy is only needed for the upload.
        os.remove(file_name)
        # Action classifiers train against a dedicated endpoint.
        uri = TRAINING_SET_TRAIN_MODEL_URI
        if model['type'] == 'action_classifier':
            uri = TRAINING_SET_ACTION_CLASSIFIER_URI
        response = post_train_model_job(solution_id, model, s3_resp['key'], uri)
        if not is_request_timeout(response):
            if is_message_published(response):
                model['model_ref'].append({
                    'bucket_name': AMAZON_AWS_BUCKET,
                    'key_name': AMAZON_AWS_KEY_PATH + file_name
                })
                query = {
                    'is_enabled': True,
                    'updated_ts': datetime.now(),
                    'model_ref': model['model_ref']
                }
                MongoDbConn.update(TRAINING_SET_MODELS_COLLECTION,
                                   where_clause={"_id": ObjectId(result_id)},
                                   query=query)
                return {'status': 'success', 'msg': 'Retrain model completed'}
            else:
                return {
                    'status': 'failure',
                    'msg': 'Error in service while publishing retrained model'
                }
        else:
            return {
                'status': 'failure',
                'msg': 'Timeout Error while processing retrained model'
            }
    else:
        return {
            'status': 'failure',
            'msg': 'Error in uploading retrained model to s3'
        }
def create_email_template(solution_id, payload):
    """Ensure the solution has an 'email' template and sync its elements.

    If no template exists, submit a save-template job, then create the
    default section for the returned template id. If one exists, update
    its elements from *payload*.

    Bug fix: the original returned `temp_result.update(...)`, which is
    always None (dict.update returns None), and several paths fell off the
    end returning None -- callers index result["status"] and would crash.

    :return: dict with at least a 'status' key; failure dicts carry
        'msg'/'error' and, when a job was issued, 'job_id'
    """
    job_id = None
    temp_result = {"status": "failure"}
    template = MongoDbConn.find_one(TEMPLATE_COLLECTION,
                                    {"solution_id": solution_id,
                                     "template_name": "email",
                                     "is_deleted": False})
    if template is None:
        template_data = format_template_data(solution_id)
        response = post_job(TEMPLATE_CONFIG["SAVE"], template_data)
        if 'job_id' in response:
            job_id = response["job_id"]
        if is_request_timeout(response):
            temp_result.update({'msg': 'Request timed out', 'error': response, 'job_id': job_id})
            return temp_result
        status, result = get_response(response)
        if not status:
            temp_result.update({'msg': 'Failed to create template', 'error': result, 'job_id': job_id})
            return temp_result
        template_id = get_nested_value(response, "result.result.metadata.template_id")
        if not template_id:
            # No template id in the job response: report failure instead of
            # the original's implicit None.
            temp_result.update({'msg': 'Failed to create template', 'error': result, 'job_id': job_id})
            return temp_result
        section_result = create_new_section(template_id, solution_id, DEFAULT_SECTION)
        if section_result["status"] != "success":
            temp_result.update({'msg': 'Failed to create sections', 'error': section_result, 'job_id': job_id})
            return temp_result
        return {'status': 'success', 'job_id': job_id}
    template_id = template["template_id"]
    element_result = update_elements(template_id, solution_id, payload)
    if element_result["status"] == "success":
        return {'status': 'success'}
    temp_result.update({'msg': 'Failed to create elements', 'error': element_result})
    return temp_result
def find_ug_based_queues(self, user_groups, solution_id):
    """ This function will query the DB and get the list of queues in which user group item are present and return the list of eligible queues :param user_groups: User group info for which user is tagged :param solution_id: Session solution id :return: list of eligible queues """
    try:
        group_ids = {group['id'] for group in user_groups}
        query = {'is_deleted': False, 'solution_id': solution_id}
        cursor = MongoDbConn.find(WORKFLOW_QUEUE_COLLECTION, query,
                                  projection={'_id': 0})
        eligible = []
        for queue in cursor:
            # A queue is eligible when any of its tagged groups matches
            # one of the user's groups.
            tagged = queue.get('user_groups', [])
            if any(item['id'] in group_ids for item in tagged):
                eligible.append(queue)
        return eligible
    except Exception as exc:
        self.context.log(message=str(exc), obj={'tb': traceback.format_exc()})
        return []
def save_template_element(solution_id, data):
    """Create or update a single element inside a stored template.

    Pops 'template_id' from *data*, loads the template document, merges the
    element's domain mapping / document variables / table mapping into it,
    and posts the rebuilt template back to the platform.

    :return: response dict from post_save_template, extended with the
        element's 'section_id' and 'id'; a failure dict on exception
    """
    try:
        query = dict(solution_id=solution_id, template_id=data.pop("template_id"), is_deleted=False)
        template = MongoDbConn.find_one(TEMPLATE_COLLECTION, query)
        # Stored template is a JSON string; missing record -> empty template.
        template = json.loads(template["template"]) if template else {}
        # new element object: an existing 'id' means update, otherwise mint
        # one (create); elements default to being their own section.
        new = data
        new["id"], is_update = (str(uuid4()), False) if "id" not in new else (new["id"], True)
        new["section_id"] = new["id"] if "section_id" not in new else new["section_id"]
        new["is_deleted"] = False
        if new["type"] == "table":
            template["domain_object_mapping"] = update_table_mapping(new, template["domain_object_mapping"])
        else:
            # Domain mapping and document variable are mutually exclusive:
            # setting one removes the element's entry from the other.
            if "domain_mapping" in new and new["domain_mapping"] != "":
                template["domain_object_mapping"] = update_template_domain_mapping(new["domain_mapping"], new["id"], template["domain_object_mapping"])
                template["document_variables"].pop(new["id"], None)
            if "doc_var" in new and new["doc_var"] != {}:
                template["document_variables"] = update_template_document_variables(new["doc_var"], new["id"], template["document_variables"])
                template["domain_object_mapping"].pop(new["id"], None)
        elements_reformatted = update_template_elements(new, _obj=template["elements"], is_update=is_update)
        template["elements"] = [elements_reformatted]
        resp = post_save_template(solution_id, dict(document=template))
        resp.update(dict(section_id=new['section_id'], id=new['id']))
        return resp
    except Exception:
        return dict(success=False, msg="Failed to save element", error=traceback.format_exc(), status="failure")
def get_review_data(solution_id, file_id, status, doc_type, direction, query_string): query = get_flow_id(file_id=file_id, status=status, doc_type=doc_type, direction=direction, query_string=query_string) # query = {"flow_file_id": file_id} rows = MongoDbConn.find_one(DASHBOARD_CONFIG, query={"table": "DocumentReviewDetails"}) record = dict() record.update(query) del query["disable_prev"] del query["disable_next"] query["solution_id"] = solution_id atts = list() for field in rows['config']: if field['level'] == 0: record = add_url_recid(field, query, record) else: if atts == list(): atts = set_attributes(field, query) else: atts = update_attributes(atts, set_attributes(field, query)) if field['key'] == "Document Confidence" and record[field['key']] is not None: record[field['key']] = str(round(get_score(field, record[field['key']]), 2)) + "%" if field['key'] == "Processed" and record[field['key']] is not None: record[field['key']] = str(conv_timestamp(record[field['key']])) record['attributes'] = add_intent(add_nlp_flag_to_atts(atts, query), query) return record
def get_solution_id(request):
    """Resolve the session's solution id and name for the UI.

    :param request: HTTP request carrying the session
    :return: dict with 'solution_id', 'solution_name',
        'case_management_url' and 'status'
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        solution_id = common.get_solution_from_session(request)
        record = MongoDbConn.find_one(SOLUTION_COLLECTION,
                                      {'solution_id': solution_id},
                                      projection={'_id': 0})
        # Unknown solutions still return success with an empty name.
        solution_name = record['solution_name'] if record else ""
        return {
            "solution_id": solution_id,
            'solution_name': solution_name,
            'case_management_url': CASE_MANAGEMENT_SERVICE_URL,
            "status": "success"
        }
    # TODO raise specific exception
    except Exception as exc:
        context.log(message=str(exc), obj={"tb": traceback.format_exc()})
        return {"status": "failure", "msg": str(exc)}
    finally:
        context.end_span()
def get_source(self, solution_id, source_id):
    """
    This function will fetch the source record and return it
    :param solution_id: Session Solution Id
    :param source_id: Id of the source
    :return: tuple (source_rec, query, status_code); source_rec/query are
        None when the id is missing (PRECONDITION_FAILED) or on error
    """
    try:
        if not source_id:
            # Bug fix: the original returned a failure *dict* here, which
            # callers unpack as a 3-tuple (binding the dict's keys).
            # Return the documented tuple shape so callers'
            # PRECONDITION_FAILED check actually works.
            return None, None, STATUS_CODES['PRECONDITION_FAILED']
        query = {
            'solution_id': solution_id,
            'source_type': self.source_type,
            'source_id': source_id,
            'is_deleted': False
        }
        projection = {'_id': 0}
        source_rec = MongoDbConn.find_one(SOURCES_COLLECTION, query,
                                          projection=projection)
        return source_rec, query, STATUS_CODES['OK']
    except Exception as e:
        self.context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return None, None, None
def mapping_list_of_thresholds(mapping_query, map_list=None):
    """Collect every 'map_to' target referenced by a mapping document.

    Walks the mapping's sections (and their elements), appending each
    non-deleted 'map_to' target to *map_list*.

    :param mapping_query: Mongo selector for the mapping document
    :param map_list: optional list to extend; a fresh list is created when
        omitted (the original used a mutable default [] that accumulated
        results across calls)
    :return: the populated list, or a failure dict on exception
    """
    if map_list is None:
        map_list = []
    context = tracer.get_context(request_id=str(uuid4()), log_level="ERROR")
    context.start_span(component=__name__)
    try:
        # Moved inside the try so lookup failures are logged, not raised.
        mapping = MongoDbConn.find_one(MAPPING_COLLECTION, mapping_query)
        if mapping:
            for section_id, data in mapping["sections"].items():
                if "map_to" in data and not data["is_deleted"]:
                    # A single-item list is flattened to its sole target.
                    if isinstance(data['map_to'], list) and len(data['map_to']) == 1:
                        map_list.append(data['map_to'][0])
                    else:
                        map_list.append(data['map_to'])
                if "elements" in data:
                    for element_id, element in data["elements"].items():
                        if "map_to" in element and not element["is_deleted"]:
                            for each in element["map_to"]:
                                map_list.append(each['map_to'])
        return map_list
    # TODO raise specific exception
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {
            "status": "failed",
            "msg": "error in getting objects",
            "error": str(e)
        }
    finally:
        context.end_span()
def construct_data(selector, field, solution_id):
    """Build a per-interval count series for a dashboard field.

    Walks backwards from today in windows of `step` days, counting
    documents in the field's collection that match the field's key filter
    within each window.

    :param selector: passed to get_iters to resolve 'length'/'step'
    :param field: dashboard field config ('key', 'key_type', 'name',
        'timestamp', 'collection')
    :param solution_id: session solution id
    :return: list of counts, most recent window first
    """
    data = list()
    # Hoisted: get_iters was called twice for the same selector.
    iters = get_iters(selector)
    iterlen = iters['length']
    iterstep = iters['step']
    query = dict()
    if field['key_type'] == 'string':
        # Match both lower-case and title-case forms of the field name.
        query["$or"] = [{
            field['key']: str(field["name"]).lower()
        }, {
            field['key']: str(field["name"]).title()
        }]
    elif field['key_type'] == 'bool':
        if "," in field['key']:
            query["$or"] = [{key.strip(): True} for key in field['key'].split(",")]
        else:
            query[field['key']] = True
    # Common code to get count.
    end_day = datetime.now().replace(hour=23, minute=59, second=59)
    for _ in range(iterlen):
        start_day = end_day + timedelta(days=-iterstep)
        query[field['timestamp']] = {'$lte': end_day, '$gte': start_day}
        query["solution_id"] = solution_id
        count = MongoDbConn.find(field['collection'], query).sort('_id', -1)
        end_day = start_day
        data.append(count.count())
    return data
def find_documents(request, collection, query, solution_id, projection_fields=None):
    """Fetch, paginate, and decorate document summaries for the UI.

    Applies sort/skip/limit from the request (defaults: updated_ts desc,
    no limit), then augments each document with digital/confidence/review
    presentation fields.

    :return: list of document dicts, or None when an exception was logged
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        cursor = MongoDbConn.find(collection, query, projection=projection_fields)
        sort_by, order_by_asc, skip, limit = get_pagination_details(request, sort_by='updated_ts', order_by_asc=-1, skip=0, limit=0)
        documents_list = cursor.sort(sort_by, order_by_asc).skip(skip).limit(limit)
        documents = []
        for document in documents_list:
            document.pop("_id", None)
            document = construct_json(document, DOCUMENT_SUMMARY_FIELDS)
            # Image files are treated as scanned (non-digital) documents.
            doc_type = get_doc_type(document['extn'])
            if doc_type == "image":
                document["is_digital"] = False
            else:
                document["is_digital"] = True
            if "confidence_score" not in document:
                document["confidence_score"] = get_confidence_score(document, solution_id, document["is_digital"])
            document["is_failed"] = True if document["doc_state"] == "failed" else False
            document["review_text"] = get_review_text(document["doc_state"], document)
            documents.append(document)
        return documents
    # TODO raise specific exception
    except Exception as e:
        # NOTE(review): failures fall through and return None implicitly;
        # callers must tolerate a None result.
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
    finally:
        context.end_span()
def generate_preassigned_s3_url(request):
    """Issue a presigned S3 URL for download (GET) or upload (POST/PUT).

    The POST body selects the presign 'method': GET presigns an existing
    'file_path'; POST/PUT presign a new key under
    <solution>/console/<upload_type>/<uuid>/<file_name> and record the
    pending upload in UPLOADED_FILE_COLLECTION.

    :param request: HTTP request; must be POST with a JSON body
    :return: dict with 'status', 'status_code' and, on success, 'aws_url'
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        if request.method == "POST":
            payload = json.loads(request.body.decode())
            method = payload.get("method", "GET")
            content_type = payload.get("content_type", "application/json")
            file_metadata = payload.get("file_metadata")
            solution_id = payload["solution_id"] if "solution_id" in payload \
                else get_solution_from_session(request)
            if method == "GET":
                key = payload["file_path"]
                url = StorageHandler.presigned_get_url(AMAZON_AWS_BUCKET, key)
                return {"status": "success", "solution_id": solution_id,
                        "aws_url": url, "status_code": STATUS_CODES["OK"]}
            upload_type = payload["upload_type"]
            # Strip all whitespace from the file name so the S3 key is clean.
            file_name = "".join(payload["file_name"].split())
            file_id = str(uuid4())
            key = os.path.join(solution_id, "console", upload_type, file_id, file_name)
            if method == "POST":
                url = StorageHandler.presigned_post_url(AMAZON_AWS_BUCKET, key)
            elif method == "PUT":
                url = StorageHandler.generate_presigned_url_to_upload(
                    AMAZON_AWS_BUCKET, key,
                    content_type=content_type, file_metadata=file_metadata)
            else:
                return {"status": "failure", "error": "Request type not supported",
                        "status_code": STATUS_CODES['METHOD_NOT_ALLOWED']}
            # Record the pending upload so it can be reconciled later
            # (deduplicated from the original's two identical branches).
            MongoDbConn.insert(UPLOADED_FILE_COLLECTION,
                               {"solution_id": solution_id, "key": key,
                                "upload_type": upload_type, "file_name": file_name,
                                "update_ts": datetime.utcnow().isoformat()})
            return {"status": "success", "solution_id": solution_id,
                    "aws_url": url, "status_code": STATUS_CODES["OK"]}
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        # Bug fix: stringify the exception -- returning the exception object
        # itself breaks JSON serialization of the response.
        return {"status": "failure", "error": str(e),
                "status_code": STATUS_CODES["INTERNAL_SERVER_ERROR"]}
    finally:
        context.end_span()
def delete_solution(soln):
    """Soft-delete a solution and tear down its associated resources.

    Removes the solution's NiFi pipeline config and workflow/BPMN queues,
    then marks the solution record deleted.

    :param soln: dict carrying at least 'solution_id'
    :return: dict with 'status' and 'msg'
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        solution_id = soln["solution_id"]
        soln_exists = None
        # Inner try: a lookup failure is reported without attempting teardown.
        try:
            query = {'solution_id': solution_id}
            projection = {'_id': 0}
            soln_exists = MongoDbConn.find_one(SOLUTION_COLLECTION, query, projection=projection)
        # TODO raise specific exception
        except Exception as e:
            context.log(message=str(e), obj={"tb": traceback.format_exc()})
            return {
                'status': 'failure',
                'msg': 'Error occurred while deleting solution'
            }
        if soln_exists:
            solution_name = soln_exists['solution_name']
            # Tear down external resources before flagging the record deleted.
            remove_nifi_pipeline_config(solution_name)
            SolutionService.delete_workflows_bpmn_queues(
                solution_id, context)
            soln_exists['is_deleted'] = True
            soln_exists['updated_ts'] = datetime.utcnow().isoformat()
            MongoDbConn.update(SOLUTION_COLLECTION, query, soln_exists)
            status = {
                'status': 'success',
                'msg': solution_name + ' - solution has been deleted'
            }
            return status
        else:
            status = {
                'status': 'failure',
                'msg': 'Solution does not exists'
            }
            return status
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {
            'status': 'failure',
            'msg': 'Error occurred while deleting solution'
        }
    finally:
        context.end_span()
def set_attributes(field, query):
    """Build one-key records for every document matching *query*.

    Each matching document contributes {field['key']: review_value}.

    :param field: dashboard field config ('collection', 'key', ...)
    :param query: Mongo selector for the field's collection
    :return: list of single-key dicts
    """
    matches = MongoDbConn.find(field['collection'], query)
    return [{field['key']: get_review_value(field, record, query)}
            for record in matches]