Exemplo n.º 1
0
def ingest_file(request, collection, aws_bucket, aws_path):
    """
    Ingest files for a solution and trigger the ingestion pipeline.

    Handles two POST shapes:
    - multipart upload (``request.FILES`` non-empty): each file is saved
      under the solution's documents/uploads folder and the file-source
      pipeline is triggered for it;
    - JSON body with a ``"files"`` list: each entry is resolved via SFTP
      and posted to the document ingest-flow endpoint.

    :param request: Http request (POST)
    :param collection: Mongo collection name (kept for interface
                       compatibility; unused in the current flow)
    :param aws_bucket: S3 bucket name (kept for interface compatibility)
    :param aws_path: S3 path prefix (kept for interface compatibility)
    :return: dict with "status" and "msg"
    """
    response = {"status": "failure"}
    if request.method == 'POST' and len(request.FILES) != 0:
        files_list = read_multiple_files(request)
        for file_key, file_value in files_list.items():
            solution_id = get_solution_from_session(request)
            # Saving File to media folder.
            file_data = save_to_folder(solution_id, file_value[0], MOUNT_PATH,
                                       "documents", "uploads", flag=True)
            if file_data["status"] != "success":
                response['msg'] = "Error while saving the file"
                return response
            pipeline_name = PIPELINE_VARIABLES["FILE_SOURCE"]
            trigger_payload = {"data": {"file_path": file_data["data"]["relative_path"],
                                        "pipeline_name": pipeline_name,
                                        "request_type": SERVICE_REQUEST_TYPES["INGEST_DOCUMENT"]},
                               "solution_id": solution_id}
            resp = post(API_GATEWAY_POST_JOB_URI + PIPELINE["TRIGGER_PIPELINE"], trigger_payload)
            if resp['status'] != 'success':
                response["msg"] = resp["msg"]
                return response

    elif request.method == "POST":
        # JSON body referencing files already available over SFTP.
        solution_id = get_solution_from_session(request)
        payload = json.loads(request.body.decode())
        if "files" in payload and payload["files"]:
            for file_name in payload["files"]:
                sftp_data = get_mountpath_fromsftp(solution_id, file_name)
                # Use a dedicated variable instead of rebinding `payload`
                # while iterating over payload["files"].
                ingest_payload = {"data": {"file_path": sftp_data["relative_path"]},
                                  "solution_id": solution_id}
                resp = post(API_GATEWAY_POST_JOB_URI + DOCUMENT_ENDPOINT["ingest_flow"], ingest_payload)
                if resp['status'] != 'success':
                    response["msg"] = resp["msg"]
                    return response
        else:
            response["msg"] = "No files attached to the request"
            return response

    response['status'] = "success"
    response['msg'] = "File(s) uploaded Successfully"
    return response
Exemplo n.º 2
0
def post_save_template(solution_id, data, endpoint=TEMPLATE_SAVE_URL):
    """Post template data to the save endpoint and normalize the reply.

    :param solution_id: solution identifier added to the request body
    :param data: template payload to persist
    :param endpoint: gateway endpoint path (defaults to TEMPLATE_SAVE_URL)
    :return: dict with ``success`` flag, ``msg`` and ``status`` string
    """
    body = {"data": data, "solution_id": solution_id}
    result = post(API_GATEWAY_POST_JOB_URI + endpoint, body)
    if result["status"] != "success":
        return {"success": False, "msg": result["msg"], "status": "failure"}
    return {"success": True, "msg": "Template Updated", "status": "success"}
Exemplo n.º 3
0
def get_pipelines(request):
    """
    This function will fetch all pipeline services
    and return the dictionary as response
    :param request: Http request (only GET is accepted)
    :return: dictionary as response, including status_code and, on
             success, the pipeline list under "data"
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="ERROR")
    context.start_span(component=__name__)
    try:
        if request.method != 'GET':
            return {'status': 'failure',
                    'status_code': STATUS_CODES['BAD_REQUEST'],
                    'msg': 'GET request will be accepted.'}
        result = {"status": "failure"}
        solution_id = get_solution_from_session(request)
        data_dict = {'solution_id': solution_id,
                     'data': {}}
        response = post(API_GATEWAY_POST_JOB_URI + PIPELINE['GET_PIPELINE'], data_dict)
        if 'job_id' in response:
            result["job_id"] = response["job_id"]
        if is_request_timeout(response):
            result["msg"] = "Request timed out"
            result['status_code'] = STATUS_CODES['REQUEST_TIMEOUT']
            return result
        status, msg = get_response(response)
        if status:
            result["status"] = "success"
            result['status_code'] = STATUS_CODES['OK']
            # Guard against a missing "pipelines" node: get_nested_value may
            # return None, and len(None) would raise TypeError.
            services = get_nested_value(response,
                                        "result.metadata.pipelines") or []
            result["data"] = services
            result['total_services'] = len(services)
        else:
            result["error"] = msg
            result['status_code'] = STATUS_CODES['NOT_FOUND']
            result["msg"] = "Error in retrieving the services information"
        return result
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {'status': 'failure',
                'status_code': STATUS_CODES['INTERNAL_SERVER_ERROR'],
                'msg': 'Failed to fetch pipeline services group.'}
    finally:
        context.end_span()
Exemplo n.º 4
0
def _prepare_upload_file(file_name, transaction_id, access_token=""):
    """Register *file_name* as a response resource on an EPC transaction.

    :param file_name: name of the PDF resource to declare
    :param transaction_id: EPC transaction identifier
    :param access_token: bearer token; when empty, one is fetched from the
                         application token manager
    :return: response of the resources endpoint
    """
    token = access_token or app.token_manager.get_token()

    endpoint = EPC_BASE_URL + EPC_TRANSACTIONS_URL + transaction_id + "/response/resources"
    auth_headers = {
        "Authorization": "Bearer" + " " + token,
    }

    body = [{"name": file_name, "mimeType": "application/pdf"}]

    return post(url=endpoint,
                content_type=MIMETYPES.get("JSON"),
                headers=auth_headers,
                body=body)
Exemplo n.º 5
0
def process_complete_review(request, doc_id):
    """
    Complete the review of a document: optionally submit text/entity
    feedback, trigger the manual-review pipeline and close the queue entry.

    :param request: Http request; for text/entity paths the JSON body
                    carries "doc_id" and "feedback"
    :param doc_id: document id (overridden by the body on feedback paths)
    :return: dict with "status" and "msg" (and "error" on exception)
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="ERROR")
    context.start_span(component=__name__)
    try:
        solution_id = common.get_solution_from_session(request)
        path = request.get_full_path()
        if "text/" in path or "entity/" in path:
            payload = json.loads(request.body.decode())
            doc_id = payload["doc_id"]
            if payload["feedback"]:
                if "text/" in path:
                    feedback_status = process_text_feedback(request)
                else:
                    feedback_status = process_entity_feedback(request)
                if feedback_status["status"] != "success":
                    return {"status": "failure", "msg": "Failed to submit feedback"}
        query = {"doc_id": doc_id, "solution_id": solution_id}
        document = MongoDbConn.find_one(DOCUMENTS_COLLECTION, query)
        if not document:
            # Previously a missing document crashed on document["root_id"];
            # fail explicitly instead of via the generic exception handler.
            return {"status": "failure", "msg": "Document not found"}
        data = dict(doc_id=doc_id, pipeline_name="manual_review", root_id=document["root_id"])
        if 'completeReview/review/' in path:
            data.update({"object_type": ["document", "domain", "recommendation"], "complete_review": True})
        post_status = post(API_GATEWAY_POST_JOB_URI + PIPELINE["MANUAL_TRIGGER"],
                           {"solution_id": solution_id, "data": data})
        if post_status["status"] != "success":
            return {"status": "failure", "msg": "Error while posting review"}
        # Map the request path to the document state to persist.
        state = ""
        if "text/" in path:
            state = "extracted"
        elif "grouping/" in path:
            state = "classified"
        elif "entity/" in path:
            state = "processed"
        elif 'review/' in path:
            state = 'reviewed'
        update_queue_status(document, state, "Closed", update_reqd=True)
        return {"status": "success", "msg": "Review completion Posted successfully"}
    # TODO raise specific exception
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {"status": "failure", "msg": "Internal Error occurred while posting review", "error": str(e)}
    finally:
        context.end_span()
Exemplo n.º 6
0
    def create_token(self):
        """Request an OAuth access token using the client credentials.

        On success, stores the token and a 15-minute expiry on the
        instance and returns True; returns False when credentials are
        missing or the token endpoint yielded no response.
        """
        if self.client_id is None or self.client_secret is None:
            return False

        form_body = {
            'grant_type': self.grant_type,
            'client_id': self.client_id,
            'client_secret': self.client_secret,
            'scope': self.scope
        }
        resp = post(url=self.url,
                    content_type=MIMETYPES.get('FORM'),
                    body=form_body)
        if resp is None:
            return False

        payload = resp.json()
        self.access_token = payload.get('access_token')
        self.expiry = time.time() + 900  # treat token as valid for 15 minutes
        return True
Exemplo n.º 7
0
def save_threshold_data(solution_id, payload):
    """
    Push updated threshold data to the document service.

    :param solution_id: solution identifier
    :param payload: request payload; must contain a 'data' key
    :return: response in json format
    """
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        body = {"solution_id": solution_id, "data": payload['data']}
        post_status = post(API_GATEWAY_POST_JOB_URI + DOCUMENT_ENDPOINT["thresholds_update"],
                           body)
        if post_status['status'] != 'success':
            return {"status": "failure", "msg": "Error while updating threshold data"}
        return {"status": "success", "msg": "Threshold data updated successfully"}
    # TODO raise specific exception
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {"status": "failure", "msg": str(e)}
    finally:
        context.end_span()
Exemplo n.º 8
0
 def create_default_caseflow(solution_id, context):
     """
     Create the default workflows, BPMN and Queues for a solution.

     :param solution_id: Solution Id
     :param context: Logger Object
     :return: Dictionary returned by the case-management service, or a
              failure dict on exception
     """
     try:
         endpoint = CASE_MANAGEMENT_SERVICE_URL + 'workflow/default'
         body = {
             "data": {
                 "name": "default_case_flow",
                 "description": "default case flow"
             },
             "solution_id": solution_id
         }
         return post(endpoint, body)
     except Exception as e:
         context.log(message=str(e), obj={"tb": traceback.format_exc()})
         return {'status': 'failure'}
Exemplo n.º 9
0
 def save_sources(self, payload, solution_id):
     """
     Save the source definition in MongoDB and, on success, trigger the
     file-source pipeline for it.

     :param payload: Request payload
     :param solution_id: session Solution Id
     :return: dictionary as response
     """
     try:
         source_name = payload.get('name', '')
         if 'source_type' in payload:
             self.source_type = payload['source_type']
         if self.source_type == 'email':
             # Reject duplicate email sources (same name, not deleted).
             duplicate = MongoDbConn.find_one(
                 SOURCES_COLLECTION,
                 {'solution_id': solution_id,
                  'name': source_name,
                  'is_deleted': False,
                  'source_type': self.source_type},
                 projection={'_id': 0})
             if duplicate:
                 return {'status': 'failure',
                         'status_code': STATUS_CODES['INTERNAL_SERVER_ERROR'],
                         'msg': 'Email source with ' + source_name +
                                ' already present in this solution.'}
         payload = self.update_payload(payload)
         # Trigger pipeline when source is uploaded
         trigger_body = {
             "data": {
                 "file_path": payload['file_path'],
                 "pipeline_name": PIPELINE_VARIABLES["FILE_SOURCE"],
                 "workflow_id": payload['workflow_id']
             },
             "solution_id": solution_id
         }
         resp = post(API_GATEWAY_POST_JOB_URI + PIPELINE["TRIGGER_PIPELINE"],
                     trigger_body)
         if resp['status'] != 'success':
             return {'status': 'failure',
                     'status_code': STATUS_CODES['BAD_REQUEST'],
                     'msg': 'Source ' + source_name +
                            ' not saved in this solution and not triggered the pipeline.'}
         MongoDbConn.insert(SOURCES_COLLECTION, payload)
         return {'status': 'success',
                 'status_code': STATUS_CODES['OK'],
                 'msg': 'Source ' + source_name +
                        ' saved successfully in this solution and triggered the pipeline.'}
     except Exception as e:
         self.context.log(message=str(e),
                          obj={"tb": traceback.format_exc()})
         return {'status': 'failure',
                 'status_code': STATUS_CODES['INTERNAL_SERVER_ERROR'],
                 'msg': 'Failed to save the source/s information.'}
Exemplo n.º 10
0
def rules_config(solution_id='R1', request_id="1234"):
    """
    Fetch the list of rule operators from the rule service.

    :param solution_id: solution identifier; defaults to 'R1', the value
                        previously hard-coded, so existing zero-argument
                        callers are unaffected
    :param request_id: request correlation id (default "1234", as before)
    :return: JsonResponse wrapping the rule-service reply
    """
    data = {"solution_id": solution_id, "request_id": request_id}
    resp = post(RULE_SERVICE_URI + LIST_OPERATORS_SERVICE_METHOD, data)
    return JsonResponse(resp)