Example #1
def seed_data(file_name, collection_name):
    try:
        MongoDbConn.remove(collection_name, {})
        with open(os.path.join(SCRIPTS_ROOT, file_name)) as f:
            data = json.load(f)
        for itm in data:
            MongoDbConn.insert(collection_name, itm)
    except Exception as e:
        print(str(e))
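All of these examples go through a project-specific MongoDbConn wrapper rather than calling pymongo directly. As a point of reference only, a minimal sketch of such a wrapper built on pymongo might look like the class below; the connection URI, database name, and method signatures are assumptions inferred from the calls in these examples, not the project's actual implementation.

# Hedged sketch of a MongoDbConn-style wrapper over pymongo.
# The URI, database name, and method names are assumptions inferred from usage above.
from pymongo import MongoClient


class MongoDbConn:
    _db = MongoClient("mongodb://localhost:27017")["console_db"]  # assumed URI and db name

    @classmethod
    def insert(cls, collection_name, document):
        # insert_one also adds an _id field to the passed-in document
        return cls._db[collection_name].insert_one(document)

    @classmethod
    def find_one(cls, collection_name, query, projection=None):
        return cls._db[collection_name].find_one(query, projection)

    @classmethod
    def update(cls, collection_name, query, document, upsert=True):
        # overwrite the matching record's fields with the new values
        return cls._db[collection_name].update_one(query, {"$set": document}, upsert=upsert)

    @classmethod
    def remove(cls, collection_name, query):
        return cls._db[collection_name].delete_many(query)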
Example #2
def upload_training_set(request):
    if request.method == 'POST' and request.FILES is not None and len(request.FILES)>0:
        files = request.FILES
        uploaded_file = files['file']
        unique_folder = "/media/" + str(uuid4())
        # Saving File to media folder.
        fs = FileSystemStorage(location=ROOT + unique_folder)
        filename = fs.save(uploaded_file.name, uploaded_file)
        uploaded_file_url = str(unique_folder + "/" + filename).replace("%20", " ")
        extn = str(str(filename).rsplit(".", 1)[1])

        if extn in ('owl', 'rdf'):
            store_entity_definitions(str(os.getcwd()) + uploaded_file_url, get_solution_from_session(request))

        # Adding file to bucket
        resp = post_s3(str(filename), ROOT + uploaded_file_url, AMAZON_AWS_BUCKET, AMAZON_AWS_KEY_PATH)

        # Sending file info to platform
        resp_api = post_to_ms(str(filename), resp['url'], extn, get_solution_from_session(request))

        # Formatting data for insert
        data = create_data(dict(request.POST), filename, resp['url'], resp['key'], extn, get_solution_from_session(request))
        data['created_ts'] = datetime.now()

        data['is_published'] = False
        if resp_api['status'] == 'success' and resp_api['status_code'] == 200:
            data['is_published'] = True
        services = get_file_contents('platform_services.json')
        if services is not None:
            platform_services = dict()
            for key in services.keys():
                platform_services[key] = {'enabled': False}
            data['services'] = platform_services
        MongoDbConn.insert(TRAINING_SET_COLLECTION, data)
        # create_training_data_services(data)

        return {'status': 'success', 'msg': 'Resource uploaded to library'}
    elif request.method == 'POST':
        payload = json.loads(request.body.decode())
        data = payload['data']
        status = False
        if '_id' in data.keys():
            status = update_training_data(data)
        if status:
            return {'status': 'success', 'status_code': 200, 'msg': 'Updated training set'}
        else:
            return {'status': 'failure', 'status_code': 500, 'msg': 'Failed to update training set'}
    elif request.method == 'GET':
        delete_files()
        return training_set_get(TRAINING_SET_COLLECTION, dict(), get_solution_from_session(request))

    elif request.method == 'DELETE':
        payload = json.loads(request.body.decode())
        return training_set_delete(get_solution_from_session(request), payload)
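post_s3 in the example above is a project helper whose implementation is not shown. For orientation, a minimal upload helper with a compatible return shape could be sketched with boto3 as follows; the function name, the bucket-style URL, and the {'url', 'key'} return dict are assumptions based on how resp is used above.

# Hypothetical post_s3-style helper using boto3; relies on ambient AWS credentials.
import os
import boto3


def post_s3(file_name, local_path, bucket, key_prefix):
    s3 = boto3.client("s3")
    key = os.path.join(key_prefix, file_name)
    s3.upload_file(local_path, bucket, key)  # upload the file saved by FileSystemStorage
    url = "https://{0}.s3.amazonaws.com/{1}".format(bucket, key)
    return {"url": url, "key": key}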
Example #3
def generate_preassigned_s3_url(request):
    context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    context.start_span(component=__name__)
    try:
        if request.method == "POST":
            payload = json.loads(request.body.decode())
            method = payload.get("method", "GET")
            content_type = payload.get("content_type", "application/json")
            file_metadata = payload.get("file_metadata")
            solution_id = payload["solution_id"] if "solution_id" in payload else get_solution_from_session(request)
            if method == 'GET':
                key = payload["file_path"]
                url = StorageHandler.presigned_get_url(AMAZON_AWS_BUCKET, key)
                return {"status": "success", "solution_id": solution_id,
                        "aws_url": url, "status_code": STATUS_CODES["OK"]}
            upload_type = payload["upload_type"]
            file_name = payload["file_name"]
            file_name = "".join(file_name.split())
            file_id = str(uuid4())
            key = os.path.join(solution_id, "console", upload_type, file_id, file_name)
            if method == 'POST':
                url = StorageHandler.presigned_post_url(AMAZON_AWS_BUCKET, key)
                MongoDbConn.insert(UPLOADED_FILE_COLLECTION, {"solution_id": solution_id,
                                                              "key": key, "upload_type": upload_type,
                                                              "file_name": file_name,
                                                              "update_ts": datetime.utcnow().isoformat()})
                return {"status": "success", "solution_id": solution_id,
                        "aws_url": url, "status_code": STATUS_CODES["OK"]}
            elif method == 'PUT':
                url = StorageHandler.generate_presigned_url_to_upload(AMAZON_AWS_BUCKET, key,
                                                                      content_type=content_type,
                                                                      file_metadata=file_metadata)
                MongoDbConn.insert(UPLOADED_FILE_COLLECTION, {"solution_id": solution_id,
                                                              "key": key, "upload_type": upload_type,
                                                              "file_name": file_name,
                                                              "update_ts": datetime.utcnow().isoformat()})
                return {"status": "success", "solution_id": solution_id,
                        "aws_url": url, "status_code": STATUS_CODES["OK"]}
        else:
            return {"status": "failure", "error": "Request type not supported",
                    "status_code":STATUS_CODES['METHOD_NOT_ALLOWED']}
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {"status": "failure", "error": e,
                "status_code":STATUS_CODES["INTERNAL_SERVER_ERROR"]}
    finally:
        context.end_span()
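StorageHandler in the example above is another project wrapper. Presigned GET and PUT URLs of this kind are normally produced with boto3's generate_presigned_url; the sketch below shows roughly what such helpers might look like, with the helper names and expiry time being assumptions rather than the project's actual API.

# Hedged sketch of presigned-URL helpers using boto3.
import boto3

s3 = boto3.client("s3")


def presigned_get_url(bucket, key, expires_in=3600):
    # time-limited URL that allows a GET of the object
    return s3.generate_presigned_url(
        "get_object", Params={"Bucket": bucket, "Key": key}, ExpiresIn=expires_in)


def presigned_put_url(bucket, key, content_type="application/json", expires_in=3600):
    # time-limited URL that allows a PUT; the uploader must send the same Content-Type
    return s3.generate_presigned_url(
        "put_object",
        Params={"Bucket": bucket, "Key": key, "ContentType": content_type},
        ExpiresIn=expires_in)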
Example #4
def insert_threshold(request, solution_id, workflow_id, task_id, threshold_id):
    try:
        if request.method == 'POST':
            payload = json.loads(request.body.decode())
            query = {"solution_id": solution_id}
            wf_data_dict = MongoDbConn.find_one(WORKFLOW_COLLECTION, query)
            for value, soln_id in zip(payload, wf_data_dict["case_object"]):
                if value == soln_id['variable_id']:
                    soln_id.update({"thresholds": payload[value]})
                    MongoDbConn.insert('thresholds_data', payload)
                    return {"response": "success"}
            return {"response": "failure", "msg": "no matching variable found"}
        else:
            return {"response": "method not supported"}
    except Exception as e:
        return {"response": "failure", "error": str(e)}
Example #5
def insert_chart(chart):
    MongoDbConn.insert(DASHBOARD_CONFIG, chart)
    return chart['chart_id']
Example #6
 def save_sources(self, payload, solution_id):
     """
     This function will save the sources in the MongoDB
     and return the dictionary as response
     :param payload: Request payload
     :param solution_id: session Solution Id
     :return: dictionary as response
     """
     try:
         name = ''
         if 'name' in payload:
             name = payload['name']
         if 'source_type' in payload:
             self.source_type = payload['source_type']
         if self.source_type == 'email':
             query = {
                 'solution_id': solution_id,
                 'name': name,
                 'is_deleted': False,
                 'source_type': self.source_type
             }
             projection = {'_id': 0}
             source_recs = MongoDbConn.find_one(SOURCES_COLLECTION,
                                                query,
                                                projection=projection)
             if source_recs:
                 return {
                     'status': 'failure',
                     'status_code': STATUS_CODES['INTERNAL_SERVER_ERROR'],
                     'msg': 'Email source with name ' + name + ' is already present in this solution.'
                 }
         payload = self.update_payload(payload)
         # Trigger pipeline when source is uploaded
         pipeline_name = PIPELINE_VARIABLES["FILE_SOURCE"]
         pipeline_payload = {
             "data": {
                 "file_path": payload['file_path'],
                 "pipeline_name": pipeline_name,
                 "workflow_id": payload['workflow_id']
             },
             "solution_id": solution_id
         }
         resp = post(
             API_GATEWAY_POST_JOB_URI + PIPELINE["TRIGGER_PIPELINE"],
             pipeline_payload)
         if resp['status'] == 'success':
             MongoDbConn.insert(SOURCES_COLLECTION, payload)
             return {
                 'status': 'success',
                 'status_code': STATUS_CODES['OK'],
                 'msg': 'Source ' + name + ' saved successfully in this solution; the pipeline has been triggered.'
             }
         else:
             return {
                 'status': 'failure',
                 'status_code': STATUS_CODES['BAD_REQUEST'],
                 'msg': 'Source ' + name + ' was not saved in this solution; the pipeline was not triggered.'
             }
     except Exception as e:
         self.context.log(message=str(e),
                          obj={"tb": traceback.format_exc()})
         return {
             'status': 'failure',
             'status_code': STATUS_CODES['INTERNAL_SERVER_ERROR'],
             'msg': 'Failed to save the source/s information.'
         }
Example #7
def insert_rules_data(rules_data=None):
    MongoDbConn.insert(RULES_COLLECTION, rules_data)
    return rules_data['solution_id']
Example #8
def insert_template_sample_data(template_sample):
    MongoDbConn.insert(TEMPLATE_TRAIN_SAMPLES_COLLECTION, template_sample)
    return template_sample['solution_id']
Example #9
def post_document_var(data=doc_var):
    MongoDbConn.insert(DOCUMENTS_COLLECTION, data)
    return "done"
Example #10
 def store_insight_templates(self, insight_template):
     MongoDbConn.insert(self.collection_name, insight_template)
Example #11
 def create_solution(soln):
     context = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
     context.start_span(component=__name__)
     try:
         # verify solution name already exists
         soln_name = soln["solution_name"]
         if re.match(r'^[a-zA-Z0-9\s]+$', soln_name):
             soln_ref_name = str(soln_name).lower().replace(' ', '-')
         else:
             return {
                 "status": "failure",
                 "msg": "invalid characters in solution name"
             }
         soln_ref_id = soln_ref_name + "_" + str(uuid.uuid4())
         soln_exists = None
         try:
             query = {'solution_name': soln_name, 'is_deleted': False}
             projection = {'_id': 0}
             soln_exists = MongoDbConn.find_one(SOLUTION_COLLECTION,
                                                query,
                                                projection=projection)
         except Exception as e:
             context.log(message=str(e), obj={"tb": traceback.format_exc()})
             return {
                 'status': 'failure',
                 'msg': 'Error occurred while creating solution'
             }
         if soln_exists:
             return {
                 'status': 'failure',
                 'msg': soln['solution_name'] + ' - solution name already exists'
             }
         else:
             # Initialising Nifi
             if "is_pipeline" in soln and soln["is_pipeline"]:
                 create_nifi_pipeline_config(soln_ref_id, soln_ref_name)
             data_dict = {
                 'solution_id': soln_ref_id,
                 'solution_name': soln_name,
                 'solution_type': soln["solution_type"],
                 'description': soln["description"],
                 'is_deleted': False,
                 'created_ts': datetime.utcnow().isoformat(),
                 'updated_ts': datetime.utcnow().isoformat(),
                 'hocr_type': 'XPMS'
             }
             resp = SolutionService.create_default_caseflow(
                 soln_ref_id, context)
             if resp['status'] == 'success':
                 wf_msg = 'Default Workflow, BPMN and Queues have been created.'
             else:
                 wf_msg = 'Error while creating default Workflow, BPMN and Queues.'
             MongoDbConn.insert(SOLUTION_COLLECTION, data_dict)
             try:
                 RuleManager().process("saveDefaultRules",
                                       {"solution_id": soln_ref_id})
             except Exception as e:
                 context.log(message=str(e),
                             obj={"tb": traceback.format_exc()})
             # Initialising Services
             SolutionService().solution_trigger(soln_ref_id)
             return {
                 'status': 'success',
                 'msg': soln['solution_name'] + ' - solution has been created. ' + wf_msg
             }
     except Exception as e:
         context.log(message=str(e), obj={"tb": traceback.format_exc()})
         return {"status": "failure", "msg": str(e)}
     finally:
         context.end_span()
Example #12
def post_case_queue_var(data=case_queue_var):
    MongoDbConn.insert(CASE_QUEUE_COLLECTION, data)
    return "done"
Example #13
def process_action_default(solution_id, payload, request):
    training_set_id = payload['training_set_id']
    training_set = MongoDbConn.find_one(TRAINING_SET_COLLECTION,
                                        {'_id': ObjectId(training_set_id)})
    if training_set is not None:
        # post job
        uri = TRAINING_SET_TRAIN_MODEL_URI
        if payload['type'] == 'action_classifier':
            uri = TRAINING_SET_ACTION_CLASSIFIER_URI
        file_name = AMAZON_AWS_KEY_PATH + training_set['file_name']
        if 's3_key' in training_set.keys():
            file_name = training_set['s3_key']
        response = post_train_model_job(solution_id, payload, file_name, uri)
        is_published = False
        if not is_request_timeout(response):
            if is_message_published(response):

                data = dict()
                data['name'] = payload['name']
                data['model_id'] = response['result']['result']['metadata']['model_id']
                data['description'] = payload.get('description', '')
                data['type'] = payload['type']
                data['solution_id'] = get_solution_from_session(request)
                data['model_ref'] = []
                data['model_ref'].append({
                    'bucket_name': AMAZON_AWS_BUCKET,
                    'key_name': AMAZON_AWS_KEY_PATH + training_set['file_name']
                })
                data['is_published'] = is_published
                data['service'] = payload['service']
                data['created_ts'] = datetime.now()
                data['updated_ts'] = datetime.now()
                data['is_enabled'] = False
                data['is_deleted'] = False
                MongoDbConn.insert(TRAINING_SET_MODELS_COLLECTION, data)
                return {
                    'status': 'success',
                    'msg': payload['name'] + ' model created'
                }
            else:
                status = response['result']['status']
                return {
                    'status': 'failure',
                    'msg': 'Error from service while creating model',
                    'error': status['message']
                }
        else:
            return {
                'status': 'failure',
                'msg': 'Service is not running or taking more time to process',
                'error': response
            }
    else:
        return {
            'status': 'failure',
            'msg': 'Selected training set is not available'
        }
Example #14
def custom_threshold(request,
                     solution_id=None,
                     workflow_id=None,
                     task_id=None):
    """this function will handle all the GET, POST and PUT methods
    from the API call to Update Threshold values"""
    context = tracer.get_context(request_id=str(uuid4()), log_level="ERROR")
    context.start_span(component=__name__)
    try:
        if request.method == 'GET':
            query = {
                'task_id': task_id,
                'solution_id': solution_id,
                'workflow_id': workflow_id,
                'is_deleted': False
            }
            projection = {'_id': 0}
            thresholds = MongoDbConn.find_one(THRESHOLD_COLLECTION,
                                              query,
                                              projection=projection)
            if not thresholds:
                thresholds = {}
            return {
                "status": "success",
                "data": thresholds,
                'status_code': STATUS_CODES['OK'],
                'msg': 'Threshold fetched successfully.'
            }
        elif request.method == 'POST':
            try:
                payload = json.loads(request.body.decode())
            except Exception:
                payload = request.POST
            query = {
                'task_id': task_id,
                'solution_id': solution_id,
                'workflow_id': workflow_id,
                'is_deleted': False
            }
            projection = {'_id': 0}
            thresholds = MongoDbConn.find_one(THRESHOLD_COLLECTION,
                                              query,
                                              projection=projection)
            if thresholds:
                return {
                    'status': 'failure',
                    'status_code': STATUS_CODES['FOUND'],
                    'msg': 'Threshold already available for this configuration.'
                }
            data_dict = dict()
            data_dict.update({
                'created_ts': datetime.utcnow().isoformat(),
                'updated_ts': datetime.utcnow().isoformat(),
                'is_deleted': False,
                'solution_id': solution_id,
                'workflow_id': workflow_id,
                'task_id': task_id,
                'thresholds': payload['threshold'],
                'applicable': payload['applicable']
            })
            try:
                MongoDbConn.insert(THRESHOLD_COLLECTION, data_dict)
                return {
                    "status": "success",
                    'status_code': STATUS_CODES['OK'],
                    'msg': 'Threshold saved successfully.'
                }
            except Exception as e:
                context.log(message=str(e), obj={"tb": traceback.format_exc()})
                return {
                    'status': 'failure',
                    'status_code': STATUS_CODES['FORBIDDEN'],
                    'msg': 'Not able to save the threshold.'
                }
        elif request.method == 'PUT':
            try:
                payload = json.loads(request.body.decode())
            except Exception:
                payload = request.POST
            payload.update({'updated_ts': datetime.utcnow().isoformat()})
            query = {
                'task_id': task_id,
                'solution_id': solution_id,
                'workflow_id': workflow_id,
                'is_deleted': False
            }
            projection = {'_id': 0}
            thresholds = MongoDbConn.find_one(THRESHOLD_COLLECTION,
                                              query,
                                              projection=projection)
            if not thresholds:
                return {
                    'status': 'failure',
                    'status_code': STATUS_CODES['NOT_FOUND'],
                    'msg': 'Threshold record not available for this configuration.'
                }
            thresholds['thresholds'] = payload['threshold']
            thresholds['applicable'] = payload['applicable']
            thresholds.update({'updated_ts': datetime.utcnow().isoformat()})
            MongoDbConn.update(THRESHOLD_COLLECTION, query, thresholds)
            return {
                "status": "success",
                'status_code': STATUS_CODES['OK'],
                'msg': 'Threshold updated successfully.'
            }
        elif request.method == 'DELETE':
            query = {
                'task_id': task_id,
                'solution_id': solution_id,
                'workflow_id': workflow_id
            }
            projection = {'_id': 0}
            th_recs = MongoDbConn.find_one(THRESHOLD_COLLECTION,
                                           query,
                                           projection=projection)
            if th_recs:
                th_recs['is_deleted'] = True
                th_recs['updated_ts'] = datetime.utcnow().isoformat()
                MongoDbConn.update(THRESHOLD_COLLECTION, query, th_recs)
                return {
                    "status": "success",
                    'status_code': STATUS_CODES['OK'],
                    'msg': 'Threshold deleted successfully.'
                }
            else:
                return {
                    "status": "failure",
                    'status_code': STATUS_CODES['OK'],
                    'msg': 'Threshold not available.'
                }
    except Exception as e:
        context.log(message=str(e), obj={"tb": traceback.format_exc()})
        return {
            'status': 'failure',
            'msg': 'Internal error occurred while processing the threshold request.',
            'status_code': STATUS_CODES['INTERNAL_SERVER_ERROR'],
            'error': str(e)
        }
Example #15
def insert_resource_data(resource_data=None):
    MongoDbConn.insert(RESOURCES_COLLECTION, resource_data)
    return resource_data['resource_id']
Example #16
def post_test_entity(data=add_test_entity):
    MongoDbConn.insert(ENTITY_COLLECTION, data)
    return "done"
Example #17
def post_test_doc_temp(data=doc_temp_var):
    MongoDbConn.insert(TEMPLATE_COLLECTION, data)
    return "done"
Example #18
def post_test_sec_doc(data=test_section_doc):
    MongoDbConn.insert(SECTIONS_COLLECTION, data)
    return "done"
Example #19
def post_test_mapping_doc(data=test_mapping_doc):
    MongoDbConn.insert(MAPPING_COLLECTION, data)
    return "done"