def view_search_describe_fit_score_solutions(request):
    """gRPC: Call from UI with params to Search, Describe, Fit, and Score solutions"""
    # Authenticate the caller; websocket pushes are keyed on the username.
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    user_obj = user_info.result_obj
    websocket_id = user_obj.username  # websocket pushes currently based on username
    user_id = user_obj.id

    # Parse the JSON body of the request.
    req_json_info = get_request_body_as_json(request)
    if not req_json_info.success:
        return JsonResponse(get_json_error(req_json_info.err_msg))

    extra_params = {SESSION_KEY: get_session_key(request)}

    # Kick off the combined Search/Describe/Fit/Score call.
    search_info = SearchSolutionsHelper.make_search_solutions_call(
        req_json_info.result_obj, websocket_id, user_id, **extra_params)
    if not search_info.success:
        return JsonResponse(get_json_error(search_info.err_msg))

    return JsonResponse(get_json_success('success!', data=search_info.result_obj),
                        safe=False)
def view_score(request):
    """Start a scoring task for a previously built model."""
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))
    user_obj = user_info.result_obj

    raven_data_info = get_request_body_as_json(request)
    if not raven_data_info.success:
        # fixed: message previously said "solve" (copy/paste) and used an
        # f-string with no placeholders
        err_msg = 'request.body not found for score'
        return JsonResponse(get_json_error(err_msg))
    data = raven_data_info.result_obj

    websocket_id = user_obj.username
    model_id = data['model_id']
    specification = data['specification']

    # sanity check timeout
    # NOTE(review): "timeout" is validated but never forwarded to the task
    # handle below -- presumably the worker should enforce it; confirm intent
    if isinstance(data.get('timeout', None), (int, float)):
        timeout = min(max(data.get('timeout'), 0), TIMEOUT_MAX)
    else:
        timeout = TIMEOUT_DEFAULT

    # In DEBUG_MODE run synchronously; otherwise dispatch via celery.
    task_handle = tasks.score_task
    if not DEBUG_MODE:
        task_handle = task_handle.delay
    task_handle(websocket_id, model_id, specification)

    return JsonResponse({
        KEY_SUCCESS: True,
        KEY_MESSAGE: "score successfully started"
    })
def view_markup_image(request):
    """Markup an image based on a spec"""
    # The workspace (normally used for building the output directory)
    # is temporarily disabled for testing:
    #   user_workspace_info = get_latest_user_workspace(request)
    #   if not user_workspace_info.success:
    #       return JsonResponse(get_json_error(user_workspace_info.err_msg))
    #   user_workspace = user_workspace_info.result_obj
    user_workspace = None

    # Convert the image spec to a python OrderedDict
    req_info = get_request_body_as_json(request, login_required=False)
    if not req_info.success:
        return JsonResponse(get_json_error(req_info.err_msg))
    image_spec_json = req_info.result_obj

    # Create the output directory
    dir_info = create_image_output_dir(user_workspace)
    if dir_info.get(KEY_SUCCESS) is False:
        return JsonResponse(dir_info)

    # Mark up the image
    markup_info = markup_image(image_spec_json,
                               dir_info.get(KEY_DATA),
                               convert_name_to_url=True)
    return JsonResponse(markup_info)
def api_augment_async(request):
    """Run steps of augment, create new dataset folder structure, etc"""
    # Latest UserWorkspace for the requesting user.
    ws_info = get_latest_user_workspace(request)
    if not ws_info.success:
        return JsonResponse(get_json_error(
            'User workspace not found: %s' % ws_info.err_msg))
    user_workspace = ws_info.result_obj

    # Request body as a python dict.
    json_req_info = get_request_body_as_json(request)
    if not json_req_info.success:
        return JsonResponse(get_json_error(json_req_info.err_msg))
    augment_params = json_req_info.result_obj

    # Hand the augment work off to the datamart task layer.
    augment_info = dm_tasks.make_augment_call(user_workspace, augment_params)
    if not augment_info.success:
        return JsonResponse(get_json_error(augment_info.err_msg))

    return JsonResponse(get_json_success(augment_info.result_obj))
def view_end_ta3_search(request):
    """End the D3M search via the UI"""
    success, info_dict = get_request_body_as_json(request)
    if not success:
        return JsonResponse(dict(success=False,
                                 message="No JSON info found in request."))

    # Both a shutdown message and an is_success flag are required.
    if KEY_MESSSAGE not in info_dict:
        return JsonResponse(dict(
            success=False,
            message="No '%s' found in request." % KEY_MESSSAGE))

    if 'is_success' not in info_dict:
        return JsonResponse(dict(
            success=False,
            message="No 'is_success' found in request."))

    is_success = info_dict['is_success']
    if is_success not in [True, False]:
        return JsonResponse(dict(
            success=False,
            message="'is_success' must be a boolean (true/false)"))

    # send appropriate message to the listeners
    MessageUtil.send_shutdown_message(info_dict[KEY_MESSSAGE],
                                      is_success=is_success)

    return JsonResponse(dict(success=True, message='shutdown message sent'))
def view_write_user_problem(request):
    """Format the user problem and write it to a file
    - Pull the current D3M config and update it based on the info provided
    """
    success, dict_info_or_err = get_request_body_as_json(request)
    if not success:
        return JsonResponse(dict(success=False, message=dict_info_or_err))
    problem_updates = dict_info_or_err

    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    # The helper does the formatting and file-writing work.
    problem_helper = UserProblemHelper(user_info.result_obj, problem_updates)
    if problem_helper.has_error:
        return JsonResponse(dict(success=False,
                                 message=problem_helper.error_message))

    payload = dict(filepath=problem_helper.problem_filepath,
                   fileuri=problem_helper.problem_file_uri)
    return JsonResponse(dict(success=True,
                             message=problem_helper.get_success_message(),
                             data=payload))
def api_index(request):
    """Upload indices to the datamart selected by the 'source' field."""
    success, json_req_obj = get_request_body_as_json(request)
    if not success:
        return JsonResponse({"success": False,
                             "error": get_json_error(json_req_obj)})

    # check if data is valid
    form = dm_forms.DatamartIndexForm(json_req_obj)
    if not form.is_valid():
        return JsonResponse({"success": False,
                             "message": "invalid input",
                             "errors": form.errors})

    # Retrieve the appropriate DatamartJobUtil
    job_util_info = get_datamart_job_util(form.cleaned_data['source'])
    if not job_util_info.success:
        return JsonResponse(get_json_error(job_util_info.err_msg))

    # e.g. DatamartJobUtilISI, DatamartJobUtilNYU
    DatamartJobUtil = job_util_info.result_obj

    success, results_obj_err = DatamartJobUtil.datamart_upload(
        json_req_obj['indices'])

    return JsonResponse({"success": success, "data": results_obj_err})
def view_format_retrieve_user_problem(request):
    """Format the user problem and return the doc
    (instead of writing to file)
    """
    success, dict_info_or_err = get_request_body_as_json(request)
    if not success:
        return JsonResponse(get_json_error(dict_info_or_err))
    problem_updates = dict_info_or_err

    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    # save_schema_to_file=False: build the doc in memory only.
    problem_helper = UserProblemHelper(user_info.result_obj,
                                       problem_updates,
                                       save_schema_to_file=False)
    if problem_helper.has_error:
        return JsonResponse(dict(success=False,
                                 message=problem_helper.error_message))

    return JsonResponse(dict(
        success=True,
        message=problem_helper.get_success_message(),
        data=dict(new_problem_doc=problem_helper.new_problem_doc)))
def api_get_metadata(request):
    """Get metadata using the ISI Datamart"""
    req_info = get_request_body_as_json(request)
    if not req_info.success:
        return JsonResponse(get_json_error(req_info.err_msg))
    json_req_obj = req_info.result_obj

    # check if data is valid
    form = dm_forms.DatamartCustomForm(json_req_obj)
    if not form.is_valid():
        # fixed: validation failures were previously wrapped in
        # get_json_success, reporting success on invalid input;
        # sibling endpoints (api_materialize, api_scrape) use get_json_error
        return JsonResponse(
            get_json_error('invalid input', errors=form.errors.as_json()))

    metadata_info = DatamartJobUtilISI.datamart_get_metadata(
        json_req_obj['custom'])

    if not metadata_info.success:
        json_resp = get_json_error(metadata_info.err_msg)
    else:
        json_resp = get_json_success('it worked',
                                     data=metadata_info.result_obj)
    return JsonResponse(json_resp)
def api_search_event_data_queries(request):
    """Search about models data (query data)

    sample input:
        {"name": "query1",
         "description": "query desc",
         "username": "******"}
    """
    # Searching saved queries requires a logged-in user.
    if not request.user.is_authenticated:
        return JsonResponse(get_json_error('You must be logged in.'),
                            status=403)

    json_info = get_request_body_as_json(request)
    if not json_info.success:
        return JsonResponse(get_json_error(json_info.err_msg))

    # check if json is empty
    json_data = json_info.result_obj

    search_results = EventJobUtil.search_objects(request.user, json_data)
    if not search_results.success:
        return JsonResponse(get_json_error(search_results.err_msg))

    user_info = get_json_success('results found!',
                                 data=search_results.result_obj)

    # An optional "?pretty" query param returns an HTML-formatted dump.
    if 'pretty' in request.GET:
        fmt_info = format_pretty_from_dict(user_info)
        if not fmt_info.success:
            return JsonResponse(get_json_error(fmt_info.err_msg))
        return HttpResponse('<pre>%s</pre>' % fmt_info.result_obj)

    return JsonResponse(user_info)
def view_solve(request):
    """Start an async solve for the posted specification/system."""
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))
    user_obj = user_info.result_obj

    raven_data_info = get_request_body_as_json(request)
    if not raven_data_info.success:
        err_msg = "request.body not found for solve"
        return JsonResponse(get_json_error(err_msg))
    data = raven_data_info.result_obj

    # workspace
    user_workspace_info = get_latest_user_workspace(request)
    if not user_workspace_info.success:
        return JsonResponse(get_json_error(user_workspace_info.err_msg))
    user_workspace = user_workspace_info.result_obj

    websocket_id = user_obj.username
    specification = data['specification']
    system_id = data['system']
    system_params = data.get('system_params', {})

    # create a location where the solver may write to disk
    dest_dir_info = create_destination_directory(user_workspace,
                                                name='solver_scratch_space')
    if not dest_dir_info[KEY_SUCCESS]:
        # fixed: was "dest_dir_info.err_msg" -- dest_dir_info is accessed as
        # a dict (KEY_SUCCESS/KEY_DATA), so attribute access raised
        # AttributeError on this error path
        return JsonResponse(get_json_error(dest_dir_info[KEY_MESSAGE]))
    dest_directory = dest_dir_info[KEY_DATA]
    specification['temp_directory'] = dest_directory

    # TODO: timeout on celery worker
    # sanity check timeout
    # NOTE(review): timeout is validated but not yet passed to the task
    if isinstance(data.get('timeout', None), (int, float)):
        timeout = min(max(data.get('timeout'), 0), TIMEOUT_MAX)
    else:
        timeout = TIMEOUT_DEFAULT

    search_id = Search.get_search_id()

    # In DEBUG_MODE run synchronously; otherwise dispatch via celery.
    task_handle = tasks.solve_task
    if not DEBUG_MODE:
        task_handle = task_handle.delay
    task_handle(websocket_id, system_id, specification,
                system_params, search_id)

    return JsonResponse({
        KEY_SUCCESS: True,
        KEY_MESSAGE: "solve successfully started",
        KEY_DATA: {"search_id": search_id}
    })
def view_export_solutions(request):
    """Write the problem doc, data splits, and per-solution outputs to a
    timestamped directory under ~/automl_scores for export.

    Returns the directory path (todo: should be a zip).
    """
    req_body_info = get_request_body_as_json(request)
    if not req_body_info.success:
        return JsonResponse(get_json_error(req_body_info.err_msg))
    data = req_body_info.result_obj

    problem = data['problem']
    dataset_name = data['dataset_name']

    ZIP_OUTPUT_DIRECTORY = os.path.join(
        os.path.expanduser('~/automl_scores'),
        dataset_name,
        datetime.datetime.now().strftime("%d-%m-%Y %H:%M:%S"))

    # exist_ok=True avoids the check-then-create race of exists()+makedirs()
    os.makedirs(ZIP_OUTPUT_DIRECTORY, exist_ok=True)

    # write the problem doc
    with open(os.path.join(ZIP_OUTPUT_DIRECTORY,
                           problem['problemID'] + '.json'), 'w') as outfile:
        json.dump(problem, outfile, sort_keys=True, indent=4,
                  separators=(',', ': '))

    # copy dataset level data
    splits_dir = os.path.join(ZIP_OUTPUT_DIRECTORY, 'splits')
    os.makedirs(splits_dir, exist_ok=True)
    for data_split in problem['datasetPaths']:
        shutil.copyfile(problem['datasetPaths'][data_split],
                        os.path.join(splits_dir, data_split + '.csv'))

    # copy solution level data
    solutions_dir = os.path.join(ZIP_OUTPUT_DIRECTORY, 'solutions')
    os.makedirs(solutions_dir, exist_ok=True)

    solutions_summaries = data['solutions']
    for solution in solutions_summaries:
        # build each solution folder
        output_dir = os.path.join(
            solutions_dir,
            f"{solution['systemId']}-{solution['solutionId']}")
        os.makedirs(output_dir, exist_ok=True)

        with open(os.path.join(output_dir, 'summary.json'), 'w') as outfile:
            json.dump(solution['solution'], outfile)

        # outputs are file:// URIs; strip the scheme before copying
        for output in solution['outputs']:
            shutil.copyfile(
                output['output'].replace('file://', ''),
                os.path.join(
                    output_dir,
                    f'{output["name"]}_{output["predict type"]}.csv'))

    # todo: return path to zip, not a directory to be zipped
    return JsonResponse({
        'success': True,
        'data': ZIP_OUTPUT_DIRECTORY
    })
def view_create_log_entry(request, is_verbose=False):
    """Make log entry endpoint"""
    # Only logged-in users may create behavioral-log entries.
    user_info = get_authenticated_user(request)
    if not user_info.success:
        user_msg = 'Can only log entries when user is logged in.'
        return JsonResponse(get_json_error(user_msg))
    user = user_info.result_obj
    session_key = get_session_key(request)

    # ----------------------------------------
    # Get the log data
    # ----------------------------------------
    json_info = get_request_body_as_json(request)
    if not json_info.success:
        return JsonResponse(get_json_error(json_info.err_msg))

    log_data = json_info.result_obj
    # Attach the caller's session key to the entry.
    log_data.update(dict(session_key=session_key))

    # Default the L2 activity to the blank/unknown value when absent.
    if not bl_static.KEY_L2_ACTIVITY in log_data:
        log_data[bl_static.KEY_L2_ACTIVITY] = bl_static.L2_ACTIVITY_BLANK

    # Note: this form is also used by the LogEntryMaker
    # - redundant but ok for now, want to return form errors
    #   in a separate field
    f = BehavioralLogEntryForm(log_data)
    if not f.is_valid():
        print('nope: %s' % f.errors)
        user_msg = 'Error found in log entry.'
        return JsonResponse(get_json_error(user_msg, errors=f.errors))

    # Persist the entry; log_data['type'] selects the entry type.
    log_create_info = LogEntryMaker.create_log_entry(
        user, log_data['type'], log_data)
    if not log_create_info.success:
        return JsonResponse(get_json_error(log_create_info.err_msg))

    user_msg = 'Log entry saved!'
    if is_verbose:
        # Verbose mode echoes the saved entry back to the caller.
        return JsonResponse(get_json_success(
            user_msg, data=log_create_info.result_obj.to_dict()))

    return JsonResponse(get_json_success(user_msg))
def api_add_event_data_query(request):
    """Add an EventDataSavedQuery to the database

    Example JSON included in the body of the request:
        {
          "name": "User entered query name",
          "description": "In this query I am ....",
          "query": ["... mongo query, either list or dict ..."],
          "collection_name": "acled_africa",
          "collection_type": "subset",
          "result_count": 161939
        }
    """
    # Saved queries are tied to a logged-in user.
    if not request.user.is_authenticated:
        user_msg = 'You must be logged in.'
        return JsonResponse(get_json_error(user_msg), status=403)

    json_info = get_request_body_as_json(request)

    # if json is not valid
    if not json_info.success:
        return JsonResponse(get_json_error(json_info.err_msg))

    # Validate form results
    event_data_info = json_info.result_obj
    event_data_info['user'] = request.user.id  # attach the owner's id

    frm = EventDataSavedQueryForm(event_data_info)
    if not frm.is_valid():
        user_msg = dict(success=False,
                        message='Invalid input',
                        errors=frm.errors)
        return JsonResponse(user_msg)

    # Save the object
    saved_query = EventDataSavedQuery(**frm.cleaned_data)
    try:
        saved_query.save()
    except IntegrityError:
        # rare to get here--maybe simultaneous saves...
        user_msg = EventDataSavedQueryForm.get_duplicate_record_error_msg()
        return JsonResponse(get_json_error(user_msg))

    ok_info = get_json_success('Query saved!', data=saved_query.as_dict())
    return JsonResponse(ok_info)
def api_materialize(request):
    """Run materialize using either ISI or NYU"""
    success, json_req_obj = get_request_body_as_json(request)
    if not success:
        return JsonResponse(get_json_error(json_req_obj))

    # ----------------------------------------
    # Get the latest UserWorkspace
    # - later used for logging
    # ----------------------------------------
    ws_info = get_latest_user_workspace(request)
    if not ws_info.success:
        user_msg = 'User workspace not found: %s' % ws_info.err_msg
        return JsonResponse(get_json_error(user_msg))
    user_workspace = ws_info.result_obj

    # --------------------------------------
    # check the data
    # --------------------------------------
    form = dm_forms.DatamartMaterializeForm(json_req_obj)
    if not form.is_valid():
        # NOTE(review): debug print left in place
        print('form.errors.as_json()', form.errors.as_json())
        return JsonResponse(
            get_json_error("invalid input", errors=form.errors.as_json()))

    # --------------------------------------
    # Retrieve the correct datamart
    # --------------------------------------
    job_util_info = get_datamart_job_util(form.cleaned_data['source'])
    if not job_util_info.success:
        return JsonResponse(get_json_error(job_util_info.err_msg))

    # e.g. DatamartJobUtilISI, DatamartJobUtilNYU
    DatamartJobUtil = job_util_info.result_obj

    # --------------------------------------
    # Run datamart_materialize
    # --------------------------------------
    materialize_result = DatamartJobUtil.datamart_materialize(
        user_workspace, form.cleaned_data['search_result'])

    if not materialize_result.success:
        return JsonResponse(get_json_error(materialize_result.err_msg))

    return JsonResponse(
        get_json_success('it worked', data=materialize_result.result_obj))
def api_search(request): """Search the datamart with a JSON request. The 'source' will determine which datamart to search""" # for logging ws_info = get_latest_user_workspace(request) if not ws_info.success: user_msg = 'User workspace not found: %s' % ws_info.err_msg return JsonResponse(get_json_error(user_msg)) user_workspace = ws_info.result_obj success, json_req_obj = get_request_body_as_json(request) if not success: return JsonResponse(get_json_error(json_req_obj)) # check if data is valid form = dm_forms.DatamartSearchForm(json_req_obj) if not form.is_valid(): #print('form.errors', form.errors.as_json()) print('\ntype form.errors', type(form.errors.as_json())) json_errs = json.loads(form.errors.as_json()) err_msgs = [ dal['message'] for dval_list in json_errs.values() for dal in dval_list if 'message' in dal ] print('\nerr_msgs', err_msgs) json_err = get_json_error('Input error: %s' % ('. '.join(err_msgs))) return JsonResponse(json_err) # Retrieve the appropriate DatamartJobUtil # job_util_info = get_datamart_job_util(form.cleaned_data['source']) if not job_util_info.success: return JsonResponse(get_json_error(job_util_info.err_msg)) # e.g. DatamartJobUtilISI, DatamartJobUtilNYU DatamartJobUtil = job_util_info.result_obj #data_path = json_req_obj['data_path'] if 'data_path' in json_req_obj else None success, results_obj_err = DatamartJobUtil.datamart_search(\ form.cleaned_data['query'], **dict(user_workspace=user_workspace)) if not success: return JsonResponse(get_json_error(results_obj_err)) return JsonResponse(get_json_success('it worked', data=results_obj_err))
def view_R_preprocess(request):
    """Route to rook preprocess

    Example input:
        {"data": "/ravens_volume/test_data/196_autoMpg/TRAIN/dataset_TRAIN/tables/learningData.csv",
         "datastub": "196_ag_problem_TRAIN"}
    """
    # used for logging
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    json_info = get_request_body_as_json(request)
    if not json_info.success:
        return JsonResponse(get_json_error(json_info.err_msg))
    json_data = json_info.result_obj

    LOGGER.info('view_rook_preprocess input: %s', json_data)

    # Both the data path and the datastub are required keys.
    for req_key in (rook_static.KEY_DATA, rook_static.KEY_DATASTUB):
        if req_key not in json_data:
            err_msg = (f'The key "{req_key}" was not found'
                       f' in the preprocess request')
            return JsonResponse(get_json_error(err_msg))

    log_preprocess_call(user_info.result_obj, json_data,
                        get_session_key(request))

    putil = PreprocessUtil(json_data[rook_static.KEY_DATA],
                           datastub=json_data[rook_static.KEY_DATASTUB])
    if putil.has_error():
        return JsonResponse(get_json_error(putil.get_error_message()))

    info = get_json_success('it worked', data=putil.get_preprocess_data())
    return JsonResponse(info, encoder=RavenJSONEncoder)
def api_get_metadata(request):
    """get metadata (configs/formats/alignments)"""
    success, json_req_obj = get_request_body_as_json(request)
    if not success:
        return JsonResponse({"success": False,
                             "error": get_json_error(json_req_obj)})

    # check if data is valid
    form = EventDataGetMetadataForm(json_req_obj)
    if not form.is_valid():
        return JsonResponse({"success": False,
                             "message": "invalid input",
                             "errors": form.errors})

    # Fetch metadata only for the sections present in the request.
    metadata = {}
    for name in ['collections', 'formats', 'alignments']:
        if name in json_req_obj:
            metadata[name] = EventJobUtil.get_metadata(name,
                                                       json_req_obj[name])
    return JsonResponse(metadata)
def view_retrieve_d3m_ice_data(request):
    """Compute ICE importance results from the posted data pointers."""
    req_body_info = get_request_body_as_json(request)
    if not req_body_info.success:
        return JsonResponse(get_json_error(req_body_info.err_msg))

    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    req_info = req_body_info.result_obj
    ice_data = util_results_importance_ice(
        req_info['data_pointer_predictors'],
        req_info['data_pointer_fitted'],
        req_info['variable'])

    return JsonResponse({KEY_SUCCESS: True, KEY_DATA: ice_data})
def view_download(request):
    """Zip a saved model directory and return a file:// pointer to it."""
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))
    user_obj = user_info.result_obj

    raven_data_info = get_request_body_as_json(request)
    if not raven_data_info.success:
        # fixed: message previously said "solve" (copy/paste) and used an
        # f-string with no placeholders
        err_msg = 'request.body not found for download'
        return JsonResponse(get_json_error(err_msg))
    data = raven_data_info.result_obj

    model_id = data.get('model_id')
    # fixed: the guard was inverted ("if model_id:"), which returned the
    # "required field" error for every valid request and let a missing
    # model_id fall through to the path-building code below
    if not model_id:
        return JsonResponse({
            KEY_SUCCESS: False,
            KEY_MESSAGE: '"model_id" is a required field'
        })

    save_path = os.path.join(SAVED_MODELS_PATH, model_id)
    export_path = os.path.join(EXPORTED_MODELS_PATH, model_id + '.zip')

    if not os.path.exists(save_path):
        return JsonResponse({
            KEY_SUCCESS: False,
            KEY_MESSAGE: f'model "{model_id}" does not exist'
        })

    # exist_ok=True avoids the check-then-create race
    os.makedirs(EXPORTED_MODELS_PATH, exist_ok=True)

    # Build the archive once; subsequent downloads reuse it.
    if not os.path.exists(export_path):
        with zipfile.ZipFile(export_path, 'w', zipfile.ZIP_DEFLATED) as zfile:
            for root, dirs, files in os.walk(save_path):
                for file in files:
                    full_path = os.path.join(root, file)
                    # archive paths are relative to the model's parent dir
                    zfile.write(
                        full_path,
                        os.path.relpath(full_path,
                                        os.path.join(save_path, '..')))

    return JsonResponse({
        KEY_SUCCESS: True,
        KEY_DATA: {'model_pointer': 'file://' + export_path}
    })
def view_send_reviewer_message(request):
    """Send a message to the console"""
    success, info_dict = get_request_body_as_json(request)
    if not success:
        return JsonResponse(dict(success=False,
                                 message="No JSON info found in request."))

    if KEY_MESSSAGE not in info_dict:
        return JsonResponse(dict(
            success=False,
            message="No '%s' found in request." % KEY_MESSSAGE))

    # Forward the message to the console listeners.
    msg_to_send = info_dict.get(KEY_MESSSAGE)
    MessageUtil.send_message(msg_to_send)

    return JsonResponse(dict(success=True,
                             message='message sent: %s' % msg_to_send))
def view_retrieve_fitted_vs_actuals_data(request):
    """Return fitted-vs-actuals (real clustered) results for a data pointer."""
    req_body_info = get_request_body_as_json(request)
    if not req_body_info.success:
        return JsonResponse(get_json_error(req_body_info.err_msg))
    req_info = req_body_info.result_obj

    if KEY_DATA_POINTER not in req_info:
        return JsonResponse(get_json_error(
            'No key found: "%s"' % KEY_DATA_POINTER))

    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    results = util_results_real_clustered(req_info[KEY_DATA_POINTER],
                                          metadata=req_info['metadata'])
    return JsonResponse(results)
def api_search_by_dataset(request): """For search, submit the entire dataset. Return the calls async""" # (1) get the request body # success, json_req_obj = get_request_body_as_json(request) if not success: return JsonResponse(get_json_error(json_req_obj)) # (2) Get the latest UserWorkspace # ws_info = get_latest_user_workspace(request) if not ws_info.success: user_msg = 'User workspace not found: %s' % ws_info.err_msg return JsonResponse(get_json_error(user_msg)) user_workspace = ws_info.result_obj # (3) Which datamart? # form = dm_forms.DatamartSearchByDatasetForm(json_req_obj) if not form.is_valid(): print('form.errors.as_json()', form.errors.as_json()) return JsonResponse(\ get_json_error("invalid input", errors=form.errors.as_json())) # (4) Location of the current dataset # # augment is embedded within a manipulations pipeline, so the data path may be different dataset_path = json_req_obj['dataset_path'] # (5) Kick off async search # call_info = dm_tasks.make_search_by_dataset_call(\ form.cleaned_data['source'], user_workspace.id, dataset_path, query=json_req_obj.get('query', None)) if not call_info.success: return JsonResponse(get_json_error(call_info.err_msg)) return JsonResponse(get_json_success('Search by dataset has started!'))
def api_upload_metadata(request):
    """NOT TESTED - Use get metadata endpoint from ISI"""
    success, json_req_obj = get_request_body_as_json(request)
    if not success:
        return JsonResponse(get_json_error(json_req_obj))

    # Form validation is currently disabled:
    #   form = dm_forms.DatamartUploadForm(json_req_obj)
    #   if not form.is_valid():
    #       return JsonResponse({"success": False, "message": "invalid input",
    #                            "errors": form.errors})

    success, results_obj_err = DatamartJobUtilISI.datamart_get_metadata(
        json_req_obj['data'])

    if success:
        json_resp = get_json_success('it worked', data=results_obj_err)
    else:
        json_resp = get_json_error(results_obj_err)
    return JsonResponse(json_resp)
def view_retrieve_d3m_efd_data(request):
    """Expects a JSON request containing "data_pointer"

    For example: {"data_pointer": "file:///output/predictions/0001.csv"}
    """
    req_body_info = get_request_body_as_json(request)
    if not req_body_info.success:
        return JsonResponse(get_json_error(req_body_info.err_msg))
    req_info = req_body_info.result_obj

    if KEY_DATA_POINTER not in req_info:
        return JsonResponse(get_json_error(
            'No key found: "%s"' % KEY_DATA_POINTER))

    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    efd_results = util_results_importance_efd(req_info[KEY_DATA_POINTER],
                                              metadata=req_info['metadata'])
    return JsonResponse(efd_results)
def save_raven_config_to_existing_workspace(request, workspace_id):
    """Save a new raven config to an existing workspace

    POST request containing JSON with new request
    """
    # Get the workspace, checking that the requesting user owns it.
    ws_info = ws_util.get_saved_workspace_by_request_and_id(
        request, workspace_id)
    if not ws_info.success:
        return JsonResponse(get_json_error(ws_info.err_msg))
    user_workspace = ws_info.result_obj

    # Get the Ravens config from the POST
    update_info = get_request_body_as_json(request)
    if not update_info.success:
        return JsonResponse(get_json_error(update_info.err_msg))
    update_dict = update_info.result_obj

    # Require a present, non-empty 'raven_config' entry.
    raven_config = update_dict.get(uw_static.KEY_RAVEN_CONFIG)
    if not raven_config:
        user_msg = (f'The workspace could not be saved.'
                    f' (Please include Raven Config information'
                    f' using the key "{uw_static.KEY_RAVEN_CONFIG}")')
        return JsonResponse(get_json_error(user_msg))

    user_workspace.raven_config = raven_config
    user_workspace.save()

    return JsonResponse(get_json_success('Workspace saved.',
                                         data=user_workspace.to_dict()))
def get_partials_datasets(request):
    """Create partials (ICE) datasets for the caller's current workspace."""
    # request body
    req_body_info = get_request_body_as_json(request)
    if not req_body_info.success:
        return JsonResponse(get_json_error(req_body_info.err_msg))
    req_info = req_body_info.result_obj

    # workspace
    user_workspace_info = get_latest_user_workspace(request)
    if not user_workspace_info.success:
        return JsonResponse(get_json_error(user_workspace_info.err_msg))
    user_workspace = user_workspace_info.result_obj

    # user (authentication check only; result is not used below)
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))

    # Record a behavioral-log entry for this feature usage.
    activity_l1 = bl_static.L1_PROBLEM_DEFINITION
    activity_l2 = bl_static.L2_ACTIVITY_BLANK
    log_data = dict(session_key=get_session_key(request),
                    feature_id='PARTIALS_APP',
                    activity_l1=activity_l1,
                    activity_l2=activity_l2)
    LogEntryMaker.create_system_entry(user_workspace.user, log_data)

    # Best-effort boundary: any failure while building the datasets is
    # logged and reported to the client as a generic error, not a 500.
    try:
        response = create_partials_datasets(req_info, user_workspace.id)
    except Exception:
        print("caught traceback when creating ICE datasets:", flush=True)
        print(traceback.format_exc(), flush=True)
        response = {
            KEY_SUCCESS: False,
            KEY_MESSAGE: "Internal error while creating ICE datasets."
        }

    return JsonResponse(response)
def api_scrape(request):
    """Scrape a URL via the ISI datamart."""
    success, json_req_obj = get_request_body_as_json(request)
    if not success:
        return JsonResponse(get_json_error(json_req_obj))

    # check if data is valid
    form = dm_forms.DatamartScrapeForm(json_req_obj)
    if not form.is_valid():
        return JsonResponse(get_json_error("invalid input",
                                           errors=form.errors.as_json()))

    success, results_obj_err = DatamartJobUtilISI.datamart_scrape(
        json_req_obj['url'])

    if success:
        json_resp = get_json_success('it worked', data=results_obj_err)
    else:
        json_resp = get_json_error(results_obj_err)
    return JsonResponse(json_resp)
def view_search(request):
    """Start an async model search for the posted specification/system."""
    user_info = get_authenticated_user(request)
    if not user_info.success:
        return JsonResponse(get_json_error(user_info.err_msg))
    user_obj = user_info.result_obj

    raven_data_info = get_request_body_as_json(request)
    if not raven_data_info.success:
        # fixed: message previously said "solve" (copy/paste from view_solve)
        # and used an f-string with no placeholders
        err_msg = 'request.body not found for search'
        return JsonResponse(get_json_error(err_msg))
    data = raven_data_info.result_obj

    websocket_id = user_obj.username
    specification = data['specification']
    system_id = data['system']
    system_params = data.get('system_params', {})

    # sanity check timeout
    # NOTE(review): timeout is validated but not yet passed to the task
    if isinstance(data.get('timeout', None), (int, float)):
        timeout = min(max(data.get('timeout'), 0), TIMEOUT_MAX)
    else:
        timeout = TIMEOUT_DEFAULT

    print("timeout:", timeout)
    print(json.dumps(specification))
    print(json.dumps(system_params))

    search_id = Search.get_search_id()

    # fixed: run synchronously in DEBUG_MODE, matching the sibling
    # view_solve/view_score handlers (previously this always dispatched
    # via .delay, even in debug)
    task_handle = tasks.search_task
    if not DEBUG_MODE:
        task_handle = task_handle.delay
    task_handle(websocket_id, system_id, specification,
                system_params, search_id)

    return JsonResponse({
        KEY_SUCCESS: True,
        KEY_MESSAGE: "search successfully started",
        KEY_DATA: {"search_id": search_id}
    })
def api_get_eventdata(request):
    """general api to get event data"""
    success, json_req_obj = get_request_body_as_json(request)
    if not success:
        return JsonResponse({"success": False,
                             "error": get_json_error(json_req_obj)})

    # check if data is valid
    form = EventDataGetDataForm(json_req_obj)
    if not form.is_valid():
        return JsonResponse({"success": False,
                             "message": "invalid input",
                             "errors": form.errors})

    success, addquery_obj_err = EventJobUtil.get_data(
        settings.EVENTDATA_DB_NAME,
        json_req_obj['collection_name'],
        json_req_obj['method'],
        json.loads(json_req_obj['query']),
        json_req_obj.get('distinct', None),
        json_req_obj.get('host', None))

    if success:
        return JsonResponse({'success': success,
                             'data': json_comply(addquery_obj_err)})
    return JsonResponse(get_json_error(addquery_obj_err))