import importlib
import logging
import os


def batchRun(request, model):
    """
    Sets up and executes the model batch run.
    Returns: list of model output objects built by model_handler.generate_model_object_list()
    """
    batchoutputmodule = importlib.import_module('.' + model + '_batchoutput', 'models.' + model)
    # function name = 'model'BatchOutputPage (e.g. 'sipBatchOutputPage')
    batchOutputPageFunc = getattr(batchoutputmodule, model + 'BatchOutputPage')
    dataFrame = batchOutputPageFunc(request)
    dataFrame = dataFrame.transpose()
    logging.info("===== batch.batchRun")
    logging.info(dataFrame)

    # Convert DataFrame to JSON string
    json_inputs = dataFrame.to_json()

    # Add 'run_type': 'batch' to the JSON string
    json = '{"inputs":' + json_inputs + ',"run_type":"batch"}'
    # logging.info(json)

    # Send JSON to model_handler module
    from models import model_handler
    batch_output = model_handler.call_model_server(model, json)
    ModelList = model_handler.generate_model_object_list(batch_output)
    return ModelList
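
# Hedged illustration (not part of the original flow): for a hypothetical model
# named 'sip', the dynamic import/getattr in batchRun() above resolves to
# models/sip/sip_batchoutput.py and its sipBatchOutputPage() function, i.e.
# roughly equivalent to:
#
#     from models.sip import sip_batchoutput
#     dataFrame = sip_batchoutput.sipBatchOutputPage(request)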
def qaqcRun(model):
    """
    Sets up and executes the model QAQC run.
    Returns: list of model output objects built by model_handler.generate_model_object_list()
    """
    qaqcmodule = importlib.import_module('.' + model + '_qaqc', 'models.' + model)
    # modelQAQC_obj = getattr(qaqcmodule, model+'_obj')  # Calling model object, e.g. 'sip_obj'
    csv_path = os.path.join(os.environ['PROJECT_PATH'], 'models', model, model + '_qaqc.csv')
    modelQAQC_function = getattr(qaqcmodule, model + 'Qaqc')

    # Read CSV and create a DataFrame of inputs and one of expected outputs
    pandas_read_csv = modelQAQC_function(model, csv_path)
    pd_obj_inputs = pandas_read_csv[0]
    pd_obj_exp_out = pandas_read_csv[1]

    # Rename index column, renumber columns, and transpose the DataFrames
    # pd_obj_inputs.index.name = None
    # pd_obj_inputs.columns = pd_obj_inputs.columns - 1
    # pd_obj_exp_out.index.name = None
    # pd_obj_exp_out.columns = pd_obj_exp_out.columns - 1
    pd_obj_in_out_transpose = pd_obj_inputs.transpose()
    pd_obj_exp_out_transpose = pd_obj_exp_out.transpose()
    # logging.info(pd_obj_inputs)
    # logging.info(pd_obj_exp_out)
    # logging.info(pd_obj_in_out_transpose)
    # logging.info(pd_obj_exp_out_transpose)

    """
    The DataFrames are now in the correct format to be converted to JSON, but the
    dtype of every column is 'object' (text) because of the transpose(). When the
    DataFrames are recreated on the backend, the dtypes will be properly inferred
    by the read_json() method.
    """

    # Convert DataFrames to JSON strings
    json_inputs = pd_obj_in_out_transpose.to_json()
    json_exp_out = pd_obj_exp_out_transpose.to_json()
    # logging.info(json_inputs)
    # logging.info(json_exp_out)

    # Concatenate the JSON strings under the keys "inputs" and "out_exp", respectively,
    # adding 'run_type': 'qaqc' to the JSON string
    json = '{"inputs":' + json_inputs + ',"out_exp":' + json_exp_out + ',"run_type":"qaqc"}'
    # logging.info(json)

    # Send JSON to model_handler module
    from models import model_handler
    # return model_handler.ModelQAQC(model, json)
    qaqc_output = model_handler.call_model_server(model, json)
    ModelList = model_handler.generate_model_object_list(qaqc_output)
    return ModelList
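
# --- Hedged, self-contained sketch (not part of the original module) ---------
# Demonstrates the payload shape that qaqcRun() assembles before handing it to
# model_handler.call_model_server(). The toy input/expected-output DataFrames
# and their column names below are hypothetical; real data comes from the
# model's <model>_qaqc.csv file.
if __name__ == '__main__':
    import pandas as pd

    toy_inputs = pd.DataFrame({'param_a': [1.5, 2.0],
                               'param_b': [10.0, 20.0]})
    toy_exp_out = pd.DataFrame({'expected_result': [0.64, 0.12]})

    # Same steps as qaqcRun(): transpose, serialize to JSON, then concatenate
    # the fragments under the "inputs", "out_exp", and "run_type" keys.
    json_inputs = toy_inputs.transpose().to_json()
    json_exp_out = toy_exp_out.transpose().to_json()
    payload = ('{"inputs":' + json_inputs +
               ',"out_exp":' + json_exp_out + ',"run_type":"qaqc"}')
    print(payload)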