def post_process(simulation_parameter, simulation_dir, queue):
    """
    This function runs the post-processing phase
    :param simulation_parameter: the simulation parameters as a dict
    :param simulation_dir: the simulation directory (str)
    :param queue: the logging listener queue
    """
    post_process_param = {
        'irf': [
            int(jp.search(struct.H5_COMPUTE_IRF, simulation_parameter)),
            jp.search(struct.H5_IRF_TIME_STEP, simulation_parameter),
            jp.search(struct.H5_IRF_DURATION, simulation_parameter)
        ],
        'show_pressure': jp.search(struct.H5_SHOW_PRESSURE, simulation_parameter),
        'kochin_function': [
            jp.search(struct.H5_KOCHIN_NUMBER, simulation_parameter),
            jp.search(struct.H5_KOCHIN_MIN, simulation_parameter),
            jp.search(struct.H5_KOCHIN_MAX, simulation_parameter)
        ],
        'free_surface_elevation': [
            jp.search(struct.H5_FREE_SURFACE_POINTS_X, simulation_parameter),
            jp.search(struct.H5_FREE_SURFACE_POINTS_Y, simulation_parameter),
            jp.search(struct.H5_FREE_SURFACE_DIMENSION_X, simulation_parameter),
            jp.search(struct.H5_FREE_SURFACE_DIMENSION_Y, simulation_parameter)
        ]
    }
    post_process_param = convert_dict_values(post_process_param)
    return services.postprocess(
        simulation_dir,
        services.construct_postprocess_parameters(post_process_param),
        queue)
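# convert_dict_values() is referenced above but not shown. The following is a
# minimal, hypothetical sketch of what such a helper could look like, assuming
# its job is to normalize every leaf value of the nested parameter dict to a
# string before it is passed to services.construct_postprocess_parameters.
# The name is taken from the call above; the behaviour is an assumption, not
# the actual OpenWarp implementation.
def convert_dict_values(data):
    """Hypothetical helper: recursively convert leaf values to str.

    Assumption: downstream parameter construction works on string values,
    so numbers and booleans are stringified while None is left untouched.
    """
    if isinstance(data, dict):
        return {key: convert_dict_values(value) for key, value in data.items()}
    if isinstance(data, list):
        return [convert_dict_values(item) for item in data]
    if data is None:
        return None
    return str(data)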
def do_postprocessing(self, json_file):
    '''
    Run post-processing.

    Args:
        json_file: the json file containing all the parameters
    '''
    signature = __name__ + '.OpenWarpCLI.do_postprocessing()'
    helper.log_entrance(self.logger, signature, {'json_file': json_file})

    if not self.simulation_done:
        ret = {'error': 'Simulation must be run first.'}
        helper.log_exit(self.logger, signature, [ret])
        print(ret['error'])
        return

    try:
        json_obj = self.load_json(json_file)
        log = services.postprocess(
            self.simulation_dir,
            services.construct_postprocess_parameters(json_obj),
            self.queue)
        print(log)
        helper.log_exit(self.logger, signature, [{'log': log}])
    except Exception as e:
        helper.log_exception(self.logger, signature, e)
        ret = {'error': str(e)}
        helper.log_exit(self.logger, signature, [ret])
        print(e)
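# A hedged example of what the json_file handed to do_postprocessing() could
# contain, assuming it carries the same keys that post_process() builds above
# (irf, show_pressure, kochin_function, free_surface_elevation). The concrete
# values and the file name are illustrative only.
import json

example_parameters = {
    'irf': [1, 0.1, 20.0],                      # [compute_irf, time step, duration]
    'show_pressure': 0,
    'kochin_function': [0, 0.0, 0.0],           # [number, min angle, max angle]
    'free_surface_elevation': [0, 0, 0.0, 0.0]  # [points x, points y, dim x, dim y]
}

with open('postprocess.json', 'w') as f:
    json.dump(example_parameters, f, indent=4)

# If OpenWarpCLI follows the cmd.Cmd naming convention (do_<command>), the
# method above would be reachable inside the CLI loop as:
#     postprocessing postprocess.json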
def postprocess(self, json_str):
    '''
    Run post-processing.

    @param self: the class instance itself
    @param json_str: the json string posted by the client
    @return: the response as a dictionary, serialized to JSON by CherryPy
    '''
    signature = __name__ + '.WebController.postprocess()'
    helper.log_entrance(_LOGGER, signature, {'json_str': json_str})
    # Set session variable postprocess_done to False by default.
    cherrypy.session['postprocess_done'] = False

    try:
        if 'simulation_done' not in cherrypy.session or not cherrypy.session['simulation_done']:
            # Simulation must be run first
            cherrypy.response.status = 400
            ret = {'error': 'Simulation must be run first.'}
            helper.log_exit(_LOGGER, signature, [ret])
            return ret
        else:
            # Call the post-processing service
            ret = {
                'log': services.postprocess(
                    cherrypy.session['simulation_dir'],
                    self.construct_postprocess_parameters(json_str))
            }
            cherrypy.session['postprocess_done'] = True
            helper.log_exit(_LOGGER, signature, [ret])
            return ret
    except (TypeError, ValueError) as e:
        helper.log_exception(_LOGGER, signature, e)
        # Invalid input, respond with 400
        cherrypy.response.status = 400
        ret = {'error': str(e)}
        helper.log_exit(_LOGGER, signature, [ret])
        return ret
    except Exception as e:
        helper.log_exception(_LOGGER, signature, e)
        # Internal server error, respond with 500
        cherrypy.response.status = 500
        ret = {'error': str(e)}
        helper.log_exit(_LOGGER, signature, [ret])
        return ret
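# A hedged sketch of how a client might call the CherryPy handler above.
# Assumptions, not taken from the OpenWarp sources: the handler is exposed at
# /postprocess, the server runs on localhost:8080, json_str arrives as a form
# field, and the payload has the same shape as example_parameters above. A
# prior simulation request on the same session is required because the handler
# checks cherrypy.session['simulation_done'].
import json
import requests  # client-side dependency, not part of OpenWarp itself

payload = {'irf': [1, 0.1, 20.0], 'show_pressure': 0,
           'kochin_function': [0, 0.0, 0.0],
           'free_surface_elevation': [0, 0, 0.0, 0.0]}

session = requests.Session()
# ... run the simulation endpoint on this session first ...
response = session.post('http://localhost:8080/postprocess',
                        data={'json_str': json.dumps(payload)})
print(response.status_code, response.json())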
def postprocess(self, json_str):
    '''
    Run post-processing.

    @param self: the class instance itself
    @param json_str: the json string posted by the client
    @return: the response as a dictionary, serialized to JSON by CherryPy
    '''
    signature = __name__ + '.WebController.postprocess()'
    helper.log_entrance(self.logger, signature, {'json_str': json_str})
    # Set session variable postprocess_done to False by default.
    cherrypy.session['postprocess_done'] = False

    try:
        if 'simulation_done' not in cherrypy.session or not cherrypy.session['simulation_done']:
            # Simulation must be run first
            cherrypy.response.status = 400
            ret = {'error': 'Simulation must be run first.'}
            helper.log_exit(self.logger, signature, [ret])
            return ret
        else:
            # Call the post-processing service
            ret = {
                'log': services.postprocess(
                    cherrypy.session['simulation_dir'],
                    self.construct_postprocess_parameters(json_str))
            }
            cherrypy.session['postprocess_done'] = True
            helper.log_exit(self.logger, signature, [ret])
            return ret
    except (TypeError, ValueError) as e:
        helper.log_exception(self.logger, signature, e)
        # Invalid input, respond with 400
        cherrypy.response.status = 400
        ret = {'error': str(e)}
        helper.log_exit(self.logger, signature, [ret])
        return ret
    except Exception as e:
        helper.log_exception(self.logger, signature, e)
        # Internal server error, respond with 500
        cherrypy.response.status = 500
        ret = {'error': str(e)}
        helper.log_exit(self.logger, signature, [ret])
        return ret
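# The two WebController variants above differ only in where the logger lives:
# a module-level _LOGGER versus an instance attribute self.logger. A minimal
# sketch of both wirings, assuming helper.log_entrance()/log_exit() accept a
# standard logging.Logger (which the snippets suggest but do not show); the
# logger names are illustrative.
import logging

# Module-level logger, as used in the first variant.
_LOGGER = logging.getLogger(__name__)

class WebController(object):
    def __init__(self):
        # Per-instance logger, as used in the second variant.
        self.logger = logging.getLogger(__name__ + '.WebController')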
def post_process(simulation_parameter, simulation_dir, queue):
    """
    This function runs the post-processing phase
    :param simulation_parameter: the simulation parameters as a dict
    :param simulation_dir: the simulation directory (str)
    :param queue: the logging listener queue
    """
    post_process_param = {
        'irf': [int(jp.search(struct.H5_COMPUTE_IRF, simulation_parameter)),
                jp.search(struct.H5_IRF_TIME_STEP, simulation_parameter),
                jp.search(struct.H5_IRF_DURATION, simulation_parameter)],
        'show_pressure': jp.search(struct.H5_SHOW_PRESSURE, simulation_parameter),
        'kochin_function': [jp.search(struct.H5_KOCHIN_NUMBER, simulation_parameter),
                            jp.search(struct.H5_KOCHIN_MIN, simulation_parameter),
                            jp.search(struct.H5_KOCHIN_MAX, simulation_parameter)],
        'free_surface_elevation': [jp.search(struct.H5_FREE_SURFACE_POINTS_X, simulation_parameter),
                                   jp.search(struct.H5_FREE_SURFACE_POINTS_Y, simulation_parameter),
                                   jp.search(struct.H5_FREE_SURFACE_DIMENSION_X, simulation_parameter),
                                   jp.search(struct.H5_FREE_SURFACE_DIMENSION_Y, simulation_parameter)]
    }
    post_process_param = convert_dict_values(post_process_param)
    return services.postprocess(
        simulation_dir,
        services.construct_postprocess_parameters(post_process_param),
        queue)
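# post_process() takes a queue described as "the logging listener queue". A
# hedged sketch of how such a queue is commonly wired with the standard
# library's QueueListener (Python >= 3.2); this is an assumed pattern, not
# taken from the OpenWarp sources, and the handler setup is illustrative.
import logging
import logging.handlers
import multiprocessing

# Assumed wiring: worker processes push LogRecords onto the queue, and a
# QueueListener in the parent process drains them to real handlers.
queue = multiprocessing.Manager().Queue()
console = logging.StreamHandler()
listener = logging.handlers.QueueListener(queue, console)
listener.start()

# ... post_process(simulation_parameter, simulation_dir, queue) would be
# called here with the parameter dict and simulation directory ...

listener.stop()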