def async_launch_and_prepare_simulation(self, burst_config, user, project, simulator_algo, session_stored_simulator, simulation_state_gid):
    """
    Prepare and launch a single (non-PSE) simulation operation asynchronously.

    Creates the Operation, serializes the simulator configuration next to it,
    links the burst to the new operation, then launches it. Any failure marks
    the burst as finished with the error message.

    :param burst_config: BurstConfiguration to attach the new operation to
    :param user, project: owner/context of the launched operation
    :param simulator_algo: stored Algorithm entity for the simulator
    :param session_stored_simulator: simulator model to serialize and run
    :param simulation_state_gid: GID of a previous simulation state to branch from (or None)
    """
    try:
        metadata = {}
        metadata.update({DataTypeMetaData.KEY_BURST: burst_config.id})
        simulator_id = simulator_algo.id
        algo_category = simulator_algo.algorithm_category
        operation = self._prepare_operation(project.id, user.id, simulator_id, session_stored_simulator.gid,
                                            algo_category, None, metadata)
        # Serialize the simulator into the operation's own project folder
        storage_path = self.files_helper.get_project_folder(project, str(operation.id))
        SimulatorSerializer().serialize_simulator(session_stored_simulator, simulation_state_gid, storage_path)
        # Link burst -> operation + simulator GID before launching
        BurstService.update_simulation_fields(burst_config.id, operation.id, session_stored_simulator.gid)
        wf_errs = 0
        try:
            OperationService().launch_operation(operation.id, True)
            return operation
        except Exception as excep:
            self.logger.error(excep)
            wf_errs += 1
            if burst_config:
                BurstService().mark_burst_finished(burst_config, error_message=str(excep))
        # NOTE(review): this line is only reached when the launch above failed
        # (the success path returns early), so "launched successfully" in the
        # message text is misleading.
        self.logger.debug("Finished launching workflow. The operation was launched successfully, " + str(wf_errs) + " had error on pre-launch steps")
    except Exception as excep:
        # Preparation itself failed: record the error on the burst
        self.logger.error(excep)
        if burst_config:
            BurstService().mark_burst_finished(burst_config, error_message=str(excep))
def stop_burst_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    For a given operation id that is part of a burst just stop the given burst.

    :param operation_id: id of the operation (or of the operation group when is_group is truthy)
    :param is_group: truthy when operation_id actually identifies an operation group
    :param remove_after_stop: when True, also remove the burst (and drop it from the session)
    :returns True when stopped operation was successfully.
    """
    operation_id = int(operation_id)
    if int(is_group) == 0:
        operation = self.flow_service.load_operation(operation_id)
    else:
        # For a group, act on the first operation in the group - they share the burst
        op_group = ProjectService.get_operation_group_by_id(operation_id)
        first_op = ProjectService.get_operations_in_group(op_group)[0]
        operation = self.flow_service.load_operation(int(first_op.id))
    try:
        burst_service = BurstService()
        result = burst_service.stop_burst(operation.burst)
        if remove_after_stop:
            # Drop the burst from the session if it is the one currently selected
            current_burst = common.get_from_session(common.KEY_BURST_CONFIG)
            if current_burst and current_burst.id == operation.burst.id:
                common.remove_from_session(common.KEY_BURST_CONFIG)
            result = burst_service.cancel_or_remove_burst(operation.burst.id) or result
        return result
    except Exception as ex:
        # FIX: was Python 2 syntax "except Exception, ex:" - a SyntaxError on Python 3
        self.logger.exception(ex)
        return False
def cancel_or_remove_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the
    operations from that group.

    :param operation_id: id of the operation (or operation group) to stop
    :param is_group: whether operation_id identifies a group
    :param remove_after_stop: when True, also remove the burst configuration
    :returns: result of OperationService.stop_operation
    """
    from time import sleep  # stdlib; used to avoid a CPU-burning busy-wait below

    # Load before we remove, to have its data in memory here
    burst_config = BurstService.get_burst_for_operation_id(operation_id, is_group)
    if burst_config is not None:
        self.burst_service.mark_burst_finished(burst_config, BurstConfiguration.BURST_CANCELED, store_h5_file=False)
        # Wait until any pending group-burst bookkeeping for this burst completes.
        # FIX: the original "while ...: pass" spun a CPU core; sleep briefly instead.
        while GROUP_BURST_PENDING.get(burst_config.id, False):
            sleep(0.05)
        GROUP_BURST_PENDING.pop(burst_config.id, False)
    result = OperationService.stop_operation(operation_id, is_group, remove_after_stop)
    if remove_after_stop:
        # If the burst being removed is the one loaded in the current context, reset the UI state
        current_burst = self.context.burst_config
        if (current_burst is not None and burst_config is not None and current_burst.id == burst_config.id
                and ((current_burst.fk_simulation == operation_id and not is_group)
                     or (current_burst.fk_operation_group == operation_id and is_group))):
            self.reset_simulator_configuration()
        if burst_config is not None:
            # Reload before removal: the entity may have been updated meanwhile
            burst_config = BurstService.load_burst_configuration(burst_config.id)
            if burst_config:
                BurstService.remove_burst_configuration(burst_config.id)
    return result
def stop_burst_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    For a given operation id that is part of a burst just stop the given burst.

    :param operation_id: id of the operation (or of the operation group when is_group is truthy)
    :param is_group: truthy when operation_id actually identifies an operation group
    :param remove_after_stop: when True, also remove the burst (and drop it from the session)
    :returns True when stopped operation was successfully.
    """
    operation_id = int(operation_id)
    if int(is_group) == 0:
        operation = self.flow_service.load_operation(operation_id)
    else:
        # For a group, act on the first operation in the group - they share the burst
        op_group = ProjectService.get_operation_group_by_id(operation_id)
        first_op = ProjectService.get_operations_in_group(op_group)[0]
        operation = self.flow_service.load_operation(int(first_op.id))
    try:
        burst_service = BurstService()
        result = burst_service.stop_burst(operation.burst)
        if remove_after_stop:
            # Drop the burst from the session if it is the one currently selected
            current_burst = common.get_from_session(common.KEY_BURST_CONFIG)
            if current_burst and current_burst.id == operation.burst.id:
                common.remove_from_session(common.KEY_BURST_CONFIG)
            result = burst_service.cancel_or_remove_burst(operation.burst.id) or result
        return result
    except Exception as ex:
        # FIX: was Python 2 syntax "except Exception, ex:" - a SyntaxError on Python 3
        self.logger.exception(ex)
        return False
def launch_simulation_workflow(json_path, prj_id):
    """
    :param json_path: Path towards a local JSON file exported from GUI
    :param prj_id: This ID of a project needs to exists in DB, and it can be taken from the WebInterface
    """
    project = dao.get_project_by_id(prj_id)

    # Read and parse the exported simulation configuration
    with open(json_path, 'rb') as input_file:
        simulation_json = json.loads(input_file.read())
    LOG.info("Simulation JSON loaded from file '%s': \n %s", json_path, simulation_json)

    simulation_config = ImportService().load_burst_entity(simulation_json, prj_id)
    LOG.info("Simulation Workflow configuration object loaded: \n %s", simulation_config)

    stored_adapter = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    LOG.info("Found Simulation algorithm in local DB: \n %s", stored_adapter)

    BurstService().launch_burst(simulation_config, 0, stored_adapter.id, project.administrator.id, LAUNCH_NEW)
    LOG.info("Check in the web GUI for your operation. It should be starting now ...")
def run(self):
    """
    Get the required data from the operation queue and launch the operation.

    Takes a launch slot from LOCKS_QUEUE, spawns the async-launcher subprocess,
    records its PID for later stop requests, waits for it to finish and persists
    an ERROR state on abnormal exit. Always gives the slot back at the end.
    """
    # Try to get a spot to launch own operation.
    LOCKS_QUEUE.get(True)
    operation_id = self.operation_id
    run_params = [TvbProfile.current.PYTHON_INTERPRETER_PATH, '-m', 'tvb.core.operation_async_launcher',
                  str(operation_id), TvbProfile.CURRENT_PROFILE_NAME]

    # In the exceptional case where the user pressed stop while the Thread startup is done,
    # We should no longer launch the operation.
    if self.stopped() is False:
        env = os.environ.copy()
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        # anything that was already in $PYTHONPATH should have been reproduced in sys.path
        launched_process = Popen(run_params, stdout=PIPE, stderr=PIPE, env=env)
        # FIX: format args were swapped (operation_id, pid) - the message logged the
        # operation id as the pid and vice versa.
        LOGGER.debug("Storing pid=%s for operation id=%s launched on local machine." % (launched_process.pid,
                                                                                       operation_id))
        op_ident = OperationProcessIdentifier(operation_id, pid=launched_process.pid)
        dao.store_entity(op_ident)

        if self.stopped():
            # In the exceptional case where the user pressed stop while the Thread startup is done.
            # and stop_operation is concurrently asking about OperationProcessIdentity.
            self.stop_pid(launched_process.pid)

        subprocess_result = launched_process.communicate()
        LOGGER.info("Finished with launch of operation %s" % operation_id)
        returned = launched_process.wait()

        if returned != 0 and not self.stopped():
            # Process did not end as expected. (e.g. Segmentation fault)
            burst_service = BurstService()
            operation = dao.get_operation_by_id(self.operation_id)
            LOGGER.error("Operation suffered fatal failure! Exit code: %s Exit message: %s" % (returned,
                                                                                              subprocess_result))
            burst_service.persist_operation_state(operation, STATUS_ERROR,
                                                  "Operation failed unexpectedly! Please check the log files.")
        del launched_process

    # Give back empty spot now that you finished your operation
    CURRENT_ACTIVE_THREADS.remove(self)
    LOCKS_QUEUE.put(1)
def __init__(self):
    """Set up services, context and the cached simulator algorithm for this controller."""
    BurstBaseController.__init__(self)
    # Helper/context objects
    self.context = SimulatorContext()
    self.monitors_handler = MonitorsWizardHandler()
    self.range_parameters = SimulatorRangeParameters()
    # Services used by this controller
    self.burst_service = BurstService()
    self.simulator_service = SimulatorService()
    # Cache the simulator Algorithm lookup once, for performance
    self.cached_simulator_algorithm = self.algorithm_service.get_algorithm_by_module_and_class(
        IntrospectionRegistry.SIMULATOR_MODULE, IntrospectionRegistry.SIMULATOR_CLASS)
def async_launch_and_prepare_pse(self, burst_config, user, project, simulator_algo, range_param1, range_param2, session_stored_simulator):
    """
    Prepare and launch all operations of a Parameter Space Exploration (PSE) burst.

    Builds one Operation per combination of the two range parameters (a deep copy
    of the simulator with a fresh GID each), serializes every copy, creates the
    DataTypeGroup entities for results and metrics, then launches every operation.
    Launch failures are counted and mark the burst finished with an error.

    :param burst_config: BurstConfiguration holding the operation groups
    :param range_param1: first RangeParameter (always present)
    :param range_param2: second RangeParameter, may be None/empty
    :param session_stored_simulator: template simulator model, deep-copied per combination
    """
    try:
        simulator_id = simulator_algo.id
        algo_category = simulator_algo.algorithm_category
        operation_group = burst_config.operation_group
        metric_operation_group = burst_config.metric_operation_group
        operations = []
        range_param2_values = []
        if range_param2:
            range_param2_values = range_param2.get_range_values()
        first_simulator = None
        # Cartesian product of the two parameter ranges
        for param1_value in range_param1.get_range_values():
            for param2_value in range_param2_values:
                # Copy, but generate a new GUID for every Simulator in PSE
                simulator = copy.deepcopy(session_stored_simulator)
                simulator.gid = uuid.uuid4()
                self._set_simulator_range_parameter(simulator, range_param1.name, param1_value)
                self._set_simulator_range_parameter(simulator, range_param2.name, param2_value)
                ranges = json.dumps({range_param1.name: param1_value[0], range_param2.name: param2_value[0]})
                operation = self._prepare_operation(project.id, user.id, simulator_id, simulator.gid,
                                                    algo_category, operation_group,
                                                    {DataTypeMetaData.KEY_BURST: burst_config.id}, ranges)
                storage_path = self.files_helper.get_project_folder(project, str(operation.id))
                SimulatorSerializer().serialize_simulator(simulator, None, storage_path)
                operations.append(operation)
                if first_simulator is None:
                    first_simulator = simulator
        # The burst is linked to the first operation/simulator of the group
        first_operation = operations[0]
        BurstService.update_simulation_fields(burst_config.id, first_operation.id, first_simulator.gid)
        datatype_group = DataTypeGroup(operation_group, operation_id=first_operation.id,
                                       fk_parent_burst=burst_config.id,
                                       state=json.loads(first_operation.meta_data)[DataTypeMetaData.KEY_STATE])
        dao.store_entity(datatype_group)
        metrics_datatype_group = DataTypeGroup(metric_operation_group, fk_parent_burst=burst_config.id)
        dao.store_entity(metrics_datatype_group)
        wf_errs = 0
        for operation in operations:
            try:
                OperationService().launch_operation(operation.id, True)
            except Exception as excep:
                self.logger.error(excep)
                wf_errs += 1
                BurstService().mark_burst_finished(burst_config, error_message=str(excep))
        self.logger.debug("Finished launching workflows. " + str(len(operations) - wf_errs) +
                          " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps")
    except Exception as excep:
        # Preparation itself failed: record the error on the burst
        self.logger.error(excep)
        BurstService().mark_burst_finished(burst_config, error_message=str(excep))
def __init__(self):
    """Initialize services, adapter context and the cached simulator algorithm."""
    BurstBaseController.__init__(self)
    self.context = SelectedAdapterContext()
    self.workflow_service = WorkflowService()
    self.burst_service = BurstService()
    # Cache simulator Tree and Algorithm for performance issues.
    self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE,
                                                                                          SIMULATOR_CLASS)
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.
    """
    log = get_logger('tvb.core.operation_async_launcher')
    burst_service = BurstService()
    try:
        log.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = curent_operation.algorithm
        log.debug("Importing Algorithm: " + str(stored_adapter.classname) + " for Operation:" + str(curent_operation.id))
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)
        # Un-comment bellow for profiling an operation:
        # import cherrypy.lib.profiler as profiler
        # p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        # p.run(OperationService().initiate_prelaunch, curent_operation, adapter_instance, {}, **PARAMS)
        OperationService().initiate_prelaunch(curent_operation, adapter_instance)
        if curent_operation.fk_operation_group:
            # PSE case: the burst is finished only once EVERY operation in the group
            # (including the metrics group, when present) has finished.
            parent_burst = dao.get_generic_entity(BurstConfiguration, curent_operation.fk_operation_group,
                                                  'fk_operation_group')[0]
            operations_in_group = dao.get_operations_in_group(curent_operation.fk_operation_group)
            if parent_burst.fk_metric_operation_group:
                operations_in_group.extend(dao.get_operations_in_group(parent_burst.fk_metric_operation_group))
            burst_finished = True
            for operation in operations_in_group:
                if not has_finished(operation.status):
                    burst_finished = False
                    break
            # Do not overwrite an already-recorded BURST_ERROR state
            if burst_finished and parent_burst is not None and parent_burst.status != BurstConfiguration.BURST_ERROR:
                burst_service.mark_burst_finished(parent_burst)
        else:
            # Single-simulation case: mark the owning burst (if any) as finished
            parent_burst = burst_service.get_burst_for_operation_id(operation_id)
            if parent_burst is not None:
                burst_service.mark_burst_finished(parent_burst)
        log.debug("Successfully finished operation " + str(operation_id))
    except Exception as excep:
        log.error("Could not execute operation " + str(operation_id))
        log.exception(excep)
        # Propagate the failure to the owning burst so the UI shows the error
        parent_burst = burst_service.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            burst_service.mark_burst_finished(parent_burst, error_message=str(excep))
def update_db_with_results(operation, sim_h5_filenames, metric_operation, metric_h5_filename):
    # type: (Operation, list, Operation, str) -> (str, int)
    """
    Generate corresponding Index entities for the resulted H5 files and insert them in DB.
    """
    service = BurstService()
    stored_indexes = []
    part_of_group = operation.fk_operation_group is not None
    burst_config = service.get_burst_for_operation_id(operation.id)
    if part_of_group:
        # For a PSE member, the burst hangs off the operation group instead
        burst_config = service.get_burst_for_operation_id(operation.fk_operation_group, True)

    pending_indexes = service.prepare_indexes_for_simulation_results(operation, sim_h5_filenames, burst_config)
    if part_of_group:
        # Update the operation group name
        operation_group = dao.get_operationgroup_by_id(metric_operation.fk_operation_group)
        operation_group.fill_operationgroup_name("DatatypeMeasureIndex")
        dao.store_entity(operation_group)

        pending_indexes.append(
            service.prepare_index_for_metric_result(metric_operation, metric_h5_filename, burst_config))

    for pending in pending_indexes:
        stored_indexes.append(dao.store_entity(pending))

    # NOTE(review): declared return type is (str, int) but nothing is returned here - verify callers.
    service.update_burst_status(burst_config)
def test_launch_burst(self):
    """
    Launch a burst and check that it finishes correctly and before timeout (100)
    """
    self.burst_c.index()
    connectivity = self._burst_create_connectivity()
    sim_params = copy.deepcopy(SIMULATOR_PARAMETERS)
    sim_params['connectivity'] = connectivity.gid
    sim_params['simulation_length'] = '10'
    post_data = {"simulator_parameters": json.dumps(sim_params)}
    burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **post_data))['id']

    # Poll the burst status until it leaves RUNNING or the timeout expires
    timeout = 100
    elapsed = 1
    burst_config = dao.get_burst_by_id(burst_id)
    while burst_config.status == BurstConfiguration.BURST_RUNNING and elapsed <= timeout:
        sleep(0.5)
        elapsed += 0.5
        burst_config = dao.get_burst_by_id(burst_config.id)

    if elapsed > timeout:
        self.fail("Timed out waiting for simulations to finish.")
    if burst_config.status != BurstConfiguration.BURST_FINISHED:
        BurstService().stop_burst(burst_config)
        self.fail("Burst should have finished successfully.")
def stage_out_to_operation_folder(working_dir, operation, simulator_gid):
    # type: (Storage, Operation, typing.Union[uuid.UUID, str]) -> (list, Operation, str)
    """
    Fetch encrypted HPC results and decrypt them into the operation's project folder.

    Metric result files are decrypted into a freshly-prepared metrics operation
    folder; everything else is treated as a simulation result.

    :returns: (decrypted simulation H5 file names, metric Operation or None, metric file or None)
    """
    encrypted_files = HPCSchedulerClient._stage_out_results(working_dir, simulator_gid)
    encryption_handler = EncryptionHandler(simulator_gid)

    simulation_results = list()
    metric_op = None
    metric_file = None
    for encrypted_file in encrypted_files:
        if os.path.basename(encrypted_file).startswith(DatatypeMeasureH5.file_name_base()):
            metric_op_dir, metric_op = BurstService.prepare_metrics_operation(operation)
            metric_files = encryption_handler.decrypt_files_to_dir([encrypted_file], metric_op_dir)
            metric_file = metric_files[0]
        else:
            simulation_results.append(encrypted_file)

    project = dao.get_project_by_id(operation.fk_launched_in)
    operation_dir = HPCSchedulerClient.file_handler.get_project_folder(project, str(operation.id))
    # FIX: reuse the handler built above instead of constructing a second
    # EncryptionHandler(simulator_gid) for the same GID.
    h5_filenames = encryption_handler.decrypt_files_to_dir(simulation_results, operation_dir)
    return h5_filenames, metric_op, metric_file
def stop_operation(operation_id):
    """
    Stop a thread for a given operation id
    """
    operation = dao.try_get_operation_by_id(operation_id)
    if not operation or operation.has_finished:
        LOGGER.info("Operation already stopped or not found at ID: %s" % operation_id)
        return True

    LOGGER.debug("Stopping operation: %s" % str(operation_id))

    # Flag every executor thread working on this operation to stop
    for executor in CURRENT_ACTIVE_THREADS:
        if int(executor.operation_id) == operation_id:
            executor._stop()
            LOGGER.debug("Found running thread for operation: %d" % operation_id)

    was_stopped = True
    process_identifier = dao.get_operation_process_for_operation(operation_id)
    if process_identifier is not None:
        # Now try to kill the operation if it exists
        was_stopped = OperationExecutor.stop_pid(process_identifier.pid)
        if not was_stopped:
            LOGGER.debug("Operation %d was probably killed from it's specific thread." % operation_id)
        else:
            LOGGER.debug("Stopped OperationExecutor process for %d" % operation_id)

    # Mark operation as canceled in DB and on disk
    BurstService().persist_operation_state(operation, STATUS_CANCELED)
    return was_stopped
def test_export_simulator_configuration(self, operation_factory, connectivity_index_factory):
    """
    Test export of a simulator configuration
    """
    conn_gid = uuid.UUID(connectivity_index_factory().gid)
    operation = operation_factory(is_simulation=True, store_vm=True,
                                  test_project=self.test_project, conn_gid=conn_gid)

    # Persist a burst pointing at the simulation operation
    burst = BurstConfiguration(self.test_project.id)
    burst.fk_simulation = operation.id
    burst.simulator_gid = operation.view_model_gid
    burst.name = "Test_burst"
    burst = dao.store_entity(burst)

    operation_folder = StorageInterface().get_project_folder(self.test_project.name, str(operation.id))
    BurstService().store_burst_configuration(burst, operation_folder)

    export_file = self.export_manager.export_simulator_configuration(burst.id)

    assert export_file is not None, "Export process should return path to export file"
    assert os.path.exists(export_file), "Could not find export file: %s on disk." % export_file
    assert zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file"
def stop_operation(operation_id):
    """
    Stop an operation running on the HPC backend: abort the remote job, then
    stop the local monitoring thread and mark the operation as canceled.

    :returns: True (the operation is considered stopped in all paths)
    """
    # TODO: Review this implementation after DAINT maintenance
    from time import sleep  # stdlib; used to avoid a busy-spin while waiting below

    operation = dao.get_operation_by_id(operation_id)
    if not operation or operation.has_finished:
        LOGGER.warning("Operation already stopped: %s" % operation_id)
        return True

    LOGGER.debug("Stopping HPC operation: %s" % str(operation_id))
    op_ident = OperationDAO().get_operation_process_for_operation(operation_id)
    if op_ident is not None:
        # TODO: Handle login
        transport = unicore_client.Transport(os.environ[HPCSchedulerClient.CSCS_LOGIN_TOKEN_ENV_KEY])
        # Abort HPC job
        job = Job(transport, op_ident.job_id)
        if job.is_running():
            job.abort()

    # Kill thread
    operation_thread = get_op_thread(operation_id)
    if operation_thread is None:
        LOGGER.warning("Thread for operation {} is not available".format(operation_id))
    else:
        operation_thread.stop()
        while not operation_thread.stopped():
            LOGGER.info("Thread for operation {} is stopping".format(operation_id))
            # FIX: the original loop spun at full speed, flooding the log and
            # burning a CPU core; sleep briefly between checks.
            sleep(0.1)

    BurstService().persist_operation_state(operation, STATUS_CANCELED)
    return True
def _fill_param_from_post(all_range_parameters, param_key, **data):
    # type: (dict, str, dict) -> RangeParameter
    """Build a RangeParameter from the POSTed PSE form fields identified by param_key."""
    chosen_name = data.get(SimulatorPSERangeFragment.NAME_FIELD.format(param_key))
    declared = BurstService.get_range_param_by_name(chosen_name, all_range_parameters)

    if declared.type is float:
        # Numeric range: lo / hi / step arrive as separate form fields
        lo = data.get(SimulatorPSERangeFragment.LO_FIELD.format(param_key))
        hi = data.get(SimulatorPSERangeFragment.HI_FIELD.format(param_key))
        step = data.get(SimulatorPSERangeFragment.STEP_FIELD.format(param_key))
        return RangeParameter(chosen_name, declared.type,
                              Range(float(lo), float(hi), float(step)),
                              is_array=declared.is_array)

    # Datatype range: a list of GIDs is posted instead of numeric bounds
    posted_gids = data.get(SimulatorPSERangeFragment.GID_FIELD.format(param_key))
    return RangeParameter(chosen_name, declared.type, declared.range_definition, False,
                          SimulatorPSERangeFragment._prepare_pse_uuid_list(posted_gids))
def stop_operation(operation_id):
    """
    Stop a cluster job for a given operation id by issuing the configured
    STOP_COMMAND, then mark the operation as canceled.

    :returns: True when the stop command succeeded (or nothing had to be stopped)
    """
    operation = dao.try_get_operation_by_id(operation_id)
    if not operation or operation.has_finished:
        LOGGER.warning("Operation already stopped or not found is given to stop job: %s" % operation_id)
        return True

    operation_process = dao.get_operation_process_for_operation(operation_id)
    result = 0
    # Try to kill only if operation job process is not None
    if operation_process is not None:
        stop_command = TvbProfile.current.cluster.STOP_COMMAND % operation_process.job_id
        LOGGER.info("Stopping cluster operation: %s" % stop_command)
        result = os.system(stop_command)
        if result != 0:
            # FIX: the original applied "%" to the trailing "'" literal instead of
            # STATUS_COMMAND, raising TypeError whenever this branch executed.
            LOGGER.error("Stopping cluster operation was unsuccessful. Try following status with '" +
                         (TvbProfile.current.cluster.STATUS_COMMAND % operation_process.job_id) + "'")

    BurstService().persist_operation_state(operation, STATUS_CANCELED)
    return result == 0
def update_db_with_results(self, operation, sim_h5_filenames, metric_operation, metric_h5_filename):
    # type: (Operation, list, Operation, str) -> (str, int)
    """
    Generate corresponding Index entities for the resulted H5 files and insert them in DB.
    """
    service = BurstService()
    burst_config = service.get_burst_for_operation_id(operation.id)

    pending_indexes = service.prepare_indexes_for_simulation_results(operation, sim_h5_filenames, burst_config)
    if burst_config.fk_operation_group:
        # PSE run: also index the metric result
        pending_indexes.append(
            service.prepare_index_for_metric_result(metric_operation, metric_h5_filename, burst_config))

    stored_indexes = [dao.store_entity(pending) for pending in pending_indexes]

    adapter = SimulatorAdapter()
    adapter.extract_operation_data(operation)
    adapter.generic_attributes.parent_burst = burst_config.gid
    message, _ = adapter._capture_operation_results(stored_indexes)
    service.update_burst_status(burst_config)
    return message
def cancel_or_remove_burst(self, burst_id):
    """
    Cancel or Remove the burst entity given by burst_id (and all linked entities: op, DTs)
    :returns True: if the op was successfully.
    """
    config = BurstService.load_burst_configuration(int(burst_id))
    removal_op_id, group_flag = config.operation_info_for_burst_removal
    # Finished bursts are removed outright; running ones are canceled first
    return self.cancel_or_remove_operation(removal_op_id, group_flag, config.is_finished)
def __init__(self):
    """Initialize services, adapter context and the cached simulator algorithm."""
    BurstBaseController.__init__(self)
    self.context = SelectedAdapterContext()
    self.workflow_service = WorkflowService()
    self.burst_service = BurstService()
    # Cache simulator Tree and Algorithm for performance issues.
    self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE,
                                                                                          SIMULATOR_CLASS)
def test_load_burst_removed(self):
    """
    Add burst to session, then remove burst from database. Try to load burst
    and check that it will raise exception and remove it from session.
    """
    stored_burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
    cherrypy.session[common.KEY_BURST_CONFIG] = stored_burst
    removed_id = stored_burst.id

    # Delete the burst behind the session's back
    BurstService().cancel_or_remove_burst(removed_id)

    self.assertRaises(Exception, self.burst_c.load_burst, removed_id)
    self.assertTrue(common.KEY_BURST_CONFIG not in cherrypy.session)
def prepare_final_fragment(simulator, burst_config, project_id, rendering_rules, setup_pse_url):
    """Fill the final (launch) fragment form for this simulator and return its rendering dict."""
    default_name, _number = BurstService.prepare_simulation_name(burst_config, project_id)

    fragment_form = SimulatorFinalFragment(default_simulation_name=default_name)
    fragment_form.fill_from_trait(simulator)

    rendering_rules.form = fragment_form
    rendering_rules.form_action_url = setup_pse_url
    rendering_rules.is_launch_fragment = True
    rendering_rules.is_pse_launch = burst_config.is_pse_burst()
    return rendering_rules.to_dict()
def setUp(self):
    """
    Sets up the testing environment;
    saves config file; creates a test user, a test project;
    creates burst, operation, flow and workflow services
    """
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user)
    # Service instances exercised by the tests
    self.flow_service = FlowService()
    self.operation_service = OperationService()
    self.workflow_service = WorkflowService()
    self.burst_service = BurstService()
def _handle_exception(self, exception, temp_files, message, operation=None):
    """
    Common way to treat exceptions:
        - remove temporary files, if any
        - set status ERROR on current operation (if any)
        - log exception

    :param exception: the caught exception; it is re-raised with its original traceback
    :param temp_files: temporary files to clean up before re-raising
    :param message: human-readable context, logged and attached to the exception
    :param operation: Operation to mark as ERROR, or None
    """
    self.logger.exception(message)
    if operation is not None:
        # Persist the failure so the UI reflects the ERROR state
        BurstService().persist_operation_state(operation, STATUS_ERROR, str(exception))
    self._remove_files(temp_files)
    exception.message = message
    raise exception.with_traceback(
        sys.exc_info()[2])  # when rethrowing in python this is required to preserve the stack trace
def stop_operation(operation_id):
    """
    Stop a thread for a given operation id
    """
    operation = dao.try_get_operation_by_id(operation_id)
    already_done = not operation or operation.has_finished
    if already_done:
        LOGGER.info("Operation already stopped or not found at ID: %s" % operation_id)
        return True

    LOGGER.debug("Stopping operation: %s" % str(operation_id))
    process_stopped = StandAloneClient.stop_operation_process(operation_id, True)
    # Mark operation as canceled in DB and on disk
    BurstService().persist_operation_state(operation, STATUS_CANCELED)
    return process_stopped
def _handle_metric_results(metric_encrypted_file, metric_vm_encrypted_file, operation, encryption_handler):
    """
    Decrypt the HPC metric result and its view-model into a freshly-prepared
    metrics operation, link them and persist the operation.

    :returns: (metric Operation, decrypted metric H5 path), or (None, None)
              when no metric file was produced.
    """
    if not metric_encrypted_file:
        return None, None
    metric_op_dir, metric_op = BurstService.prepare_metrics_operation(operation)
    # NOTE(review): if metric_vm_encrypted_file were None here, a None entry would be
    # passed to decrypt_files_to_dir - presumably callers always provide both files
    # together when metric_encrypted_file is set; verify against call sites.
    metric_files = encryption_handler.decrypt_files_to_dir([metric_encrypted_file, metric_vm_encrypted_file], metric_op_dir)
    metric_file = metric_files[0]
    # Attach the decrypted view-model's GID to the metric operation before storing
    metric_vm = h5.load_view_model_from_file(metric_files[1])
    metric_op.view_model_gid = metric_vm.gid.hex
    dao.store_entity(metric_op)
    return metric_op, metric_file
def store_burst(project_id, operation=None):
    """
    Build and persist BurstConfiguration entity.
    """
    burst_config = BurstConfiguration(project_id)
    if operation is not None:
        # Fill a finished dummy burst linked to the given operation
        burst_config.name = 'dummy_burst'
        burst_config.status = BurstConfiguration.BURST_FINISHED
        burst_config.start_time = datetime.now()
        burst_config.range1 = '["conduction_speed", {"lo": 50, "step": 1.0, "hi": 100.0}]'
        burst_config.range2 = '["connectivity", null]'
        burst_config.fk_simulation = operation.id
        burst_config.simulator_gid = uuid.uuid4().hex
        BurstService().store_burst_configuration(burst_config)
    return dao.store_entity(burst_config)
def launch_simulation_workflow(json_path, prj_id):
    """
    :param json_path: Path towards a local JSON file exported from GUI
    :param prj_id: This ID of a project needs to exists in DB, and it can be taken from the WebInterface
    """
    project = dao.get_project_by_id(prj_id)

    # Read and parse the exported simulation configuration
    with open(json_path, 'rb') as input_file:
        simulation_json = json.loads(input_file.read())
    LOG.info("Simulation JSON loaded from file '%s': \n %s", json_path, simulation_json)

    simulation_config = ImportService().load_burst_entity(simulation_json, prj_id)
    LOG.info("Simulation Workflow configuration object loaded: \n %s", simulation_config)

    simulator_algorithm, _ = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    LOG.info("Found Simulation algorithm in local DB: \n %s", simulator_algorithm)

    BurstService().launch_burst(simulation_config, 0, simulator_algorithm.id, project.administrator.id, LAUNCH_NEW)
    LOG.info("Check in the web GUI for your operation. It should be starting now ...")
def build(test_user, test_project, simulation_length=10, is_group=False):
    """Factory: persist a BurstConfiguration and launch a simulation (single run or PSE group)."""
    view_model = SimulatorAdapterModel()
    view_model.connectivity = connectivity_index_factory().gid
    view_model.simulation_length = simulation_length

    burst_config = BurstConfiguration(test_project.id, name="Sim " + str(datetime.now()))
    burst_config.start_time = datetime.now()
    algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
    sim_service = SimulatorService()

    if not is_group:
        dao.store_entity(burst_config)
        return sim_service.async_launch_and_prepare_simulation(burst_config, test_user, test_project,
                                                               algorithm, view_model)

    # PSE case: range over conduction_speed
    speed_range = RangeParameter("conduction_speed", float, Range(lo=50.0, hi=100.0, step=20.0))
    burst_config.range1 = speed_range.to_json()
    burst_config = BurstService().prepare_burst_for_pse(burst_config)
    return sim_service.async_launch_and_prepare_pse(burst_config, test_user, test_project,
                                                    algorithm, speed_range, None, view_model)
def cancel_or_remove_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the operations from that group.
    """
    operation_id = int(operation_id)
    is_group = int(is_group) != 0

    # Load before we remove, to have its data in memory here
    burst_config = BurstService.get_burst_for_operation_id(operation_id)
    result = OperationService.stop_operation(operation_id, is_group, remove_after_stop)

    if remove_after_stop:
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        removed_current = (session_burst is not None and burst_config is not None
                           and session_burst.id == burst_config.id)
        if removed_current:
            # The session pointed at the removed burst: replace it with a fresh one
            common.remove_from_session(common.KEY_BURST_CONFIG)
            common.add2session(common.KEY_BURST_CONFIG, BurstConfiguration(burst_config.project.id))
    return result
def cancel_or_remove_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the operations from that group.
    """
    # Load before we remove, to have its data in memory here
    burst_config = BurstService.get_burst_for_operation_id(operation_id, is_group)
    result = OperationService.stop_operation(operation_id, is_group, remove_after_stop)

    if remove_after_stop:
        session_burst = self.context.burst_config
        if session_burst is not None and burst_config is not None and session_burst.id == burst_config.id:
            # Reset only when the stopped operation is the one this burst is bound to
            if is_group:
                targets_this_burst = session_burst.fk_operation_group == operation_id
            else:
                targets_this_burst = session_burst.fk_simulation == operation_id
            if targets_this_burst:
                self.reset_simulator_configuration()
    return result
def __init__(self):
    """Initialize logger, helper and the services this component delegates to."""
    self.logger = get_logger(self.__class__.__module__)
    self.files_helper = FilesHelper()
    self.operation_service = OperationService()
    self.burst_service = BurstService()
class BurstController(base.BaseController):
    """
    Controller class for Burst-Pages.

    Exposes the simulation-cockpit pages: the simulator input tree, portlet
    configuration per tab, and burst launch/rename/load operations. State for
    the burst being edited lives in the web session under base.KEY_BURST_CONFIG.
    """

    def __init__(self):
        base.BaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree, Algorithm and AlgorithmGroup, for performance issues.
        algorithm, self.cached_simulator_algo_group = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.cached_simulator_algorithm_id = algorithm.id

    @property
    @context_selected()
    def cached_simulator_input_tree(self):
        """
        Cache Simulator's input tree, for performance issues. Anyway, without
        restart, the introspected tree will not be different on multiple executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = base.get_from_session(base.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(base.get_current_project().id,
                                                                      self.cached_simulator_algo_group)[1]
            base.add2session(base.KEY_CACHED_SIMULATOR_TREE, cached_simulator_tree)
        # Return a deep copy so callers can mutate the tree without poisoning the cache.
        return copy.deepcopy(cached_simulator_tree)

    @cherrypy.expose
    @using_template('base_template')
    @base.settings()
    @logged()
    @context_selected()
    def index(self):
        """Get on burst main page"""
        template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                      baseUrl=cfg.BASE_URL, includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        # Ensure the session always holds a burst configuration (a fresh one if none exists).
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(base.get_current_project().id)
                base.add2session(base.KEY_BURST_CONFIG, session_stored_burst)

        adapter_interface = self.cached_simulator_input_tree
        if session_stored_burst is not None:
            current_data = session_stored_burst.get_all_simulator_values()[0]
            adapter_interface = ABCAdapter.fill_defaults(adapter_interface, current_data, True)
            ### Add simulator tree to session to be available in filters
            self.context.add_adapter_to_session(self.cached_simulator_algo_group, adapter_interface, current_data)
        template_specification['inputList'] = adapter_interface
        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification['burst_list'] = self.burst_service.get_available_bursts(base.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one,
        ### and not loaded with every Burst Group change in history.
        algo_group = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE,
                                                                         MEASURE_METRICS_CLASS)[1]
        adapter_instance = ABCAdapter.build_adapter(algo_group)
        if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
            template_specification['available_metrics'] = [metric_name for metric_name
                                                           in adapter_instance.available_algorithms.keys()]
        else:
            template_specification['available_metrics'] = []

        template_specification[base.KEY_PARAMETERS_CONFIG] = False
        template_specification[base.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)

    @cherrypy.expose
    @using_template('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one alternative to 'chrome-back problem'.
        """
        session_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        return {'burst_list': self.burst_service.get_available_bursts(base.get_current_project().id),
                'selectedBurst': session_burst.id}

    @cherrypy.expose
    @ajax_call(False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one alternative to 'chrome-back problem'.
        """
        session_burst = base.get_from_session(base.KEY_BURST_CONFIG)
        # Unsaved bursts have no id; the client expects the literal string 'None' then.
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'

    @cherrypy.expose
    @using_template('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the position given by the tab index and the index from that tab, get
        the portlet configuration and build the configurable interface for that portlet.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(portlet_config, base.get_current_project().id)

        # Flatten all adapter interfaces into one tree, stored in session for later filtering.
        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {"adapters_list": portlet_interface,
                             base.KEY_PARAMETERS_CONFIG: False,
                             base.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION}
        return self.fill_default_attributes(portlet_interface)

    @cherrypy.expose
    @using_template('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old portlets
        are still present, and if that is the case use their parameters
        configuration. For all the new portlets add entries in the burst
        configuration. Also remove old portlets that are no longer saved.
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in xrange(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When configuration already exists, and new portlets
            ### are selected, first check if any configuration was saved for
            ### each portlet and if that is the case, use it. If none is present
            ### create a new one.
            for idx_in_tab in xrange(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                # portlet_id < 0 marks an empty slot coming from the client.
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        current_tab.portlets[idx_in_tab] = self.burst_service.new_portlet_configuration(
                            portlet_id, tab_idx, idx_in_tab, portlet_name)
                    else:
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None
        # For generating the HTML get for each id the corresponding portlet
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)
        return {'portlet_tab_list': selected_tab_portlets}

    @cherrypy.expose
    @using_template('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing
        from tab to tab and selecting which portlets will be displayed.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}

    @cherrypy.expose
    @ajax_call()
    def change_selected_tab(self, tab_nr):
        """
        Set :param tab_nr: as the currently selected tab in the stored burst configuration.
        """
        base.get_from_session(base.KEY_BURST_CONFIG).selected_tab = int(tab_nr)

    @cherrypy.expose
    @ajax_call()
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst, as a json.
        """
        burst_entity = base.get_from_session(base.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration

    @cherrypy.expose
    @ajax_call(False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save parameters.
        :param index_in_tab: the index of the configured portlet in the selected tab
        :param data: the {name: value} dictionary configuration of the current portlet
        Having these inputs, update the configuration of the portlet in the
        corresponding tab position from the burst configuration.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(index_in_tab)]

        # Replace all void entries with 'None'
        for entry in data:
            if data[entry] == '':
                data[entry] = None
        need_relaunch = self.burst_service.update_portlet_configuration(old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            base.add2session(base.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(old_portlet_config.visualizer)
            return "noRelaunch"

    @cherrypy.expose
    @ajax_call()
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to burst_name.
        """
        self._validate_burst_name(burst_name)
        self.burst_service.rename_burst(burst_id, burst_name)

    @cherrypy.expose
    @ajax_call()
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (case in which we will fill with simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        burst_config = base.get_from_session(base.KEY_BURST_CONFIG)

        ## Validate new burst-name ('none_undefined' is the client-side sentinel for "no name typed")
        if burst_name != 'none_undefined':
            self._validate_burst_name(burst_name)
            burst_config.name = burst_name

        ## Fill all parameters
        user_id = base.get_logged_user().id
        data[base.KEY_ADAPTER] = self.cached_simulator_algorithm_id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = base.get_current_project().id

        ## Do the asynchronous launch
        burst_id, burst_name = self.burst_service.launch_burst(burst_config, 0, self.cached_simulator_algorithm_id,
                                                               user_id, launch_mode)
        return [burst_id, burst_name]

    @cherrypy.expose
    @ajax_call()
    def load_burst(self, burst_id):
        """
        Given a clicked burst from the history and the selected tab, load all
        the required data from that burst. Return a value specifying if it was
        a result of a range launch (OperationGroup) or not.
        """
        try:
            old_burst = base.get_from_session(base.KEY_BURST_CONFIG)
            burst, group_gid = self.burst_service.load_burst(burst_id)
            # Keep the user's currently selected tab across the burst switch.
            burst.selected_tab = old_burst.selected_tab
            base.add2session(base.KEY_BURST_CONFIG, burst)
            return {'status': burst.status, 'group_gid': group_gid, 'selected_tab': burst.selected_tab}
        except Exception, excep:
            ### Most probably Burst was removed. Delete it from session, so that client
            ### has a good chance to get a good response on refresh
            self.logger.error(excep)
            base.remove_from_session(base.KEY_BURST_CONFIG)
            raise excep
class BurstController(BurstBaseController):
    """
    Controller class for Burst-Pages.

    Serves the simulation cockpit: the cached simulator input tree, portlet
    configuration per tab, and burst launch/rename operations. The burst being
    edited lives in the web session under common.KEY_BURST_CONFIG.
    """

    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree and Algorithm for performance issues.
        self.cached_simulator_algorithm = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE,
                                                                                              SIMULATOR_CLASS)

    @property
    @context_selected
    def cached_simulator_input_tree(self):
        """
        Cache Simulator's input tree, for performance issues. Anyway, without
        restart, the introspected tree will not be different on multiple executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = common.get_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(common.get_current_project().id,
                                                                      self.cached_simulator_algorithm)
            common.add2session(common.KEY_CACHED_SIMULATOR_TREE, cached_simulator_tree)
        # Deep copy so callers can mutate their tree without corrupting the cached one.
        return copy.deepcopy(cached_simulator_tree)

    @expose_page
    @settings
    @context_selected
    def index(self):
        """Get on burst main page"""
        # todo : reuse load_burst here for consistency.
        template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                      baseUrl=TvbProfile.current.web.BASE_URL,
                                      includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        # Ensure the session always holds a burst configuration (a fresh one if none exists).
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
                common.add2session(common.KEY_BURST_CONFIG, session_stored_burst)

        adapter_interface = self.cached_simulator_input_tree
        if session_stored_burst is not None:
            current_data = session_stored_burst.get_all_simulator_values()[0]
            adapter_interface = InputTreeManager.fill_defaults(adapter_interface, current_data, True)
            ### Add simulator tree to session to be available in filters
            self.context.add_adapter_to_session(self.cached_simulator_algorithm, adapter_interface, current_data)
        template_specification['inputList'] = adapter_interface

        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification['burst_list'] = self.burst_service.get_available_bursts(common.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one,
        ### and not loaded with every Burst Group change in history.
        algorithm = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
            template_specification['available_metrics'] = [metric_name for metric_name
                                                           in adapter_instance.available_algorithms.keys()]
        else:
            template_specification['available_metrics'] = []

        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        template_specification[common.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)

    @expose_fragment('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available bursts that are stored in the database at this time.
        This is one alternative to 'chrome-back problem'.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        bursts = self.burst_service.get_available_bursts(common.get_current_project().id)
        self.burst_service.populate_burst_disk_usage(bursts)
        return {'burst_list': bursts,
                'selectedBurst': session_burst.id}

    @cherrypy.expose
    @handle_error(redirect=False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one alternative to 'chrome-back problem'.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        # Unsaved bursts have no id; the client expects the literal string 'None' then.
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'

    @expose_fragment('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the position given by the tab index and the index from that tab, get
        the portlet configuration and build the configurable interface for that portlet.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(portlet_config, common.get_current_project().id)

        # Flatten all adapter interfaces into one tree, stored in session for later filtering.
        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {"adapters_list": portlet_interface,
                             common.KEY_PARAMETERS_CONFIG: False,
                             common.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION}
        return self.fill_default_attributes(portlet_interface)

    @expose_fragment('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old portlets
        are still present, and if that is the case use their parameters
        configuration. For all the new portlets add entries in the burst
        configuration. Also remove old portlets that are no longer saved.
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in range(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When configuration already exists, and new portlets
            ### are selected, first check if any configuration was saved for
            ### each portlet and if that is the case, use it. If none is present
            ### create a new one.
            for idx_in_tab in range(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                # portlet_id < 0 marks an empty slot coming from the client.
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        current_tab.portlets[idx_in_tab] = self.burst_service.new_portlet_configuration(
                            portlet_id, tab_idx, idx_in_tab, portlet_name)
                    else:
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None
        # For generating the HTML get for each id the corresponding portlet
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)
        return {'portlet_tab_list': selected_tab_portlets}

    @expose_fragment('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing
        from tab to tab and selecting which portlets will be displayed.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}

    @expose_json
    def change_selected_tab(self, tab_nr):
        """
        Set :param tab_nr: as the currently selected tab in the stored burst configuration.
        """
        common.get_from_session(common.KEY_BURST_CONFIG).selected_tab = int(tab_nr)

    @expose_json
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst, as a json.
        """
        burst_entity = common.get_from_session(common.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration

    @cherrypy.expose
    @handle_error(redirect=False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save parameters.
        :param index_in_tab: the index of the configured portlet in the selected tab
        :param data: the {"portlet_parameters": json_string} where json_string is a
            Jsonified dictionary {"name": value}, representing the configuration
            of the current portlet

        Having these inputs, current method updates the configuration of the
        portlet in the corresponding tab position from the burst configuration in session.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(index_in_tab)]
        data = json.loads(data['portlet_parameters'])

        # Replace all void entries with 'None'
        for entry in data:
            if data[entry] == '':
                data[entry] = None
        need_relaunch = self.burst_service.update_portlet_configuration(old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            common.add2session(common.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(old_portlet_config.visualizer)
            return "noRelaunch"

    @expose_json
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to burst_name.
        """
        validation_result = self._is_burst_name_ok(burst_name)
        if validation_result is True:
            self.burst_service.rename_burst(burst_id, burst_name)
            return {'success': "Simulation successfully renamed!"}
        else:
            return {'error': validation_result}

    @expose_json
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (case in which we will fill with simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        data = json.loads(data['simulator_parameters'])

        # Normalize the simulation length (e.g. "1h" style input) to milliseconds.
        simulation_length = data['simulation_length']
        try:
            simulation_length = total_ms(simulation_length)
        except ValueError, e:
            return {'error': e.message}
        data['simulation_length'] = unicode(simulation_length)

        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)

        ## Validate new burst-name ('none_undefined' is the client-side sentinel for "no name typed")
        if launch_mode == LAUNCH_NEW and burst_name != 'none_undefined':
            validation_result = self._is_burst_name_ok(burst_name)
            if validation_result is True:
                burst_config.name = burst_name
            else:
                return {'error': validation_result}

        ## Fill all parameters
        user_id = common.get_logged_user().id
        data[common.KEY_ADAPTER] = self.cached_simulator_algorithm.id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = common.get_current_project().id

        ## Do the asynchronous launch
        try:
            burst_id, burst_name = self.burst_service.launch_burst(burst_config, 0, self.cached_simulator_algorithm.id,
                                                                   user_id, launch_mode)
            return {'id': burst_id, 'name': burst_name}
        except BurstServiceException as e:
            self.logger.exception("Could not launch burst!")
            return {'error': e.message}
class BurstController(BurstBaseController):
    """
    Controller class for Burst-Pages.

    Serves the simulation-cockpit web pages: burst (simulation) launch, history,
    portlet configuration and visualization. Most handlers read and mutate the
    burst configuration stored in the CherryPy session under common.KEY_BURST_CONFIG.
    """

    def __init__(self):
        BurstBaseController.__init__(self)
        self.burst_service = BurstService()
        self.workflow_service = WorkflowService()
        self.context = SelectedAdapterContext()

        ## Cache simulator Tree, Algorithm and AlgorithmGroup, for performance issues.
        # NOTE(review): self.flow_service is presumably initialized by BurstBaseController — confirm.
        algorithm, self.cached_simulator_algo_group = self.flow_service.get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.cached_simulator_algorithm_id = algorithm.id

    @property
    @context_selected
    def cached_simulator_input_tree(self):
        """
        Cache Simulator's input tree, for performance issues.
        Anyway, without restart, the introspected tree will not be different on multiple executions.
        :returns: Simulator's Input Tree (copy from cache or just loaded)
        """
        cached_simulator_tree = common.get_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        if cached_simulator_tree is None:
            cached_simulator_tree = self.flow_service.prepare_adapter(common.get_current_project().id,
                                                                      self.cached_simulator_algo_group)[1]
            common.add2session(common.KEY_CACHED_SIMULATOR_TREE, cached_simulator_tree)
        # Deep-copy so callers can mutate the returned tree without corrupting the session cache.
        return copy.deepcopy(cached_simulator_tree)

    @expose_page
    @settings
    @context_selected
    def index(self):
        """Get on burst main page"""
        # todo : reuse load_burst here for consistency.
        template_specification = dict(mainContent="burst/main_burst", title="Simulation Cockpit",
                                      baseUrl=TvbProfile.current.web.BASE_URL,
                                      includedResources='project/included_resources')
        portlets_list = self.burst_service.get_available_portlets()
        session_stored_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        # A burst with id None is a staging (not-yet-persisted) configuration;
        # only in that case do we (re)build the simulator input form.
        if session_stored_burst is None or session_stored_burst.id is None:
            if session_stored_burst is None:
                session_stored_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
                common.add2session(common.KEY_BURST_CONFIG, session_stored_burst)

            adapter_interface = self.cached_simulator_input_tree
            if session_stored_burst is not None:
                current_data = session_stored_burst.get_all_simulator_values()[0]
                adapter_interface = ABCAdapter.fill_defaults(adapter_interface, current_data, True)
                ### Add simulator tree to session to be available in filters
                self.context.add_adapter_to_session(self.cached_simulator_algo_group, adapter_interface, current_data)
            template_specification['inputList'] = adapter_interface

        selected_portlets = session_stored_burst.update_selected_portlets()
        template_specification['burst_list'] = self.burst_service.get_available_bursts(common.get_current_project().id)
        template_specification['portletList'] = portlets_list
        template_specification['selectedPortlets'] = json.dumps(selected_portlets)
        template_specification['draw_hidden_ranges'] = True
        template_specification['burstConfig'] = session_stored_burst

        ### Prepare PSE available metrics
        ### We put here all available algorithms, because the metrics select area is a generic one,
        ### and not loaded with every Burst Group change in history.
        algo_group = self.flow_service.get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE,
                                                                         MEASURE_METRICS_CLASS)[1]
        adapter_instance = ABCAdapter.build_adapter(algo_group)
        if adapter_instance is not None and hasattr(adapter_instance, 'available_algorithms'):
            template_specification['available_metrics'] = [metric_name for metric_name
                                                           in adapter_instance.available_algorithms.keys()]
        else:
            template_specification['available_metrics'] = []

        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        template_specification[common.KEY_SECTION] = 'burst'
        return self.fill_default_attributes(template_specification)

    @expose_fragment('burst/burst_history')
    def load_burst_history(self):
        """
        Load the available burst that are stored in the database at this time.
        This is one alternative to 'chrome-back problem'.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        return {'burst_list': self.burst_service.get_available_bursts(common.get_current_project().id),
                'selectedBurst': session_burst.id}

    @cherrypy.expose
    @handle_error(redirect=False)
    def get_selected_burst(self):
        """
        Return the burst that is currently stored in session.
        This is one alternative to 'chrome-back problem'.
        """
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        # Returned as a plain string because this is a raw cherrypy.expose handler (no JSON encoding).
        if session_burst.id:
            return str(session_burst.id)
        else:
            return 'None'

    @expose_fragment('burst/portlet_configure_parameters')
    def get_portlet_configurable_interface(self, index_in_tab):
        """
        From the position given by the tab index and the index from that tab, get the portlet
        configuration and build the configurable interface for that portlet.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_index = burst_config.selected_tab
        portlet_config = burst_config.tabs[tab_index].portlets[int(index_in_tab)]
        portlet_interface = self.burst_service.build_portlet_interface(portlet_config, common.get_current_project().id)

        # Flatten the per-adapter input trees into one list so it can be stored for filters.
        full_portlet_input_tree = []
        for entry in portlet_interface:
            full_portlet_input_tree.extend(entry.interface)
        self.context.add_portlet_to_session(full_portlet_input_tree)

        portlet_interface = {"adapters_list": portlet_interface,
                             common.KEY_PARAMETERS_CONFIG: False,
                             common.KEY_SESSION_TREE: self.context.KEY_PORTLET_CONFIGURATION}
        return self.fill_default_attributes(portlet_interface)

    @expose_fragment('burst/portlets_preview')
    def portlet_tab_display(self, **data):
        """
        When saving a new configuration of tabs, check if any of the old portlets are still
        present, and if that is the case use their parameters configuration.

        For all the new portlets add entries in the burst configuration. Also remove old
        portlets that are no longer saved.

        :param data: expects a 'tab_portlets_list' JSON entry — per tab, a list of
            [portlet_id, portlet_name] pairs (portlet_id < 0 means "empty slot").
        """
        tab_portlets_list = json.loads(data['tab_portlets_list'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab_idx = burst_config.selected_tab
        for tab_idx in xrange(len(tab_portlets_list)):
            current_tab = burst_config.tabs[tab_idx]
            ### When configuration already exists, and new portlets          #####
            ### are selected, first check if any configuration was saved for #####
            ### each portlet and if that is the case, use it. If none is present #
            ### create a new one.                                            #####
            for idx_in_tab in xrange(len(tab_portlets_list[tab_idx])):
                portlet_id = tab_portlets_list[tab_idx][idx_in_tab][0]
                portlet_name = tab_portlets_list[tab_idx][idx_in_tab][1]
                if portlet_id >= 0:
                    saved_config = current_tab.portlets[idx_in_tab]
                    if saved_config is None or saved_config.portlet_id != portlet_id:
                        # Different (or no) portlet previously in this slot: start a fresh configuration.
                        current_tab.portlets[idx_in_tab] = self.burst_service.new_portlet_configuration(
                            portlet_id, tab_idx, idx_in_tab, portlet_name)
                    else:
                        # Same portlet kept in place: only refresh its display name.
                        saved_config.visualizer.ui_name = portlet_name
                else:
                    current_tab.portlets[idx_in_tab] = None

        #For generating the HTML get for each id the corresponding portlet
        selected_tab_portlets = []
        saved_selected_tab = burst_config.tabs[selected_tab_idx]
        for portlet in saved_selected_tab.portlets:
            if portlet:
                portlet_id = int(portlet.portlet_id)
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_id)
                portlet_entity.name = portlet.name
                selected_tab_portlets.append(portlet_entity)

        return {'portlet_tab_list': selected_tab_portlets}

    @expose_fragment('burst/portlets_preview')
    def get_configured_portlets(self):
        """
        Return the portlets for one given tab. This is used when changing from tab to
        tab and selecting which portlets will be displayed.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        if burst_config is None:
            return {'portlet_tab_list': []}

        tab_idx = burst_config.selected_tab
        tab_portlet_list = []
        for portlet_cfg in burst_config.tabs[int(tab_idx)].portlets:
            if portlet_cfg is not None:
                portlet_entity = self.burst_service.get_portlet_by_id(portlet_cfg.portlet_id)
                portlet_entity.name = portlet_cfg.name
                tab_portlet_list.append(portlet_entity)
        return {'portlet_tab_list': tab_portlet_list}

    @expose_json
    def change_selected_tab(self, tab_nr):
        """
        Set :param tab_nr: as the currently selected tab in the stored burst configuration.
        """
        # Mutates session state only; implicit None return (client ignores the payload).
        common.get_from_session(common.KEY_BURST_CONFIG).selected_tab = int(tab_nr)

    @expose_json
    def get_portlet_session_configuration(self):
        """
        Get the current configuration of portlets stored in session for this burst, as a json.
        """
        burst_entity = common.get_from_session(common.KEY_BURST_CONFIG)
        returned_configuration = burst_entity.update_selected_portlets()
        return returned_configuration

    @cherrypy.expose
    @handle_error(redirect=False)
    def save_parameters(self, index_in_tab, **data):
        """
        Save parameters
        :param index_in_tab: the index of the configured portlet in the selected tab
            (the tab index itself is read from the session-stored burst, not passed in)
        :param data: the {"portlet_parameters": json_string} Where json_string is a Jsonified dictionary
            {"name": value}, representing the configuration of the current portlet

        Having these inputs, current method updated the configuration of the portlet in the
        corresponding tab position form the burst configuration in session.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        tab_nr = burst_config.selected_tab
        old_portlet_config = burst_config.tabs[int(tab_nr)].portlets[int(index_in_tab)]
        data = json.loads(data['portlet_parameters'])

        # Replace all void entries with 'None'
        for entry in data:
            if data[entry] == '':
                data[entry] = None

        need_relaunch = self.burst_service.update_portlet_configuration(old_portlet_config, data)
        if need_relaunch:
            #### Reset Burst Configuration into an entity not persisted (id = None for all)
            common.add2session(common.KEY_BURST_CONFIG, burst_config.clone())
            return "relaunchView"
        else:
            self.workflow_service.store_workflow_step(old_portlet_config.visualizer)
            return "noRelaunch"

    @expose_json
    def rename_burst(self, burst_id, burst_name):
        """
        Rename the burst given by burst_id, setting its new name to burst_name.
        :returns: {'success': ...} on rename, {'error': reason} when the name is invalid.
        """
        validation_result = self._is_burst_name_ok(burst_name)
        if validation_result is True:
            self.burst_service.rename_burst(burst_id, burst_name)
            return {'success': "Simulation successfully renamed!"}
        else:
            return {'error': validation_result}

    @expose_json
    def launch_burst(self, launch_mode, burst_name, **data):
        """
        Do the actual burst launch, using the configuration saved in current session.
        :param launch_mode: new/branch/continue
        :param burst_name: user-given burst name. It can be empty (case in which we will fill with simulation_x)
        :param data: kwargs for simulation input parameters.
        """
        data = json.loads(data['simulator_parameters'])
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)

        ## Validate new burst-name
        # 'none_undefined' is the sentinel sent by the UI when no name was typed.
        if launch_mode == LAUNCH_NEW and burst_name != 'none_undefined':
            validation_result = self._is_burst_name_ok(burst_name)
            if validation_result is True:
                burst_config.name = burst_name
            else:
                return {'error': validation_result}

        ## Fill all parameters
        user_id = common.get_logged_user().id
        data[common.KEY_ADAPTER] = self.cached_simulator_algorithm_id
        burst_config.update_simulator_configuration(data)
        burst_config.fk_project = common.get_current_project().id

        ## Do the asynchronous launch
        burst_id, burst_name = self.burst_service.launch_burst(burst_config, 0, self.cached_simulator_algorithm_id,
                                                               user_id, launch_mode)
        return {'id': burst_id, 'name': burst_name}

    @expose_json
    def load_burst(self, burst_id):
        """
        Given a burst id return its running status, whether it was an operation group and the selected tab.
        This is called when a burst is selected in the history,
        when returning from a burst config page (model param or noise)
        and when the status of running simulations is polled.
        Besides returning these values it updates the session stored burst.

        A burst configuration has 2 meanings.
        It is a staging configuration for a new burst (stored in transients in the session).
        It is the configuration used to launch a simulation and its running status (stored in the db).

        This method has to merge the two meanings.
        If the requested burst_id is different from the one held in the session,
        then the burst config is loaded from the db, discarding any session stored config.
        If the id is the same then the session config is kept.
        """
        try:
            burst_id = int(burst_id)
            old_burst = common.get_from_session(common.KEY_BURST_CONFIG)
            burst, group_gid = self.burst_service.load_burst(burst_id)

            if old_burst.id == burst_id:
                # This function was called to reload the current burst.
                # Merge session config into the db config. Overwrite all transient fields
                burst.simulator_configuration = old_burst.simulator_configuration
                burst.dynamic_ids = old_burst.dynamic_ids

            burst.selected_tab = old_burst.selected_tab
            common.add2session(common.KEY_BURST_CONFIG, burst)
            return {'status': burst.status, 'group_gid': group_gid, 'selected_tab': burst.selected_tab}
        except Exception:
            ### Most probably Burst was removed. Delete it from session, so that client
            ### has a good chance to get a good response on refresh
            self.logger.exception("Error loading burst")
            common.remove_from_session(common.KEY_BURST_CONFIG)
            raise

    @expose_json
    def get_history_status(self, **data):
        """
        For each burst id received, get the status and return it.
        """
        return self.burst_service.update_history_status(json.loads(data['burst_ids']))

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel or Remove the burst entity given by burst_id.
        :returns 'reset-new': When currently selected burst was removed.
            JS will need to reset selection to a new entry
        :returns 'canceled': When current burst was still running and was just stopped.
        :returns 'done': When no action is required on the client.
        """
        burst_id = int(burst_id)
        session_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        removed = self.burst_service.cancel_or_remove_burst(burst_id)

        if removed:
            if session_burst.id == burst_id:
                return "reset-new"
            return 'done'
        else:
            # Burst was stopped since it was running
            return 'canceled'

    @expose_json
    def get_selected_portlets(self):
        """
        Get the selected portlets for the loaded burst.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        return burst.update_selected_portlets()

    @cherrypy.expose
    @handle_error(redirect=False)
    def get_visualizers_for_operation_id(self, op_id, width, height):
        """
        Method called from parameters exploration page in case a burst with a range of
        parameters for the simulator was launched.
        :param op_id: the selected operation id from the parameter space exploration.
        :param width: the width of the right side display
        :param height: the height of the right side display

        Given these parameters first get the workflow to which op_id belongs, then load the portlets
        from that workflow as the current burst configuration. Width and height are used to get the
        proper sizes for the visualization iFrames.
        """
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)
        burst_config = self.burst_service.load_tab_configuration(burst_config, op_id)
        common.add2session(common.KEY_BURST_CONFIG, burst_config)
        return self.load_configured_visualizers(width, height)

    @expose_fragment("burst/portlets_view")
    def load_configured_visualizers(self, width='800', height='600'):
        """
        Load all the visualization steps for this tab. Width and height represent
        the dimensions of the right side Div, so that we can compute for each iFrame
        the maximum size its visualizer can take.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        selected_tab = burst.selected_tab

        tab_portlet_list = []
        for portlet_cfg in burst.tabs[int(selected_tab)].portlets:
            if portlet_cfg is not None:
                tab_portlet_list.append(self.__portlet_config2portlet_entity(portlet_cfg))

        return {'status': burst.status, 'portlet_tab_list': tab_portlet_list,
                'max_width': int(width), 'max_height': int(height), 'model': tvb.core.entities.model}

    @expose_fragment("burst/portlet_visualization_template")
    def check_status_for_visualizer(self, selected_tab, index_in_tab, width='800', height='600'):
        """
        This call is used to check on a regular basis if the data for a certain portlet is
        available for visualization. Should return the status and the HTML to be displayed.
        """
        burst = common.get_from_session(common.KEY_BURST_CONFIG)
        target_portlet = burst.tabs[int(selected_tab)].portlets[int(index_in_tab)]
        target_portlet = self.__portlet_config2portlet_entity(target_portlet)
        template_dict = {'portlet_entity': target_portlet, 'model': tvb.core.entities.model,
                         'width': int(width), 'height': int(height)}
        return template_dict

    @expose_json
    def reset_burst(self):
        """
        Called when click on "New Burst" entry happens from UI.
        This will generate an empty new Burst Configuration.
        """
        # Drop the cached simulator tree as well, so the form is rebuilt from defaults.
        common.remove_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        new_burst = self.burst_service.new_burst_configuration(common.get_current_project().id)
        common.add2session(common.KEY_BURST_CONFIG, new_burst)

    @cherrypy.expose
    @handle_error(redirect=False)
    def copy_burst(self, burst_id):
        """
        When currently selected entry is a valid Burst, create a clone of that Burst.
        :returns: the name of the cloned burst, or falls back to reset_burst() when
            the requested burst cannot be loaded.
        """
        common.remove_from_session(common.KEY_CACHED_SIMULATOR_TREE)
        base_burst = self.burst_service.load_burst(burst_id)[0]
        if (base_burst is None) or (base_burst.id is None):
            return self.reset_burst()
        # Store a non-persisted clone in session; the original burst stays untouched in the db.
        common.add2session(common.KEY_BURST_CONFIG, base_burst.clone())
        return base_burst.name

    @expose_fragment("burst/base_portlets_iframe")
    def launch_visualization(self, index_in_tab, frame_width, frame_height, method_name="generate_preview"):
        """
        Launch the visualization for this tab and index in tab. The width and height represent
        the maximum of the inner visualization canvas so that it can fit in the iFrame.
        """
        result = {}
        try:
            burst = common.get_from_session(common.KEY_BURST_CONFIG)
            visualizer = burst.tabs[burst.selected_tab].portlets[int(index_in_tab)].visualizer
            result = self.burst_service.launch_visualization(visualizer, float(frame_width),
                                                             float(frame_height), method_name)[0]
            result['launch_success'] = True
        # Deliberate best-effort: any failure is reported back to the template, not raised.
        except Exception, ex:
            result['launch_success'] = False
            result['error_msg'] = str(ex)
            self.logger.exception("Could not launch Portlet Visualizer...")

        return self.fill_default_attributes(result)