Code Example #1
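    # Fragment of FlowController.__init__ (shown in full in Code Example #15): it builds the list
    # of analyzer adapter links later exposed as the analyzers submenu (see step_analyzers in Code Example #15).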
    def __init__(self):
        BaseController.__init__(self)
        self.context = SelectedAdapterContext()
        self.files_helper = FilesHelper()
        self.operation_services = OperationService()
        self.simulator_controller = SimulatorController()

        analyze_category, groups = self.algorithm_service.get_analyze_groups()
        adapters_list = []
        for adapter_group in groups:

            if len(adapter_group.children) > 1:
                ids = [str(child.id) for child in adapter_group.children]
                ids = ','.join(ids)
                adapter_link = '/flow/show_group_of_algorithms/' + str(
                    analyze_category.id) + "/" + ids
            else:
                adapter_link = self.get_url_adapter(
                    analyze_category.id, adapter_group.children[0].id)

            adapters_list.append({
                common.KEY_TITLE: adapter_group.name,
                'link': adapter_link,
                'description': adapter_group.description,
                'subsection': adapter_group.children[0].subsection_name
            })
        self.analyze_adapters = adapters_list
Code Example #2
 def setup_method(self):
     """
     Sets up the environment for testing;
     creates a `FlowController`, a `SimulatorController` and an `OperationService`
     """
     self.init()
     self.flow_c = FlowController()
     self.burst_c = SimulatorController()
     self.operation_service = OperationService()
Code Example #3
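    # Transactional test setup: creates a SimulatorController, a test user and project, and a mocked
    # in-memory CherryPy session (RamSession), then forces the request method to POST.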
    def transactional_setup_method(self):
        self.simulator_controller = SimulatorController()
        self.test_user = TestFactory.create_user('SimulationController_User')
        self.test_project = TestFactory.create_project(
            self.test_user, "SimulationController_Project")
        TvbProfile.current.web.RENDER_HTML = False
        self.session_stored_simulator = SimulatorAdapterModel()

        self.sess_mock = RamSession()
        self.sess_mock[KEY_USER] = self.test_user
        self.sess_mock[KEY_PROJECT] = self.test_project

        cherrypy.request.method = "POST"
Code Example #4
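    # Same setup as Code Example #3, but it also imports a connectivity ZIP and stores its GID
    # on the session-stored SimulatorAdapterModel.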
    def transactional_setup_method(self):
        self.simulator_controller = SimulatorController()
        self.test_user = TestFactory.create_user('SimulationController_User')
        self.test_project = TestFactory.create_project(self.test_user, "SimulationController_Project")
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project)

        self.session_stored_simulator = SimulatorAdapterModel()
        self.session_stored_simulator.connectivity = UUID(connectivity.gid)

        self.sess_mock = RamSession()
        self.sess_mock[KEY_USER] = self.test_user
        self.sess_mock[KEY_PROJECT] = self.test_project

        cherrypy.request.method = "POST"
Code Example #5
    def test_submit_noise_configuration_happy(self, connectivity_factory):
        """
        Submitting a noise configuration writes the noise array to the required key
        in the burst configuration.
        """
        self.init()
        noise_controller = NoiseConfigurationController()
        SimulatorController().index()
        simulator = cherrypy.session[common.KEY_SIMULATOR_CONFIG]
        connectivity = connectivity_factory()
        simulator.connectivity = connectivity.gid
        simulator.integrator = HeunStochastic()

        # A noise configuration in the format expected by submit(); assumes the Generic2dOscillator model.
        nodes_range = list(range(connectivity.number_of_regions))
        noise_in = [{'V': 1.0, 'W': 2.0} for _ in nodes_range]
        noise_in = json.dumps(noise_in)

        self._expect_redirect('/burst/', noise_controller.submit, noise_in)

        expected_noise_arr = [[1.0 for _ in nodes_range],
                              [2.0 for _ in nodes_range]]
        actual_noise_arr = simulator.integrator.noise.nsig
        assert (expected_noise_arr == actual_noise_arr).all()

        self.cleanup()
Code Example #6
 def transactional_setup_method(self):
     """
     Sets up the environment for testing;
     creates a `RegionsModelParametersController` and a connectivity
     """
     self.init()
     self.region_m_p_c = RegionsModelParametersController()
     SimulatorController().index()
     self.simulator = cherrypy.session[common.KEY_SIMULATOR_CONFIG]
     self._setup_dynamic()
Code Example #7
 def transactional_setup_fixture(self, region_mapping_index_factory):
     self.init()
     self.surface_m_p_c = SurfaceModelParametersController()
     SimulatorController().index()
     simulator = cherrypy.session[common.KEY_SIMULATOR_CONFIG]
     region_mapping_index = region_mapping_index_factory()
     simulator.connectivity = region_mapping_index.fk_connectivity_gid
     simulator.surface = CortexViewModel()
     simulator.surface.surface_gid = region_mapping_index.fk_surface_gid
     simulator.surface.region_mapping_data = region_mapping_index.gid
Code Example #8
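 # Fixture for SurfaceModelParametersController tests in the older parameter-dict style: the
 # SIMULATOR_PARAMETERS plus connectivity and surface GIDs are written into the stored burst configuration.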
 def transactional_setup_fixture(self, connectivity_factory, surface_factory):
     self.init()
     self.surface_m_p_c = SurfaceModelParametersController()
     SimulatorController().index()
     stored_burst = cherrypy.session[common.KEY_BURST_CONFIG]
     _, self.connectivity = connectivity_factory
     _, self.surface = surface_factory
     new_params = {}
     for key, val in SIMULATOR_PARAMETERS.items():
         new_params[key] = {'value': val}
     new_params['connectivity'] = {'value': self.connectivity.gid}
     new_params['surface'] = {'value': self.surface.gid}
     stored_burst.simulator_configuration = new_params
Code Example #9
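 # Standalone variant of FlowController.cancel_or_remove_operation (compare Code Example #15);
 # here the call appears to be delegated to SimulatorController directly.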
 def cancel_or_remove_operation(self,
                                operation_id,
                                is_group,
                                remove_after_stop=False):
     """
     Stop the operation given by operation_id. If is_group is true, stop all the
     operations from that group.
     """
     operation_id = int(operation_id)
     is_group = int(is_group) != 0
     if isinstance(remove_after_stop, str):
         remove_after_stop = bool(remove_after_stop)
     return SimulatorController.cancel_or_remove_operation(
         operation_id, is_group, remove_after_stop)
Code Example #10
    def reload_burst_operation(self, operation_id, is_group, **_):
        """
        Find out from which burst this operation was launched. Set that burst as the
        selected one and redirect to the burst page.
        """
        is_group = int(is_group)
        if not is_group:
            operation = OperationService.load_operation(int(operation_id))
        else:
            op_group = ProjectService.get_operation_group_by_id(operation_id)
            first_op = ProjectService.get_operations_in_group(op_group)[0]
            operation = OperationService.load_operation(int(first_op.id))
        SimulatorController().copy_simulator_configuration(operation.burst.id)

        raise cherrypy.HTTPRedirect("/burst/")
Code Example #11
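# Web-server bootstrap (presumably TVB's run script): mounts static folders and every controller
# under its URL prefix, registers extra request handlers and an optional HPC housekeeping task,
# then starts the CherryPy engine.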
def init_cherrypy(arguments=None):
    #### Mount static folders from modules marked for introspection
    arguments = arguments or []
    CONFIGUER = TvbProfile.current.web.CHERRYPY_CONFIGURATION
    for module in arguments:
        module_inst = importlib.import_module(str(module))
        module_path = os.path.dirname(os.path.abspath(module_inst.__file__))
        CONFIGUER["/static_" + str(module)] = {'tools.staticdir.on': True,
                                               'tools.staticdir.dir': '.',
                                               'tools.staticdir.root': module_path}

    #### Mount controllers, and specify the root URL for them.
    cherrypy.tree.mount(BaseController(), "/", config=CONFIGUER)
    cherrypy.tree.mount(UserController(), "/user/", config=CONFIGUER)
    cherrypy.tree.mount(ProjectController(), "/project/", config=CONFIGUER)
    cherrypy.tree.mount(FigureController(), "/project/figure/", config=CONFIGUER)
    cherrypy.tree.mount(FlowController(), "/flow/", config=CONFIGUER)
    cherrypy.tree.mount(SettingsController(), "/settings/", config=CONFIGUER)
    cherrypy.tree.mount(HelpController(), "/help/", config=CONFIGUER)
    cherrypy.tree.mount(SimulatorController(), "/burst/", config=CONFIGUER)
    cherrypy.tree.mount(ParameterExplorationController(), "/burst/explore/", config=CONFIGUER)
    cherrypy.tree.mount(DynamicModelController(), "/burst/dynamic/", config=CONFIGUER)
    cherrypy.tree.mount(SpatioTemporalController(), "/spatial/", config=CONFIGUER)
    cherrypy.tree.mount(RegionsModelParametersController(), "/burst/modelparameters/regions/", config=CONFIGUER)
    cherrypy.tree.mount(SurfaceModelParametersController(), "/spatial/modelparameters/surface/", config=CONFIGUER)
    cherrypy.tree.mount(RegionStimulusController(), "/spatial/stimulus/region/", config=CONFIGUER)
    cherrypy.tree.mount(SurfaceStimulusController(), "/spatial/stimulus/surface/", config=CONFIGUER)
    cherrypy.tree.mount(LocalConnectivityController(), "/spatial/localconnectivity/", config=CONFIGUER)
    cherrypy.tree.mount(NoiseConfigurationController(), "/burst/noise/", config=CONFIGUER)
    cherrypy.tree.mount(HPCController(), "/hpc/", config=CONFIGUER)

    cherrypy.config.update(CONFIGUER)

    # ----------------- Register additional request handlers -----------------
    # This tool checks for MAX upload size
    cherrypy.tools.upload = Tool('on_start_resource', RequestHandler.check_upload_size)
    # This tool cleans up files on disk (mainly after export)
    cherrypy.tools.cleanup = Tool('on_end_request', RequestHandler.clean_files_on_disk)
    # ----------------- End register additional request handlers ----------------

    # Register housekeeping job
    if TvbProfile.current.hpc.IS_HPC_RUN and TvbProfile.current.hpc.CAN_RUN_HPC:
        cherrypy.engine.housekeeper = cherrypy.process.plugins.BackgroundTask(
            TvbProfile.current.hpc.BACKGROUND_JOB_INTERVAL, HPCOperationService.check_operations_job)
        cherrypy.engine.housekeeper.start()

    # Start the HTTP server now
    cherrypy.engine.start()
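A minimal usage sketch for the bootstrap above: a launcher script would call init_cherrypy and then keep the process alive. Only init_cherrypy and the cherrypy engine calls come from the example itself; the __main__ guard and the module list passed in are assumptions for illustration.

if __name__ == "__main__":
    # Hypothetical launcher; the module name below is an assumed placeholder, not taken from the code above.
    init_cherrypy(["tvb.adapters"])
    # Block the main thread until the CherryPy engine is stopped.
    cherrypy.engine.block()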
Code Example #12
 def transactional_setup_fixture(self, connectivity_factory):
     """
     Sets up the environment for testing;
     creates a `RegionsModelParametersController` and a connectivity
     """
     self.init()
     self.region_m_p_c = RegionsModelParametersController()
     SimulatorController().index()
     stored_burst = cherrypy.session[common.KEY_BURST_CONFIG]
     self.connectivity = connectivity_factory()
     new_params = {}
     for key, val in SIMULATOR_PARAMETERS.items():
         new_params[key] = {'value': val}
     new_params['connectivity'] = {'value': self.connectivity.gid}
     stored_burst.simulator_configuration = new_params
     self._setup_dynamic()
Code Example #13
    def test_submit_noise_configuration_happy(self, connectivity_factory):
        """
        Submitting a noise configuration writes the noise array to the required key
        in the burst configuration.
        """
        self.init()
        self.noise_c = NoiseConfigurationController()
        self.connectivity = connectivity_factory()
        SimulatorController().index()

        stored_burst = cherrypy.session[common.KEY_BURST_CONFIG]

        new_params = {}
        for key, val in SIMULATOR_PARAMETERS.items():
            new_params[key] = {'value': val}
        new_params['connectivity'] = {'value': self.connectivity.gid}

        # Simulate selection of a specific integrator from the UI
        new_params[PARAM_INTEGRATOR] = {'value': EulerStochastic.__name__}
        new_params[PARAM_MODEL] = {
            'value': ModelsEnum.GENERIC_2D_OSCILLATOR.get_class().__name__
        }
        new_params[INTEGRATOR_PARAMETERS + '_option_EulerStochastic_noise'] = {
            'value': Additive.__name__
        }
        stored_burst.simulator_configuration = new_params
        # A noise configuration in the format expected by submit(); assumes the Generic2dOscillator model.
        nodes_range = list(range(self.connectivity.number_of_regions))
        noise_in = [{'V': 1.0, 'W': 2.0} for _ in nodes_range]
        noise_in = json.dumps(noise_in)

        # expected noise array in burst config
        expected_noise_arr = [[1.0 for _ in nodes_range],
                              [2.0 for _ in nodes_range]]

        self._expect_redirect('/burst/', self.noise_c.submit, noise_in)

        simulator_configuration = cherrypy.session[
            common.KEY_BURST_CONFIG].simulator_configuration
        nsig_key = 'integrator_parameters_option_EulerStochastic_noise_parameters_option_Additive_nsig'
        actual_noise_arr = json.loads(
            simulator_configuration[nsig_key]['value'])

        assert expected_noise_arr == actual_noise_arr
Code Example #14
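    # Exercises SurfaceModelParametersController.edit_model_parameters(): wires a region mapping onto
    # the session-stored simulator, then checks the expected keys and values in the returned template dict.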
    def test_edit_model_parameters(self, region_mapping_index_factory):
        self.init()
        surface_m_p_c = SurfaceModelParametersController()
        SimulatorController().index()
        simulator = cherrypy.session[common.KEY_SIMULATOR_CONFIG]
        region_mapping_index = region_mapping_index_factory()
        simulator.connectivity = region_mapping_index.fk_connectivity_gid
        simulator.surface = CortexViewModel()
        simulator.surface.surface_gid = region_mapping_index.fk_surface_gid
        simulator.surface.region_mapping_data = region_mapping_index.gid

        result_dict = surface_m_p_c.edit_model_parameters()
        expected_keys = [
            'urlNormals', 'urlNormalsPick', 'urlTriangles', 'urlTrianglesPick',
            'urlVertices', 'urlVerticesPick', 'mainContent',
            'parametersEquationPlotForm', 'baseUrl', 'equationsPrefixes',
            'brainCenter', 'applied_equations'
        ]
        # map(lambda x: self.assertTrue(x in result_dict), expected_keys)
        assert all(x in result_dict for x in expected_keys)
        assert result_dict['baseUrl'] == '/spatial/modelparameters/surface'
        assert result_dict['mainContent'] == 'spatial/model_param_surface_main'
Code Example #15
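# Full FlowController (TVB web controller); Code Examples #1, #9 and #10 show variants of its
# __init__, cancel_or_remove_operation and reload_burst_operation methods.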
class FlowController(BaseController):
    """
    This class takes care of executing steps in projects.
    """
    def __init__(self):
        BaseController.__init__(self)
        self.context = SelectedAdapterContext()
        self.files_helper = FilesHelper()
        self.operation_services = OperationService()
        self.simulator_controller = SimulatorController()

        analyze_category, groups = self.algorithm_service.get_analyze_groups()
        adapters_list = []
        for adapter_group in groups:

            if len(adapter_group.children) > 1:
                ids = [str(child.id) for child in adapter_group.children]
                ids = ','.join(ids)
                adapter_link = '/flow/show_group_of_algorithms/' + str(
                    analyze_category.id) + "/" + ids
            else:
                adapter_link = self.get_url_adapter(
                    analyze_category.id, adapter_group.children[0].id)

            adapters_list.append({
                common.KEY_TITLE: adapter_group.name,
                'link': adapter_link,
                'description': adapter_group.description,
                'subsection': adapter_group.children[0].subsection_name
            })
        self.analyze_adapters = adapters_list

    @expose_page
    @settings
    @context_selected
    def step_analyzers(self):
        """
        Choose exact action/adapter for current step.
        """
        try:
            analyze_category, groups = self.algorithm_service.get_analyze_groups()
            step_name = analyze_category.displayname.lower()
            template_specification = dict(mainContent="header_menu",
                                          section_name=step_name,
                                          controlPage=None,
                                          title="Select an analyzer",
                                          displayControl=False)
            template_specification[common.KEY_SUBMENU_LIST] = self.analyze_adapters
            return self.fill_default_attributes(template_specification)

        except ValueError:
            message = 'Could not load analyzers!'
            common.set_warning_message(message)
            self.logger.warning(message)
            raise cherrypy.HTTPRedirect('/tvb')

    @expose_page
    @settings
    @context_selected
    def step_connectivity(self):
        """
        Display menu for Connectivity Footer tab.
        """
        template_specification = dict(mainContent="header_menu",
                                      section_name='connectivity',
                                      controlPage=None,
                                      title="Select an algorithm",
                                      displayControl=False,
                                      subsection_name='step',
                                      submenu_list=self.connectivity_submenu)
        return self.fill_default_attributes(template_specification)

    @staticmethod
    def _compute_back_link(back_indicator, project):
        """
        Based on a simple indicator, compute URL for anchor BACK.
        """
        if back_indicator is None:
            # This applies to Connectivity and other visualizers when RELAUNCH button is used from Operation page.
            back_page_link = None
        elif back_indicator == 'burst':
            back_page_link = "/burst"
        elif back_indicator == 'operations':
            back_page_link = '/project/viewoperations/' + str(project.id)
        else:
            back_page_link = '/project/editstructure/' + str(project.id)
        return back_page_link

    @expose_page
    @settings
    @context_selected
    def show_group_of_algorithms(self, step_key, algorithm_ids):

        project = common.get_current_project()
        category = self.algorithm_service.get_category_by_id(step_key)
        algorithms = []
        for i in algorithm_ids.split(','):
            algorithm_id = int(i)
            algorithm = self.algorithm_service.get_algorithm_by_identifier(
                algorithm_id)
            algorithm.link = self.get_url_adapter(step_key, algorithm_id)
            adapter_instance = self.algorithm_service.prepare_adapter(
                algorithm)
            adapter_form = self.algorithm_service.prepare_adapter_form(
                adapter_instance, project.id)
            algorithm.form = self.render_adapter_form(adapter_form)
            algorithms.append(algorithm)

        template_specification = dict(
            mainContent="flow/algorithms_list",
            algorithms=algorithms,
            title="Select an algorithm",
            section_name=category.displayname.lower())
        self._populate_section(algorithms[0], template_specification)
        self.fill_default_attributes(template_specification,
                                     algorithms[0].group_name)
        return template_specification

    @expose_page
    @settings
    @context_selected
    def prepare_group_launch(self, group_gid, step_key, algorithm_id, **data):
        """
        Receives as input a group gid and an algorithm given by category and id, along
        with data that gives the name of the required input parameter for the algorithm.
        Having these generate a range of GID's for all the DataTypes in the group and
        launch a new operation group.
        """
        prj_service = ProjectService()
        dt_group = prj_service.get_datatypegroup_by_gid(group_gid)
        datatypes = prj_service.get_datatypes_from_datatype_group(dt_group.id)
        range_param_name = data.pop('range_param_name')
        data[RANGE_PARAMETER_1] = range_param_name
        data[range_param_name] = ','.join(dt.gid for dt in datatypes)
        self.operation_services.group_operation_launch(
            common.get_logged_user().id, common.get_current_project(),
            int(algorithm_id), int(step_key), **data)
        redirect_url = self._compute_back_link('operations',
                                               common.get_current_project())
        raise cherrypy.HTTPRedirect(redirect_url)

    @expose_page
    @settings
    @context_selected
    def default(self,
                step_key,
                adapter_key,
                cancel=False,
                back_page=None,
                **data):
        """
        Render a specific adapter.
        'data' are arguments for POST
        """
        project = common.get_current_project()
        algorithm = self.algorithm_service.get_algorithm_by_identifier(
            adapter_key)
        back_page_link = self._compute_back_link(back_page, project)

        if algorithm is None:
            raise cherrypy.HTTPRedirect("/tvb?error=True")

        if cherrypy.request.method == 'POST' and cancel:
            raise cherrypy.HTTPRedirect(back_page_link)

        submit_link = self.get_url_adapter(step_key, adapter_key, back_page)
        is_burst = back_page not in ['operations', 'data']
        if cherrypy.request.method == 'POST':
            data[common.KEY_ADAPTER] = adapter_key
            template_specification = self.execute_post(project.id, submit_link,
                                                       step_key, algorithm,
                                                       **data)
            self._populate_section(algorithm, template_specification, is_burst)
        else:
            template_specification = self.get_template_for_adapter(
                project.id,
                step_key,
                algorithm,
                submit_link,
                is_burst=is_burst)
        if template_specification is None:
            raise cherrypy.HTTPRedirect('/tvb')

        if KEY_CONTROLLS not in template_specification:
            template_specification[KEY_CONTROLLS] = None
        if common.KEY_SUBMIT_LINK not in template_specification:
            template_specification[common.KEY_SUBMIT_LINK] = submit_link
        if KEY_CONTENT not in template_specification:
            template_specification[KEY_CONTENT] = "flow/full_adapter_interface"
            template_specification[common.KEY_DISPLAY_MENU] = False
        else:
            template_specification[common.KEY_DISPLAY_MENU] = True
            template_specification[common.KEY_BACK_PAGE] = back_page_link

        template_specification[common.KEY_ADAPTER] = adapter_key
        template_specification[ABCDisplayer.KEY_IS_ADAPTER] = True
        self.fill_default_attributes(template_specification,
                                     algorithm.displayname)
        return template_specification

    @expose_fragment('form_fields/options_field')
    @settings
    @context_selected
    def get_filtered_datatypes(self, dt_module, dt_class, filters,
                               has_all_option, has_none_option):
        """
        Given the name from the input tree, the required dataType and a number of
        filters, return the available dataTypes that satisfy the imposed conditions.
        """
        index_class = getattr(sys.modules[dt_module], dt_class)()
        filters_dict = json.loads(filters)

        fields = []
        operations = []
        values = []

        for idx in range(len(filters_dict['fields'])):
            fields.append(filters_dict['fields'][idx])
            operations.append(filters_dict['operations'][idx])
            values.append(filters_dict['values'][idx])

        filter = FilterChain(fields=fields,
                             operations=operations,
                             values=values)
        project = common.get_current_project()

        form = Form(project_id=project.id)
        data_type_gid_attr = DataTypeGidAttr(
            linked_datatype=REGISTRY.get_datatype_for_index(index_class))
        data_type_gid_attr.required = not string2bool(has_none_option)

        select_field = TraitDataTypeSelectField(
            data_type_gid_attr,
            form.project_id,
            conditions=filter,
            has_all_option=string2bool(has_all_option))

        return {'options': select_field.options()}

    def execute_post(self, project_id, submit_url, step_key, algorithm,
                     **data):
        """ Execute HTTP POST on a generic step."""
        errors = None
        adapter_instance = ABCAdapter.build_adapter(algorithm)

        try:
            form = adapter_instance.get_form()(project_id=project_id)
            if 'fill_defaults' in data:
                form.fill_from_post_plus_defaults(data)
            else:
                form.fill_from_post(data)
            view_model = None
            if form.validate():
                try:
                    view_model = form.get_view_model()()
                    form.fill_trait(view_model)
                    self.context.add_view_model_to_session(view_model)
                except NotImplementedError:
                    self.logger.exception(
                        "Form and/or ViewModel not fully implemented for " +
                        str(form))
                    raise InvalidFormValues(
                        "Invalid form inputs! Could not find a model for this form!",
                        error_dict=form.get_errors_dict())
            else:
                raise InvalidFormValues(
                    "Invalid form inputs! Could not fill algorithm from the given inputs!",
                    error_dict=form.get_errors_dict())

            adapter_instance.submit_form(form)

            if issubclass(type(adapter_instance), ABCDisplayer):
                adapter_instance.current_project_id = project_id
                adapter_instance.user_id = common.get_logged_user().id
                result = adapter_instance.launch(view_model)
                if isinstance(result, dict):
                    return result
                else:
                    common.set_error_message(
                        "Invalid result returned from Displayer! Dictionary is expected!"
                    )
                return {}

            result = self.operation_services.fire_operation(
                adapter_instance,
                common.get_logged_user(),
                project_id,
                view_model=view_model)
            if isinstance(result, list):
                result = "Launched %s operations." % len(result)
            common.set_important_message(str(result))

        except formencode.Invalid as excep:
            errors = excep.unpack_errors()
            common.set_error_message("Invalid form inputs")
            self.logger.warning("Invalid form inputs %s" % errors)
        except (OperationException, LaunchException,
                TraitValueError) as excep1:
            self.logger.exception("Error while executing a Launch procedure:" +
                                  excep1.message)
            common.set_error_message(excep1.message)
        except InvalidFormValues as excep2:
            message, errors = excep2.display_full_errors()
            common.set_error_message(message)
            self.logger.warning("%s \n %s" % (message, errors))

        template_specification = self.get_template_for_adapter(
            project_id, step_key, algorithm, submit_url)
        if (errors is not None) and (template_specification is not None):
            template_specification[common.KEY_ERRORS] = errors
        return template_specification

    def get_template_for_adapter(self,
                                 project_id,
                                 step_key,
                                 stored_adapter,
                                 submit_url,
                                 is_burst=True,
                                 is_callout=False):
        """ Get Input HTML Interface template or a given adapter """
        try:
            group = None
            category = self.algorithm_service.get_category_by_id(step_key)
            title = "Fill parameters for step " + category.displayname.lower()
            if group:
                title = title + " - " + group.displayname

            adapter_instance = self.algorithm_service.prepare_adapter(
                stored_adapter)

            adapter_form = self.algorithm_service.prepare_adapter_form(
                adapter_instance, project_id)
            vm = self.context.get_view_model_from_session()
            if vm and type(vm) == adapter_form.get_view_model():
                adapter_form.fill_from_trait(vm)
            else:
                self.context.clean_from_session()
            template_specification = dict(
                submitLink=submit_url,
                adapter_form=self.render_adapter_form(adapter_form,
                                                      is_callout=is_callout),
                title=title)

            self._populate_section(stored_adapter, template_specification,
                                   is_burst)
            return template_specification
        except OperationException as oexc:
            self.logger.error("Inconsistent Adapter")
            self.logger.exception(oexc)
            common.set_warning_message(
                'Inconsistent Adapter!  Please review the link (development problem)!'
            )
        return None

    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def readserverstaticfile(self, coded_path):
        """
        Retrieve file from Local storage, having a File System Path.
        """
        try:
            with open(url2path(coded_path), "rb") as f:
                return f.read()
        except Exception as excep:
            self.logger.error("Could not retrieve file from path:" +
                              str(coded_path))
            self.logger.exception(excep)

    def _read_datatype_attribute(self,
                                 entity_gid,
                                 dataset_name,
                                 datatype_kwargs='null',
                                 **kwargs):

        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" +
                          dataset_name + "/" + str(kwargs))
        entity = load_entity_by_gid(entity_gid)
        entity_dt = h5.load_from_index(entity)

        datatype_kwargs = json.loads(datatype_kwargs)
        if datatype_kwargs:
            for key, value in six.iteritems(datatype_kwargs):
                kwargs[key] = load_entity_by_gid(value)

        result = getattr(entity_dt, dataset_name)
        if callable(result):
            if kwargs:
                result = result(**kwargs)
            else:
                result = result()
        return result

    @expose_json
    def invoke_adapter(self, algo_id, method_name, entity_gid, **kwargs):
        algorithm = self.algorithm_service.get_algorithm_by_identifier(algo_id)
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        entity = load_entity_by_gid(entity_gid)
        storage_path = self.files_helper.get_project_folder(
            entity.parent_operation.project, str(entity.fk_from_operation))
        adapter_instance.storage_path = storage_path
        method = getattr(adapter_instance, method_name)
        if kwargs:
            return method(entity_gid, **kwargs)
        return method(entity_gid)

    @expose_json
    def read_from_h5_file(self,
                          entity_gid,
                          method_name,
                          flatten=False,
                          datatype_kwargs='null',
                          **kwargs):
        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" +
                          method_name + "/" + str(kwargs))
        entity = load_entity_by_gid(entity_gid)
        entity_h5 = h5.h5_file_for_index(entity)

        datatype_kwargs = json.loads(datatype_kwargs)
        if datatype_kwargs:
            for key, value in six.iteritems(datatype_kwargs):
                kwargs[key] = load_entity_by_gid(value)

        result = getattr(entity_h5, method_name)
        if kwargs:
            result = result(**kwargs)
        else:
            result = result()

        entity_h5.close()
        return self._prepare_result(result, flatten)

    @expose_json
    def read_datatype_attribute(self,
                                entity_gid,
                                dataset_name,
                                flatten=False,
                                datatype_kwargs='null',
                                **kwargs):
        """
        Retrieve from a given DataType a property or a method result.

        :returns: JSON representation of the attribute.
        :param entity_gid: GID for DataType entity
        :param dataset_name: name of the dataType property /method
        :param flatten: result should be flattened before return (used mainly with WebGL data, e.g. vertices/triangles)
            Ignored if the attribute is not an ndarray
        :param datatype_kwargs: if passed, will contain a dictionary of type {'name' : 'gid'}, and for each such
            pair, a load_entity will be performed and kwargs will be updated to contain the result
        :param kwargs: extra parameters to be passed when dataset_name is method.

        """
        result = self._read_datatype_attribute(entity_gid, dataset_name,
                                               datatype_kwargs, **kwargs)
        return self._prepare_result(result, flatten)

    def _prepare_result(self, result, flatten):
        if isinstance(result, numpy.ndarray):
            # for ndarrays, honor the flatten kwarg and convert to lists, since ndarrays are not JSON-serializable
            if flatten is True or flatten == "True":
                result = result.flatten()
            return result.tolist()
        else:
            return result

    @expose_numpy_array
    def read_binary_datatype_attribute(self,
                                       entity_gid,
                                       dataset_name,
                                       datatype_kwargs='null',
                                       **kwargs):
        return self._read_datatype_attribute(entity_gid, dataset_name,
                                             datatype_kwargs, **kwargs)

    @expose_fragment("flow/genericAdapterFormFields")
    def get_simple_adapter_interface(self,
                                     algorithm_id,
                                     parent_div='',
                                     is_uploader=False):
        """
        AJAX exposed method. Will return only the interface for an adapter, to
        be used when tabs are needed.
        """
        curent_project = common.get_current_project()
        is_uploader = string2bool(is_uploader)
        template_specification = self.get_adapter_template(
            curent_project.id, algorithm_id, is_uploader)
        template_specification[common.KEY_PARENT_DIV] = parent_div
        return self.fill_default_attributes(template_specification)

    @expose_fragment("flow/full_adapter_interface")
    def getadapterinterface(self, project_id, algorithm_id, back_page=None):
        """
        AJAX exposed method. Will return only a piece of a page,
        to be integrated as part of another page.
        """
        template_specification = self.get_adapter_template(project_id,
                                                           algorithm_id,
                                                           False,
                                                           back_page,
                                                           is_callout=True)
        template_specification["isCallout"] = True
        return self.fill_default_attributes(template_specification)

    def get_adapter_template(self,
                             project_id,
                             algorithm_id,
                             is_upload=False,
                             back_page=None,
                             is_callout=False):
        """
        Get the template for an adapter based on the algo group id.
        """
        if not (project_id and int(project_id) and
                (algorithm_id is not None) and int(algorithm_id)):
            return ""

        algorithm = self.algorithm_service.get_algorithm_by_identifier(
            algorithm_id)
        if is_upload:
            submit_link = "/project/launchloader/" + str(
                project_id) + "/" + str(algorithm_id)
        else:
            submit_link = self.get_url_adapter(algorithm.fk_category,
                                               algorithm.id, back_page)

        template_specification = self.get_template_for_adapter(
            project_id,
            algorithm.fk_category,
            algorithm,
            submit_link,
            is_callout=is_callout)
        if template_specification is None:
            return ""
        template_specification[common.KEY_DISPLAY_MENU] = not is_upload
        return template_specification

    @cherrypy.expose
    @handle_error(redirect=True)
    @context_selected
    def reloadoperation(self, operation_id, **_):
        """Redirect to Operation Input selection page, with input data already selected."""
        operation = OperationService.load_operation(operation_id)
        # Reload previous parameters in session
        adapter_instance = ABCAdapter.build_adapter(operation.algorithm)
        view_model = adapter_instance.load_view_model(operation)
        self.context.add_view_model_to_session(view_model)
        # Display the inputs tree for the current op
        category_id = operation.algorithm.fk_category
        algo_id = operation.fk_from_algo
        raise cherrypy.HTTPRedirect("/flow/" + str(category_id) + "/" +
                                    str(algo_id))

    @cherrypy.expose
    @handle_error(redirect=True)
    @context_selected
    def reload_burst_operation(self, operation_id, is_group, **_):
        """
        Find out from which burst this operation was launched. Set that burst as the
        selected one and redirect to the burst page.
        """
        is_group = int(is_group)
        if not is_group:
            operation = OperationService.load_operation(int(operation_id))
        else:
            op_group = ProjectService.get_operation_group_by_id(operation_id)
            first_op = ProjectService.get_operations_in_group(op_group)[0]
            operation = OperationService.load_operation(int(first_op.id))
        self.simulator_controller.copy_simulator_configuration(
            operation.burst.id)

        raise cherrypy.HTTPRedirect("/burst/")

    @expose_json
    def cancel_or_remove_operation(self,
                                   operation_id,
                                   is_group,
                                   remove_after_stop=False):
        """
        Stop the operation given by operation_id. If is_group is true, stop all the
        operations from that group.
        """
        operation_id = int(operation_id)
        is_group = int(is_group) != 0
        if isinstance(remove_after_stop, str):
            remove_after_stop = bool(remove_after_stop)
        return self.simulator_controller.cancel_or_remove_operation(
            operation_id, is_group, remove_after_stop)

    def fill_default_attributes(self, template_dictionary, title='-'):
        """
        Override the base controller to add required parameters for adapter templates.
        """
        if common.KEY_TITLE not in template_dictionary:
            template_dictionary[common.KEY_TITLE] = title

        if common.KEY_PARENT_DIV not in template_dictionary:
            template_dictionary[common.KEY_PARENT_DIV] = ''
        if common.KEY_PARAMETERS_CONFIG not in template_dictionary:
            template_dictionary[common.KEY_PARAMETERS_CONFIG] = False

        template_dictionary[
            common.KEY_INCLUDE_RESOURCES] = 'flow/included_resources'
        BaseController.fill_default_attributes(self, template_dictionary)
        return template_dictionary

    NEW_SELECTION_NAME = 'New selection'

    def _get_available_selections(self, datatype_gid):
        """
        Selection retrieval common to the selection component and the connectivity selection.
        """
        curent_project = common.get_current_project()
        selections = self.algorithm_service.get_selections_for_project(
            curent_project.id, datatype_gid)
        names, sel_values = [], []

        for selection in selections:
            names.append(selection.ui_name)
            sel_values.append(selection.selected_nodes)

        return names, sel_values

    @expose_fragment('visualizers/commons/channel_selector_opts')
    def get_available_selections(self, **data):
        sel_names, sel_values = self._get_available_selections(
            data['datatype_gid'])
        return dict(namedSelections=list(zip(sel_names, sel_values)))

    @expose_json
    def store_measure_points_selection(self, ui_name, **data):
        """
        Save a MeasurePoints selection (new or update existing entity).
        """
        if ui_name and ui_name != self.NEW_SELECTION_NAME:
            sel_project_id = common.get_current_project().id
            # client sends integers as strings:
            selection = json.dumps(
                [int(s) for s in json.loads(data['selection'])])
            datatype_gid = data['datatype_gid']
            self.algorithm_service.save_measure_points_selection(
                ui_name, selection, datatype_gid, sel_project_id)
            return [True, 'Selection saved successfully.']

        else:
            error_msg = self.NEW_SELECTION_NAME + " or an empty name is not a valid selection name."
            return [False, error_msg]

    @expose_fragment(
        "visualizers/pse_discrete/inserting_new_threshold_spec_bar")
    def create_row_of_specs(self, count):
        return dict(id_increment_count=count)

    @expose_json
    def store_pse_filter(self, config_name, **data):
        # this will need to be updated so that the expose_json decorator actually gets used
        # this is also going to be changed to store through the flow service and DAO; stay updated
        try:
            # check whether there is already an entry in PSE_names_list for this config name
            for i, (name, Val) in enumerate(self.PSE_names_list):
                if name == config_name:
                    self.PSE_names_list[i] = (
                        config_name,
                        (data['threshold_value'] + "," +
                         data['threshold_type'] + "," + data['not_presence'])
                    )  # replace the previous occurrence of the config name, and carry on.
                    self.get_pse_filters()
                    return [
                        True, 'Selected Text stored, and selection updated'
                    ]
            self.PSE_names_list.append(
                (config_name,
                 (data['threshold_value'] + "," + data['threshold_type'] +
                  "," + data['not_presence'])))
        except AttributeError:
            self.PSE_names_list = [
                (config_name,
                 (data['threshold_value'] + "," + data['threshold_type'] +
                  "," + data['not_presence']))
            ]

        self.get_pse_filters()
        return [True, 'Selected Text stored, and selection updated']

    @expose_fragment("visualizers/commons/channel_selector_opts")
    def get_pse_filters(self):
        try:
            return dict(namedSelections=self.PSE_names_list)
        except AttributeError:
            return dict(
                namedSelections=[]
            )  # this will give us back at least the New Selection option in the select

    @expose_json
    def store_exploration_section(self, val_range, step, dt_group_guid):
        """
        Launching method for further simulations.
        """
        range_list = [float(num) for num in val_range.split(",")]
        step_list = [float(num) for num in step.split(",")]

        datatype_group_ob = ProjectService().get_datatypegroup_by_gid(
            dt_group_guid)
        operation_grp = datatype_group_ob.parent_operation_group
        operation_obj = OperationService.load_operation(
            datatype_group_ob.fk_from_operation)
        parameters = {}

        range1name, range1_dict = json.loads(operation_grp.range1)
        range2name, range2_dict = json.loads(operation_grp.range2)
        parameters[RANGE_PARAMETER_1] = range1name
        parameters[RANGE_PARAMETER_2] = range2name

        # change the existing simulator parameters to be min max step types
        range1_dict = {
            constants.ATT_MINVALUE: range_list[0],
            constants.ATT_MAXVALUE: range_list[1],
            constants.ATT_STEP: step_list[0]
        }
        range2_dict = {
            constants.ATT_MINVALUE: range_list[2],
            constants.ATT_MAXVALUE: range_list[3],
            constants.ATT_STEP: step_list[1]
        }
        parameters[range1name] = json.dumps(
            range1_dict)  # this is for the x axis parameter
        parameters[range2name] = json.dumps(
            range2_dict)  # this is for the y axis parameter

        OperationService().group_operation_launch(
            common.get_logged_user().id, common.get_current_project(),
            operation_obj.algorithm.id, operation_obj.algorithm.fk_category,
            datatype_group_ob, **parameters)

        return [True, 'Stored the exploration material successfully']
Code Example #16
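# Test suite for SimulatorController: each test patches cherrypy.session with a RamSession mock,
# stores the simulator model in the session, calls one controller setter and asserts the resulting state.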
class TestSimulationController(BaseTransactionalControllerTest):

    def transactional_setup_method(self):
        self.simulator_controller = SimulatorController()
        self.test_user = TestFactory.create_user('SimulationController_User')
        self.test_project = TestFactory.create_project(self.test_user, "SimulationController_Project")
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project)

        self.session_stored_simulator = SimulatorAdapterModel()
        self.session_stored_simulator.connectivity = UUID(connectivity.gid)

        self.sess_mock = RamSession()
        self.sess_mock[KEY_USER] = self.test_user
        self.sess_mock[KEY_PROJECT] = self.test_project

        cherrypy.request.method = "POST"

    def test_set_connectivity(self):
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")

        self.sess_mock['connectivity'] = connectivity.gid
        self.sess_mock['conduction_speed'] = "3.0"
        self.sess_mock['coupling'] = "Sigmoidal"

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_connectivity(**self.sess_mock._data)

        assert self.session_stored_simulator.connectivity.hex == connectivity.gid, "Connectivity was not set correctly."
        assert self.session_stored_simulator.conduction_speed == 3.0, "Conduction speed was not set correctly."
        assert isinstance(self.session_stored_simulator.coupling, Sigmoidal), "Coupling was not set correctly."

    def test_set_coupling_params(self):
        self.sess_mock['a'] = '[0.00390625]'
        self.sess_mock['b'] = '[0.0]'

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_coupling_params(**self.sess_mock._data)

        assert self.session_stored_simulator.coupling.a[0] == [0.00390625], "a value was not set correctly."
        assert self.session_stored_simulator.coupling.b[0] == [0.0], "b value was not set correctly."

    def test_set_surface(self):
        zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, CORTICAL, True)
        surface = TestFactory.get_entity(self.test_project, SurfaceIndex)

        self.sess_mock['surface'] = surface.gid

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_surface(**self.sess_mock._data)

        assert self.session_stored_simulator.surface is not None, "Surface was not set."

    def test_set_surface_none(self):
        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_surface(**self.sess_mock._data)

        assert self.session_stored_simulator.surface is None, "Surface should not be set."

    def test_set_cortex_without_local_connectivity(self):
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip')
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")

        zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, CORTICAL, True)

        text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt')
        region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid,
                                                           connectivity.gid)

        self.session_stored_simulator.surface = CortexViewModel()

        self.sess_mock['region_mapping'] = region_mapping.gid
        self.sess_mock['local_connectivity'] = 'explicit-None-value'
        self.sess_mock['coupling_strength'] = '[1.0]'

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_cortex(**self.sess_mock._data)

        assert self.session_stored_simulator.surface.region_mapping_data.hex == region_mapping.gid, \
            'Region mapping was not set correctly'
        assert self.session_stored_simulator.surface.local_connectivity is None, \
            'Default value should have been set to local connectivity.'
        assert self.session_stored_simulator.surface.coupling_strength == [1.0], \
            "coupling_strength was not set correctly."

    def test_set_stimulus_none(self):
        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_stimulus(**self.sess_mock._data)

        assert self.session_stored_simulator.stimulus is None, "Stimulus should not be set."

    def test_set_stimulus(self):
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        connectivity_index = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path)
        weight_array = numpy.zeros(connectivity_index.number_of_regions)

        region_stimulus_creator = RegionStimulusCreator()
        view_model = region_stimulus_creator.get_view_model_class()()
        view_model.connectivity = UUID(connectivity_index.gid)
        view_model.weight = weight_array
        view_model.temporal = TemporalApplicableEquation()
        view_model.temporal.parameters['a'] = 1.0
        view_model.temporal.parameters['b'] = 2.0

        FlowService().fire_operation(region_stimulus_creator, self.test_user, self.test_project.id,
                                     view_model=view_model)
        region_stimulus_index = TestFactory.get_entity(self.test_project, StimuliRegionIndex)

        self.sess_mock['region_stimuli'] = UUID(region_stimulus_index.gid)

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_stimulus(**self.sess_mock._data)

        assert self.session_stored_simulator.stimulus.hex == region_stimulus_index.gid, \
            "Stimulus was not set correctly."

    def test_set_model(self):
        self.sess_mock['model'] = 'Generic 2d Oscillator'

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_model(**self.sess_mock._data)

        assert isinstance(self.session_stored_simulator.model,
                          ModelsEnum.GENERIC_2D_OSCILLATOR.get_class()), "Model class is incorrect."

    def test_set_model_params(self):
        self.sess_mock['tau'] = '[1.0]'
        self.sess_mock['I'] = '[0.0]'
        self.sess_mock['a'] = '[-2.0]'
        self.sess_mock['b'] = '[-10.0]'
        self.sess_mock['c'] = '[0.0]'
        self.sess_mock['d'] = '[0.02]'
        self.sess_mock['e'] = '[3.0]'
        self.sess_mock['f'] = '[1.0]'
        self.sess_mock['g'] = '[0.0]'
        self.sess_mock['alpha'] = '[1.0]'
        self.sess_mock['beta'] = '[1.0]'
        self.sess_mock['gamma'] = '[1.0]'
        self.sess_mock['variables_of_interest'] = 'V'

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_model_params(**self.sess_mock._data)

        assert self.session_stored_simulator.model.tau == [1.0], "Tau has incorrect value."
        assert self.session_stored_simulator.model.I == [0.0], "I has incorrect value."
        assert self.session_stored_simulator.model.a == [-2.0], "a has incorrect value."
        assert self.session_stored_simulator.model.b == [-10.0], "b has incorrect value."
        assert self.session_stored_simulator.model.c == [0.0], "c has incorrect value."
        assert self.session_stored_simulator.model.d == [0.02], "d has incorrect value."
        assert self.session_stored_simulator.model.e == [3.0], "e has incorrect value."
        assert self.session_stored_simulator.model.f == [1.0], "f has incorrect value."
        assert self.session_stored_simulator.model.g == [0.0], "g has incorrect value."
        assert self.session_stored_simulator.model.alpha == [1.0], "alpha has incorrect value."
        assert self.session_stored_simulator.model.beta == [1.0], "beta has incorrect value."
        assert self.session_stored_simulator.model.gamma == [1.0], "gamma has incorrect value."
        assert self.session_stored_simulator.model.variables_of_interest == ['V'], \
            "variables_of_interest has incorrect value."

    def test_set_integrator(self):
        self.sess_mock['integrator'] = 'Heun'

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_integrator(**self.sess_mock._data)

        assert isinstance(self.session_stored_simulator.integrator,
                          HeunDeterministic), "Integrator was not set correctly."

    def test_set_integrator_params(self):
        self.sess_mock['dt'] = '0.01220703125'

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_integrator_params(**self.sess_mock._data)

        assert self.session_stored_simulator.integrator.dt == 0.01220703125, 'dt value was not set correctly.'

    def test_set_integrator_params_stochastic(self):
        self.sess_mock['dt'] = '0.01220703125'
        self.sess_mock['noise'] = 'Multiplicative'

        self.session_stored_simulator.integrator = Dopri5Stochastic()

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_integrator_params(**self.sess_mock._data)

        assert isinstance(self.session_stored_simulator.integrator, IntegratorStochastic), \
            "Integrator should be stochastic (Dormand-Prince)."
        assert self.session_stored_simulator.integrator.dt == 0.01220703125, 'dt value was not set correctly.'
        assert isinstance(self.session_stored_simulator.integrator.noise, Multiplicative), 'Noise class is incorrect.'

    def test_set_noise_params(self):
        self.sess_mock['ntau'] = '0.0'
        self.sess_mock['noise_seed'] = '42'
        self.sess_mock['nsig'] = '[1.0]'

        self.session_stored_simulator.integrator = EulerStochastic()

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_noise_params(**self.sess_mock._data)

        assert self.session_stored_simulator.integrator.noise.ntau == 0.0, "ntau value was not set correctly."
        assert self.session_stored_simulator.integrator.noise.noise_seed == 42, \
            "noise_seed value was not set correctly."
        assert self.session_stored_simulator.integrator.noise.nsig == [1.0], "nsig value was not set correctly."

    def test_set_noise_equation_params(self):
        self.sess_mock['low'] = '0.1'
        self.sess_mock['high'] = '1.0'
        self.sess_mock['midpoint'] = '1.0'
        self.sess_mock['sigma'] = '0.3'

        self.session_stored_simulator.integrator = Dopri5Stochastic()
        self.session_stored_simulator.integrator.noise = Multiplicative()
        self.session_stored_simulator.integrator.noise.b = GeneralizedSigmoid()

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_noise_equation_params(**self.sess_mock._data)

        assert self.session_stored_simulator.integrator.noise.b.parameters['low'] == 0.1, \
            "low value was not set correctly"
        assert self.session_stored_simulator.integrator.noise.b.parameters['high'] == 1.0, \
            "high value was not set correctly"
        assert self.session_stored_simulator.integrator.noise.b.parameters['midpoint'] == 1.0, \
            "midpoint value was not set correctly"
        assert self.session_stored_simulator.integrator.noise.b.parameters['sigma'] == 0.3, \
            "sigma value was not set correctly"

    def test_set_monitors(self):
        self.sess_mock['monitors'] = ['Temporal average']
        self.session_stored_simulator.monitors[0].variables_of_interest = numpy.array([0])
        self.session_stored_simulator.model.variables_of_interest = ['V', 'W']

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, BurstConfiguration(self.test_project.id))
            self.simulator_controller.set_monitors(**self.sess_mock._data)

        assert isinstance(self.session_stored_simulator.monitors[0], TemporalAverage), 'Monitor class is incorrect.'

    def test_set_monitor_params(self):
        self.session_stored_simulator.model.variables_of_interest = ('V', 'W', 'V - W')
        variable_of_interest_indexes = {'W': 1, 'V - W': 2}
        self.sess_mock['variables_of_interest'] = list(variable_of_interest_indexes.keys())
        self.sess_mock['period'] = '0.8'
        self.session_stored_simulator.monitors = [SubSample()]

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, BurstConfiguration(self.test_project.id))
            self.simulator_controller.set_monitor_params('SubSample', **self.sess_mock._data)

        assert self.session_stored_simulator.monitors[0].period == 0.8, "Period was not set correctly."
        assert list(self.session_stored_simulator.monitors[0].variables_of_interest) == \
            list(variable_of_interest_indexes.values()), "Variables of interest were not set correctly."
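        # As the last assertion implies, set_monitor_params() is expected to translate the
        # posted variable names ('W', 'V - W') into their indices (1, 2) within
        # model.variables_of_interest, so the monitor stores indices rather than names.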

    def set_region_mapping(self):
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip')
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")

        zip_path = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        surface = TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, CORTICAL, True)

        text_file = path.join(path.dirname(tvb_data.regionMapping.__file__), 'regionMapping_16k_76.txt')
        region_mapping = TestFactory.import_region_mapping(self.test_user, self.test_project, text_file, surface.gid,
                                                           connectivity.gid)
        return region_mapping
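    # Helper shared by the EEG/MEG/iEEG tests below: imports the 76-region demo connectivity,
    # the 16k cortical surface and the matching region mapping from tvb_data, and returns the
    # stored region mapping.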

    def test_set_eeg_monitor_params(self):
        region_mapping = self.set_region_mapping()

        eeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'eeg_unitvector_62.txt')
        eeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, eeg_sensors_file,
                                                 SensorsImporterModel.OPTIONS['EEG Sensors'])

        surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, CORTICAL, True)

        eeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__),
                                        'projection_eeg_62_surface_16k.mat')
        eeg_projections = TestFactory.import_projection_matrix(self.test_user, self.test_project, eeg_projection_file,
                                                               eeg_sensors.gid, surface.gid)

        self.session_stored_simulator.model.variables_of_interest = ('V', 'W', 'V - W')
        variable_of_interest_indexes = {'W': 1, 'V - W': 2}
        self.sess_mock['variables_of_interest'] = list(variable_of_interest_indexes.keys())
        self.sess_mock['period'] = '0.75'
        self.sess_mock['region_mapping'] = region_mapping.gid
        self.sess_mock['projection'] = eeg_projections.gid
        self.sess_mock['sigma'] = "1.0"
        self.sess_mock['sensors'] = eeg_sensors.gid

        self.session_stored_simulator.monitors = [EEG()]

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, BurstConfiguration(self.test_project.id))
            self.simulator_controller.set_monitor_params('EEG', **self.sess_mock._data)

        assert self.session_stored_simulator.monitors[0].period == 0.75, "Period was not set correctly."
        assert list(self.session_stored_simulator.monitors[0].variables_of_interest) == \
            list(variable_of_interest_indexes.values()), "Variables of interest were not set correctly."
        assert self.session_stored_simulator.monitors[0].region_mapping.gid.hex == region_mapping.gid, \
            "Region Mapping wasn't set and stored correctly."
        assert self.session_stored_simulator.monitors[0].sensors.gid.hex == eeg_sensors.gid, \
            "Sensors weren't set and stored correctly."
        assert self.session_stored_simulator.monitors[0].projection.gid is not None, \
            "Projection wasn't stored correctly."

    def test_set_meg_monitor_params(self):
        region_mapping = self.set_region_mapping()

        meg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'meg_brainstorm_276.txt')
        meg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, meg_sensors_file,
                                                 SensorsImporterModel.OPTIONS['MEG Sensors'])

        surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, CORTICAL, True)

        meg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__),
                                        'projection_meg_276_surface_16k.npy')
        meg_projections = TestFactory.import_projection_matrix(self.test_user, self.test_project, meg_projection_file,
                                                               meg_sensors.gid, surface.gid)

        self.session_stored_simulator.model.variables_of_interest = ('V', 'W', 'V - W')
        variable_of_interest_indexes = {'W': 1, 'V - W': 2}
        self.sess_mock['variables_of_interest'] = list(variable_of_interest_indexes.keys())
        self.sess_mock['period'] = '0.75'
        self.sess_mock['region_mapping'] = region_mapping.gid
        self.sess_mock['projection'] = meg_projections.gid
        self.sess_mock['sigma'] = "1.0"
        self.sess_mock['sensors'] = meg_sensors.gid

        self.session_stored_simulator.monitors = [MEG()]

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, BurstConfiguration(self.test_project.id))
            self.simulator_controller.set_monitor_params('MEG', **self.sess_mock._data)

        assert self.session_stored_simulator.monitors[0].period == 0.75, "Period was not set correctly."
        assert list(self.session_stored_simulator.monitors[0].variables_of_interest) == \
            list(variable_of_interest_indexes.values()), "Variables of interest were not set correctly."
        assert self.session_stored_simulator.monitors[0].region_mapping.gid.hex == region_mapping.gid, \
            "Region Mapping wasn't set and stored correctly."
        assert self.session_stored_simulator.monitors[0].sensors.gid.hex == meg_sensors.gid, \
            "Sensors weren't set and stored correctly."
        assert self.session_stored_simulator.monitors[0].projection.gid is not None, \
            "Projection wasn't stored correctly."

    def test_set_seeg_monitor_params(self):
        region_mapping = self.set_region_mapping()

        seeg_sensors_file = path.join(path.dirname(tvb_data.sensors.__file__), 'seeg_588.txt')
        seeg_sensors = TestFactory.import_sensors(self.test_user, self.test_project, seeg_sensors_file,
                                                  SensorsImporterModel.OPTIONS['Internal Sensors'])

        surface_file = path.join(path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        surface = TestFactory.import_surface_zip(self.test_user, self.test_project, surface_file, CORTICAL, True)

        seeg_projection_file = path.join(path.dirname(tvb_data.projectionMatrix.__file__),
                                         'projection_seeg_588_surface_16k.npy')
        seeg_projections = TestFactory.import_projection_matrix(self.test_user, self.test_project, seeg_projection_file,
                                                                seeg_sensors.gid, surface.gid)

        self.session_stored_simulator.model.variables_of_interest = ('V', 'W', 'V - W')
        variable_of_interest_indexes = {'W': 1, 'V - W': 2}
        self.sess_mock['variables_of_interest'] = list(variable_of_interest_indexes.keys())
        self.sess_mock['period'] = '0.75'
        self.sess_mock['region_mapping'] = region_mapping.gid
        self.sess_mock['projection'] = seeg_projections.gid
        self.sess_mock['sigma'] = "1.0"
        self.sess_mock['sensors'] = seeg_sensors.gid

        self.session_stored_simulator.monitors = [iEEG()]

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, BurstConfiguration(self.test_project.id))
            self.simulator_controller.set_monitor_params('iEEG', **self.sess_mock._data)

        assert self.session_stored_simulator.monitors[0].period == 0.75, "Period was not set correctly."
        assert list(self.session_stored_simulator.monitors[0].variables_of_interest) == \
            list(variable_of_interest_indexes.values()), "Variables of interest were not set correctly."
        assert self.session_stored_simulator.monitors[0].region_mapping.gid.hex == region_mapping.gid, \
            "Region Mapping wasn't set and stored correctly."
        assert self.session_stored_simulator.monitors[0].sensors.gid.hex == seeg_sensors.gid, \
            "Sensors weren't set and stored correctly."
        assert self.session_stored_simulator.monitors[0].projection.gid is not None, \
            "Projection wasn't stored correctly."

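    # A minimal sketch (not part of the original suite) of how the shared checks of the
    # EEG/MEG/iEEG tests above could be factored out; the helper name and signature are
    # hypothetical.
    def _assert_projection_monitor_was_set(self, monitor, voi_indexes, region_mapping, sensors):
        assert monitor.period == 0.75, "Period was not set correctly."
        assert list(monitor.variables_of_interest) == list(voi_indexes.values()), \
            "Variables of interest were not set correctly."
        assert monitor.region_mapping.gid.hex == region_mapping.gid, \
            "Region Mapping wasn't set and stored correctly."
        assert monitor.sensors.gid.hex == sensors.gid, "Sensors weren't set and stored correctly."
        assert monitor.projection.gid is not None, "Projection wasn't stored correctly."
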
    def test_set_bold_monitor_params(self):
        self.session_stored_simulator.model.variables_of_interest = ('V', 'W', 'V - W')
        variable_of_interest_indexes = {'W': 1, 'V - W': 2}

        self.sess_mock['variables_of_interest'] = list(variable_of_interest_indexes.keys())
        self.sess_mock['period'] = '2000.0'
        self.sess_mock['hrf_kernel'] = 'HRF kernel: Volterra Kernel'

        self.session_stored_simulator.monitors = [Bold()]

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_monitor_params('Bold', **self.sess_mock._data)

        assert self.session_stored_simulator.monitors[0].period == 2000.0, "Period was not set correctly."
        assert list(self.session_stored_simulator.monitors[0].variables_of_interest) == \
            list(variable_of_interest_indexes.values()), "Variables of interest were not set correctly."

    def test_set_monitor_equation(self):
        self.sess_mock['tau_s'] = '0.8'
        self.sess_mock['tau_f'] = '0.4'
        self.sess_mock['k_1'] = '5.6'
        self.sess_mock['V_0'] = '0.02'

        self.session_stored_simulator.monitors = [Bold()]
        self.session_stored_simulator.monitors[0].equation = FirstOrderVolterra()

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, BurstConfiguration(self.test_project.id))
            self.simulator_controller.set_monitor_equation('Bold', **self.sess_mock._data)

        assert self.session_stored_simulator.monitors[0].equation.parameters[
                   'tau_s'] == 0.8, "tau_s value was not set correctly."
        assert self.session_stored_simulator.monitors[0].equation.parameters[
                   'tau_f'] == 0.4, "tau_f value was not set correctly."
        assert self.session_stored_simulator.monitors[0].equation.parameters[
                   'k_1'] == 5.6, "k_1 value was not set correctly."
        assert self.session_stored_simulator.monitors[0].equation.parameters[
                   'V_0'] == 0.02, "V_0 value was not set correctly."

    def test_load_burst_history(self):
        burst_config1 = BurstConfiguration(self.test_project.id)
        burst_config2 = BurstConfiguration(self.test_project.id)
        burst_config3 = BurstConfiguration(self.test_project.id)

        dao.store_entity(burst_config1)
        dao.store_entity(burst_config2)
        dao.store_entity(burst_config3)

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_BURST_CONFIG, burst_config1)
            burst_parameters = self.simulator_controller.load_burst_history()

        assert len(burst_parameters['burst_list']) == 3, "The burst configurations were not stored."

    def test_reset_simulator_configuration(self):
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")

        self.sess_mock['connectivity'] = connectivity.gid
        self.sess_mock['conduction_speed'] = "3.0"
        self.sess_mock['coupling'] = "Sigmoidal"

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            rendering_rules = self.simulator_controller.set_connectivity(**self.sess_mock._data)

        assert rendering_rules['renderer'].is_first_fragment is False, \
            "Page should have advanced past the first fragment."

        with patch('cherrypy.session', self.sess_mock, create=True):
            rendering_rules = self.simulator_controller.reset_simulator_configuration()

        assert rendering_rules['renderer'].is_first_fragment is True, \
            "Page should be set to the first fragment."

    def test_get_history_status(self):
        burst_config = BurstConfiguration(self.test_project.id)
        burst_config.start_time = datetime.now()
        dao.store_entity(burst_config)
        burst = dao.get_bursts_for_project(self.test_project.id)
        self.sess_mock['burst_ids'] = '["' + str(burst[0].id) + '"]'

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, burst_config)
            result = self.simulator_controller.get_history_status(**self.sess_mock._data).split(',')

        assert int(result[0][2:]) == burst[0].id, "Incorrect burst was used."
        assert result[1] == ' "running"', "Status should be set to running."
        assert result[2] == ' false', "Burst shouldn't be a group."
        assert result[3] == ' ""', "Message should be empty, which means that there shouldn't be any errors."
        assert int(result[4][2:-4]) >= 0, "Running time should be greater than or equal to 0."
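        # As these assertions imply, get_history_status() returns a JSON-style list per burst of
        # roughly [id, status, is-group flag, error message, elapsed time], which the test
        # inspects after a naive split on commas.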

    def test_rename_burst(self):
        new_name = "Test Burst Configuration 2"
        operation = TestFactory.create_operation()
        burst_config = TestFactory.store_burst(self.test_project.id, operation)
        burst = dao.get_bursts_for_project(self.test_project.id)
        self.sess_mock['burst_id'] = str(burst[0].id)
        self.sess_mock['burst_name'] = new_name

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, burst_config)
            result = self.simulator_controller.rename_burst(burst[0].id, new_name)

        assert result == '{"success": "Simulation successfully renamed!"}', \
            "An error occurred while renaming, probably because of an invalid new name."
        assert dao.get_bursts_for_project(self.test_project.id)[0].name == new_name, "Name wasn't actually changed."

    def test_export(self):
        op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        burst_config = BurstConfiguration(self.test_project.id)
        burst_config.fk_simulation = op.id
        burst_config.simulator_gid = self.session_stored_simulator.gid.hex
        burst_config = dao.store_entity(burst_config)

        storage_path = FilesHelper().get_project_folder(self.test_project, str(op.id))
        h5_path = h5.path_for(storage_path, SimulatorH5, self.session_stored_simulator.gid)
        with SimulatorH5(h5_path) as h5_file:
            h5_file.store(self.session_stored_simulator)

        burst = dao.get_bursts_for_project(self.test_project.id)
        self.sess_mock['burst_id'] = str(burst[0].id)

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, burst_config)
            result = self.simulator_controller.export(str(burst[0].id))

        assert path.exists(result.input.name), "Simulation was not exported!"

    def test_copy_simulator_configuration(self):
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")

        op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        burst_config = BurstConfiguration(self.test_project.id)
        burst_config.fk_simulation = op.id
        burst_config.name = 'test_burst'
        burst_config.simulator_gid = self.session_stored_simulator.gid.hex
        burst_config = dao.store_entity(burst_config)

        self.sess_mock['burst_id'] = str(burst_config.id)
        self.sess_mock['connectivity'] = connectivity.gid
        self.sess_mock['conduction_speed'] = "3.0"
        self.sess_mock['coupling'] = "Sigmoidal"

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_connectivity(**self.sess_mock._data)
            self.simulator_controller.set_stimulus(**self.sess_mock._data)

        storage_path = FilesHelper().get_project_folder(self.test_project, str(op.id))
        SimulatorSerializer().serialize_simulator(self.session_stored_simulator, None, storage_path)

        with patch('cherrypy.session', self.sess_mock, create=True):
            self.simulator_controller.copy_simulator_configuration(str(burst_config.id))
            is_simulator_load = common.get_from_session(KEY_IS_SIMULATOR_LOAD)
            is_simulator_copy = common.get_from_session(KEY_IS_SIMULATOR_COPY)

        assert not is_simulator_load, "Simulator Load Flag should be False!"
        assert is_simulator_copy, "Simulator Copy Flag should be True!"

    def test_load_burst_only(self):
        zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")

        op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        burst_config = BurstConfiguration(self.test_project.id)
        burst_config.fk_simulation = op.id
        burst_config.simulator_gid = self.session_stored_simulator.gid.hex
        burst_config.name = 'Test_Burst'
        burst_config = dao.store_entity(burst_config)

        self.sess_mock['burst_id'] = str(burst_config.id)
        self.sess_mock['connectivity'] = connectivity.gid
        self.sess_mock['conduction_speed'] = "3.0"
        self.sess_mock['coupling'] = "Sigmoidal"

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_connectivity(**self.sess_mock._data)
            self.simulator_controller.set_stimulus(**self.sess_mock._data)

        storage_path = FilesHelper().get_project_folder(self.test_project, str(op.id))
        SimulatorSerializer().serialize_simulator(self.session_stored_simulator, None, storage_path)

        with patch('cherrypy.session', self.sess_mock, create=True):
            self.simulator_controller.load_burst_read_only(str(burst_config.id))
            is_simulator_load = common.get_from_session(KEY_IS_SIMULATOR_LOAD)
            is_simulator_copy = common.get_from_session(KEY_IS_SIMULATOR_COPY)
            last_loaded_form_url = common.get_from_session(KEY_LAST_LOADED_FORM_URL)

        assert is_simulator_load, "Simulator Load Flag should be True!"
        assert not is_simulator_copy, "Simulator Copy Flag should be False!"
        assert last_loaded_form_url == '/burst/setup_pse', "Incorrect last form URL!"
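        # Together with test_copy_simulator_configuration above, this checks that the two session
        # flags mirror each other: a read-only load sets KEY_IS_SIMULATOR_LOAD and clears the copy
        # flag, while copying clears the load flag and sets KEY_IS_SIMULATOR_COPY.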

    def test_launch_simulation_with_default_parameters(self):
        self.sess_mock['input_simulation_name_id'] = 'HappySimulation'
        self.sess_mock['simulation_length'] = '10'
        launch_mode = 'new'

        burst_config = BurstConfiguration(self.test_project.id)

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_BURST_CONFIG, burst_config)
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.launch_simulation(launch_mode, **self.sess_mock._data)
Code Example #17
class TestFlowController(BaseControllersTest):
    """ Unit tests for FlowController """
    def setup_method(self):
        """
        Sets up the environment for testing;
        creates a `FlowController`, a `SimulatorController` and an `OperationService`
        """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = SimulatorController()
        self.operation_service = OperationService()

    def teardown_method(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    @pytest.fixture()
    def long_burst_launch(self, connectivity_factory):
        def build(is_range=False):
            self.burst_c.index()
            connectivity = connectivity_factory()
            launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
            launch_params['connectivity'] = dao.get_datatype_by_id(
                connectivity.id).gid
            launch_params['simulation_length'] = '10000'
            if is_range:
                launch_params['conduction_speed'] = '[10,15,20]'
                launch_params[RANGE_PARAMETER_1] = 'conduction_speed'
            launch_params = {"simulator_parameters": json.dumps(launch_params)}
            burst_id = json.loads(
                self.burst_c.launch_burst("new", "test_burst",
                                          **launch_params))['id']
            return dao.get_burst_by_id(burst_id)

        return build
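    # The fixture above returns a builder that launches a long (10000 ms) burst through the
    # simulator controller, optionally as a range over conduction_speed, and returns the
    # persisted burst entity so the tests below can wait for and stop its operations.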

    def test_context_selected(self):
        """
        Remove the project from the CherryPy session and check that you are redirected to the projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step_analyzers)

    def test_valid_step(self):
        """
        For all algorithm categories, check that a submenu is generated and that
        the result page's title is given by the category name.
        """
        result_dict = self.flow_c.step_analyzers()
        assert common.KEY_SUBMENU_LIST in result_dict, \
            "Expected a submenu with the available algorithms for the category."
        assert result_dict["section_name"] == 'analyze'

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        assert result_dict['section_name'] == 'connectivity'
        assert result_dict['submenu_list'] == self.flow_c.connectivity_submenu

    def test_default(self):
        """
        Test the default method of the step controllers. Check that the submit link is correct,
        that 'mainContent' is present in the result dict and that the isAdapter flag is set to True.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_adapters_from_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                assert result_dict[common.KEY_SUBMIT_LINK] == '/flow/%i/%i' % (
                    categ.id, algo.id)
                assert 'mainContent' in result_dict
                assert result_dict['isAdapter']

    def test_default_cancel(self):
        """
        On cancel, we should be redirected to the back-page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_adapters_from_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' %
                              self.test_project.id,
                              self.flow_c.default,
                              categories[0].id,
                              algo_groups[0].id,
                              cancel=True,
                              back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for the adapter and step and check that you are redirected to the
        TVB entry page with the error flag set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default,
                              'invalid', 'invalid')

    def test_read_datatype_attribute(self, dummy_datatype_index_factory):
        """
        Read an attribute from a datatype.
        """
        dt = dummy_datatype_index_factory(row1='This is stored data')
        dt.subject = "test_subject"
        dt.state = "RAW_STATE"

        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "row1")
        assert returned_data == '"This is stored data"'

    def test_read_datatype_attribute_method_call(self,
                                                 dummy_datatype_index_factory):
        """
        Call a method on the given datatype.
        """
        dt = dummy_datatype_index_factory(row1='This is stored data')
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(
            dt.gid, 'return_test_data', **args)
        assert returned_data.replace('"', '') == " ".join(
            str(x) for x in range(101))

    def test_get_simple_adapter_interface(self, test_adapter_factory):
        test_adapter_factory()
        form = TestAdapter1Form()
        adapter = TestFactory.create_adapter(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        algo = adapter.stored_adapter
        adapter.submit_form(form)
        result = self.flow_c.get_simple_adapter_interface(algo.id)
        expected_interface = adapter.get_form()
        assert type(result['form']) == type(expected_interface)
        assert result[
            'form'].test1_val1.value == expected_interface.test1_val1.value
        assert result[
            'form'].test1_val2.value == expected_interface.test1_val2.value

    def _wait_for_burst_ops(self, burst_config):
        """ Sleeps until at least one operation of the burst is created. """
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        operations = dao.get_operations_in_burst(burst_config.id)
        return operations
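    # Note: if the 50 s timeout is reached, an empty list is returned, so the callers below
    # that index the result with [0] would fail with an IndexError rather than hang forever.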

    def test_stop_burst_operation(self, long_burst_launch):
        burst_config = long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        assert not operation.has_finished
        self.flow_c.stop_burst_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_burst_operation_group(self, long_burst_launch):
        burst_config = long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            assert not operation.has_finished
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED

    def test_remove_burst_operation(self, long_burst_launch):
        burst_config = long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        assert not operation.has_finished
        self.flow_c.stop_burst_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        assert operation is None

    def test_remove_burst_operation_group(self, long_burst_launch):
        burst_config = long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            assert not operation.has_finished
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, True)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            assert operation is None

    def _launch_test_algo_on_cluster(self, **data):
        adapter = TestFactory.create_adapter(
            'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project, algo, algo_category, {},
            **data)
        self.operation_service._send_to_cluster(operations, adapter)
        return operations

    def test_stop_operations(self):
        data = {"test1_val1": 5, 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation = dao.get_operation_by_id(operations[0].id)
        assert not operation.has_finished
        self.flow_c.stop_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_operations_group(self):
        data = {
            RANGE_PARAMETER_1: "test1_val1",
            "test1_val1": '5,6,7',
            'test1_val2': 5
        }
        operations = self._launch_test_algo_on_cluster(**data)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert not operation.has_finished
            operation_group_id = operation.fk_operation_group
        self.flow_c.stop_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED