def test_build_adapter_invalid(self):
     """
     Test flow for trying to build an adapter that does not inherit from ABCAdapter.
     """
     group = dao.get_algorithm_by_module(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_INVALID_CLASS)
     with pytest.raises(IntrospectionException):
         ABCAdapter.build_adapter(group)
Example 2
    def build(test_user=None,
              test_project=None,
              is_simulation=False,
              store_vm=False,
              operation_status=STATUS_FINISHED,
              range_values=None,
              conn_gid=None):
        """
        Create persisted operation with a ViewModel stored
        :return: Operation entity after persistence.
        """
        if test_user is None:
            test_user = user_factory()
        if test_project is None:
            test_project = project_factory(test_user)

        vm_gid = uuid.uuid4()
        view_model = None

        if is_simulation:
            algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE,
                                                    SIMULATOR_CLASS)
            if store_vm:
                adapter = ABCAdapter.build_adapter(algorithm)
                view_model = adapter.get_view_model_class()()
                view_model.connectivity = connectivity_factory(
                    4).gid if conn_gid is None else conn_gid
                vm_gid = view_model.gid

        else:
            algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                    TVB_IMPORTER_CLASS)
            if store_vm:
                adapter = ABCAdapter.build_adapter(algorithm)
                view_model = adapter.get_view_model_class()()
                view_model.data_file = "."
                vm_gid = view_model.gid

        operation = Operation(vm_gid.hex,
                              test_user.id,
                              test_project.id,
                              algorithm.id,
                              status=operation_status,
                              range_values=range_values)
        dao.store_entity(operation)

        if store_vm:
            op_folder = FilesHelper().get_project_folder(
                test_project, str(operation.id))
            h5.store_view_model(view_model, op_folder)

        # Make sure lazy attributes are correctly loaded.
        return dao.get_operation_by_id(operation.id)
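A minimal usage sketch for the factory above, assuming it is exposed through a pytest fixture (a common TVB test pattern; the fixture name `operation_factory` is an assumption, not shown here):

    def test_operation_factory_persists_operation(operation_factory):
        # Hypothetical test: build a finished simulation operation with a stored ViewModel.
        operation = operation_factory(is_simulation=True, store_vm=True)
        assert operation.status == STATUS_FINISHED
        assert operation.id is not None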
Example 3
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a list of BCT adapters;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user("BCT_User")
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "BCT-Project")
        ### Make sure Connectivity is in DB
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = dao.get_generic_entity(Connectivity, 'John Doe',
                                                   'subject')[0]

        # make weights matrix symmetric, or else some BCT algorithms will run infinitely:
        w = self.connectivity.weights
        self.connectivity.weights = w + w.T - numpy.diag(w.diagonal())

        algorithms = dao.get_generic_entity(model.Algorithm,
                                            'Brain Connectivity Toolbox',
                                            'group_description')
        self.assertTrue(algorithms is not None)
        self.assertTrue(len(algorithms) > 5)

        self.bct_adapters = []
        for algo in algorithms:
            self.bct_adapters.append(ABCAdapter.build_adapter(algo))
    def get_creator_and_interface(self,
                                  creator_module,
                                  creator_class,
                                  datatype_instance,
                                  lock_midpoint_for_eq=None):
        """
        Returns a tuple: a creator instance and a dictionary for the creator interface.
        The interface is prepared for rendering and is populated with existing data whenever a
        parameter is of type DataType. The attribute names are also prefixed to identify groups.
        """
        algorithm = self.flow_service.get_algorithm_by_module_and_class(
            creator_module, creator_class)

        # We can't use 'build_adapter_from_class' from flow service
        # because the selects that display dataTypes will also have the 'All' entry.
        datatype_instance.trait.bound = traited_interface.INTERFACE_ATTRIBUTES_ONLY
        input_list = datatype_instance.interface[
            traited_interface.INTERFACE_ATTRIBUTES]
        if lock_midpoint_for_eq is not None:
            for idx in lock_midpoint_for_eq:
                input_list[idx] = self._lock_midpoints(input_list[idx])
        category = self.flow_service.get_visualisers_category()
        itree_mngr = self.flow_service.input_tree_manager
        input_list = itree_mngr.fill_input_tree_with_options(
            input_list,
            common.get_current_project().id, category.id)
        input_list = itree_mngr.prepare_param_names(input_list)

        return ABCAdapter.build_adapter(algorithm), input_list
    def get_series_array_discrete(self, datatype_group_gid, backPage, color_metric=None, size_metric=None):
        """
        Create new data for when the user chooses to refresh from the UI.
        """
        if color_metric == 'None':
            color_metric = None
        if size_metric == 'None':
            size_metric = None

        algorithm = self.flow_service.get_algorithm_by_module_and_class(DISCRETE_PSE_ADAPTER_MODULE,
                                                                        DISCRETE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                pse_context = adapter.prepare_parameters(datatype_group_gid, backPage, color_metric, size_metric)
                return dict(series_array=pse_context.series_array,
                            has_started_ops=pse_context.has_started_ops)
            except LaunchException as ex:
                error_msg = urllib.quote(ex.message)
        else:
            error_msg = urllib.quote(
                "Discrete PSE is incompatible (most probably due to result size being too large).")

        name = urllib.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group("tvb.adapters.uploaders.region_mapping_importer", "RegionMapping_Importer")
        importer = ABCAdapter.build_adapter(group)

        args = {
            "mapping_file": import_file_path,
            "surface": surface_gid,
            "connectivity": connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: "test",
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(fields=[FilterChain.datatype + ".create_date"], operations=[">"], values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping
    def post(self, project_gid, algorithm_module, algorithm_classname):
        """
        Generic method for launching Analyzers.
        """
        model_file = self.extract_file_from_request()
        destination_folder = RestResource.get_destination_folder()
        h5_path = RestResource.save_temporary_file(model_file,
                                                   destination_folder)

        try:
            project = self.project_service.find_project_lazy_by_gid(
                project_gid)
        except ProjectServiceException:
            raise InvalidIdentifierException(INVALID_PROJECT_GID_MESSAGE %
                                             project_gid)

        algorithm = FlowService.get_algorithm_by_module_and_class(
            algorithm_module, algorithm_classname)
        if algorithm is None:
            raise InvalidIdentifierException(
                'No algorithm found for: %s.%s' %
                (algorithm_module, algorithm_classname))

        try:
            adapter_instance = ABCAdapter.build_adapter(algorithm)
            view_model = adapter_instance.get_view_model_class()()

            view_model_h5 = ViewModelH5(h5_path, view_model)
            view_model_gid = view_model_h5.gid.load()

            # TODO: use logged user
            user_id = project.fk_admin
            operation = self.operation_service.prepare_operation(
                user_id, project.id, algorithm.id,
                algorithm.algorithm_category, view_model_gid.hex, None, {})
            storage_path = self.files_helper.get_project_folder(
                project, str(operation.id))

            if isinstance(adapter_instance, ABCUploader):

                for key, value in adapter_instance.get_form_class(
                ).get_upload_information().items():
                    data_file = self.extract_file_from_request(
                        file_name=key, file_extension=value)
                    data_file_path = RestResource.save_temporary_file(
                        data_file, destination_folder)
                    file_name = os.path.basename(data_file_path)
                    upload_field = getattr(view_model_h5, key)
                    upload_field.store(os.path.join(storage_path, file_name))
                    shutil.move(data_file_path, storage_path)

            shutil.move(h5_path, storage_path)
            os.rmdir(destination_folder)
            view_model_h5.close()
            OperationService().launch_operation(operation.id, True)
        except Exception as excep:
            self.logger.error(excep, exc_info=True)
            raise ServiceException(str(excep))

        return operation.gid, HTTP_STATUS_CREATED
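A hypothetical client-side sketch of how this endpoint might be called; the URL layout and form field name are assumptions, not taken from the snippet above:

    import requests

    def launch_analyzer(base_url, project_gid, module, classname, h5_path):
        # Hypothetical endpoint path; adjust to the actual REST routing.
        url = "%s/operations/%s/algorithm/%s/%s" % (base_url, project_gid, module, classname)
        with open(h5_path, "rb") as view_model_h5:
            response = requests.post(url, files={"file": view_model_h5})
        response.raise_for_status()
        return response.json()  # expected to carry the GID of the launched operation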
    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {DataTypeMetaData.KEY_SUBJECT: "",
                              DataTypeMetaData.KEY_STATE: "RAW"}

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)
        self.assertEqual(1, len(data_types), "Project should contain only one data type = Sensors.")

        sensors = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(sensors is not None, "Sensors instance should not be None")

        return sensors
Example 9
 def test_happy_flow_surface_import(self):
     dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                    ProjectionSurfaceEEG())
     group = dao.find_group(
         'tvb.adapters.uploaders.projection_matrix_importer',
         'ProjectionMatrixSurfaceEEGImporter')
     importer = ABCAdapter.build_adapter(group)
     importer.meta_data = {
         DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
         DataTypeMetaData.KEY_STATE: "RAW"
     }
     zip_path = os.path.join(
         os.path.abspath(os.path.dirname(dataset.__file__)),
         'region_conn_74_eeg_1020_62.mat')
     args = {
         'projection_file': zip_path,
         'dataset_name': 'ProjectionMatrix',
         'connectivity': self.connectivity.gid,
         'sensors': self.sensors.gid,
         'surface': self.surface.gid
     }
     FlowService().fire_operation(importer, self.test_user,
                                  self.test_project.id, **args)
     dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                    ProjectionSurfaceEEG())
     self.assertTrue(dt_count_after == dt_count_before + 1)
Example 10
    def parse_event_node(self):
        """
        Parse the stored event node to get required data and arguments.
        """
        kw_parameters = {}
        for one_arg in self.event_node.childNodes:
            if one_arg.nodeType != Node.ELEMENT_NODE:
                continue
            if one_arg.nodeName == ELEM_ADAPTER:
                # TODO: so far there is no need for it, but maybe we should handle
                # cases where the same module/class has different init parameters
                group = dao.find_group(one_arg.getAttribute(ATT_MODULE), one_arg.getAttribute(ATT_CLASS))
                adapter = ABCAdapter.build_adapter(group)
                result_uid = one_arg.getAttribute(ATT_UID)
                if result_uid:
                    kw_parameters[ATT_UID] = result_uid
                LOGGER.debug("Adapter used is %s", str(adapter.__class__))
                self.callable_object = adapter
                continue
            if one_arg.nodeName == ELEM_METHOD:
                self.call_method = one_arg.getAttribute(ATT_NAME)
                if one_arg.getAttribute(ATT_OPERATION_HIDDEN):
                    self.operation_visible = False
                continue
            if one_arg.nodeName == ELEM_ARGS:
                kw_parameters.update(_parse_arguments(one_arg))
                continue
            LOGGER.info("Ignored undefined node %s", str(one_arg.nodeName))

        self.arguments.update(kw_parameters)
Example 11
    def _prepare_pse_context(self, datatype_group_gid, back_page, color_metric,
                             size_metric, is_refresh):

        if color_metric == 'None' or color_metric == "undefined":
            color_metric = None
        if size_metric == 'None' or size_metric == "undefined":
            size_metric = None

        algorithm = self.algorithm_service.get_algorithm_by_module_and_class(
            IntrospectionRegistry.DISCRETE_PSE_ADAPTER_MODULE,
            IntrospectionRegistry.DISCRETE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)

        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                pse_context = adapter.prepare_parameters(
                    datatype_group_gid, back_page, color_metric, size_metric)
                if is_refresh:
                    return dict(series_array=pse_context.series_array,
                                has_started_ops=pse_context.has_started_ops)
                else:
                    pse_context.prepare_individual_jsons()
                    return pse_context
            except LaunchException as ex:
                error_msg = urllib.parse.quote(ex.message)
        else:
            error_msg = urllib.parse.quote(
                "Discrete PSE is incompatible (most probably due to result size being too large)."
            )

        name = urllib.parse.quote(adapter._ui_name)
        raise LaunchException(REDIRECT_MSG % (name, error_msg))
 def test_build_adapter_instance(self):
     """
     Test standard flow for building an adapter instance.
     """
     algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
     adapter = ABCAdapter.build_adapter(algo_group)
     self.assertTrue(isinstance(adapter, ABCSynchronous), "Something went wrong with valid data!")
Example 13
    def draw_discrete_exploration(self,
                                  datatype_group_gid,
                                  back_page,
                                  color_metric=None,
                                  size_metric=None):
        """
        Create new data for when the user chooses to refresh from the UI.
        """
        if color_metric == 'None' or color_metric == "undefined":
            color_metric = None
        if size_metric == 'None' or size_metric == "undefined":
            size_metric = None

        algorithm = self.flow_service.get_algorithm_by_module_and_class(
            DISCRETE_PSE_ADAPTER_MODULE, DISCRETE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                pse_context = adapter.prepare_parameters(
                    datatype_group_gid, back_page, color_metric, size_metric)
                pse_context.prepare_individual_jsons()
                return pse_context
            except LaunchException as ex:
                error_msg = urllib.quote(ex.message)
        else:
            error_msg = urllib.quote(
                "Discrete PSE is incompatible (most probably due to result size being too large)."
            )

        name = urllib.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
Example 14
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        importer = ABCAdapter.build_adapter(group)

        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: "test"
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(
            fields=[FilterChain.datatype + ".create_date"],
            operations=[">"],
            values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping
Example 15
    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return review_operation_inputs_from_adapter(adapter, operation)

        except Exception:
            self.logger.exception("Could not load details for operation %s" %
                                  operation_gid)
            parameters = json.loads(operation.parameters)
            if 'gid' in parameters.keys():
                changed_parameters = dict(
                    Warning=
                    "Algorithm changed dramatically. We can not offer more details"
                )
            else:
                changed_parameters = dict(
                    Warning=
                    "GID parameter is missing. Old implementation of the operation."
                )
            return [], changed_parameters
Example 16
 def launch_visualization(visualization, frame_width=None, frame_height=None, 
                          method_name=ABCAdapter.LAUNCH_METHOD, is_preview=True):
     """
     :param visualization: a visualization workflow step
     """
     dynamic_params = visualization.dynamic_param
     static_params = visualization.static_param
     parameters_dict = static_params
     current_project_id = 0
     ## Current operation id needed for export mechanism. So far just use ##
     ## the operation of the workflow_step from which the inputs are taken    ####
     for param in dynamic_params:
         step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
         datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
         referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
         referred_operation_id = referred_workflow_step.fk_operation
         referred_operation = dao.get_operation_by_id(referred_operation_id)
         current_project_id = referred_operation.fk_launched_in
         if isinstance(datatype_index, int):
             ## Entry is the output of a previous step ##
             datatypes = dao.get_results_for_operation(referred_operation_id)
             parameters_dict[param] = datatypes[datatype_index].gid
         else:
             ## Entry is the input of a previous step ###
             parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index]
     algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
     adapter_instance = ABCAdapter.build_adapter(algorithm.algo_group)
     adapter_instance.current_project_id = current_project_id
     prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
     if frame_width is not None:
         prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)
     if isinstance(adapter_instance, ABCMPLH5Displayer) and is_preview is True:
         prepared_inputs[ABCMPLH5Displayer.SHOW_FULL_TOOLBAR] = False
     result = getattr(adapter_instance, method_name)(**prepared_inputs)
     return result, parameters_dict
    def execute_post(self, project_id, submit_url, step_key, algorithm, **data):
        """ Execute HTTP POST on a generic step."""
        errors = None
        adapter_instance = ABCAdapter.build_adapter(algorithm)

        try:
            result = self.flow_service.fire_operation(adapter_instance, common.get_logged_user(), project_id, **data)

            # Store input data in session, for informing user of it.
            step = self.flow_service.get_category_by_id(step_key)
            if not step.rawinput:
                self.context.add_adapter_to_session(None, None, copy.deepcopy(data))

            if isinstance(adapter_instance, ABCDisplayer):
                if isinstance(result, dict):
                    result[common.KEY_OPERATION_ID] = adapter_instance.operation_id
                    return result
                else:
                    common.set_error_message("Invalid result returned from Displayer! Dictionary is expected!")
            else:
                if isinstance(result, list):
                    result = "Launched %s operations." % len(result)
                common.set_important_message(str(result))
        except formencode.Invalid as excep:
            errors = excep.unpack_errors()
Example 18
    def launch_operation(self,
                         operation_id,
                         send_to_cluster=False,
                         adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operations in the same workflow.
        """
        if operation_id is not None:
            operation = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                algorithm = operation.algorithm
                adapter_instance = ABCAdapter.build_adapter(algorithm)
            parsed_params = utils.parse_json_parameters(operation.parameters)
            if 'SimulatorAdapter' not in adapter_instance.__class__.__name__:
                adapter_form = adapter_instance.get_form()()
                adapter_form.fill_from_post(parsed_params)
                adapter_instance.submit_form(adapter_form)

            if send_to_cluster:
                self._send_to_cluster([operation], adapter_instance,
                                      operation.user.username)
            else:
                self.initiate_prelaunch(operation, adapter_instance,
                                        **parsed_params)
Example 19
def fire_simulation(project_id=1, **kwargs):
    project = dao.get_project_by_id(project_id)
    flow_service = FlowService()

    # below the holy procedure to launch with the correct parameters taken from the defaults
    stored_adapter = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    simulator_adapter = ABCAdapter.build_adapter(stored_adapter)
    flatten_interface = simulator_adapter.flaten_input_interface()
    itree_mngr = flow_service.input_tree_manager
    prepared_flatten_interface = itree_mngr.fill_input_tree_with_options(flatten_interface, project.id,
                                                                         stored_adapter.fk_category)
    launch_args = {}
    for entry in prepared_flatten_interface:
        value = entry['default']
        if isinstance(value, dict):
            value = str(value)
        if hasattr(value, 'tolist'):
            value = value.tolist()
        launch_args[entry['name']] = value
    launch_args.update(**kwargs)
    # end of magic

    launched_operation = flow_service.fire_operation(simulator_adapter, project.administrator,
                                                     project.id, **launch_args)[0]
    return launched_operation
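A minimal usage sketch, assuming the default project (id=1) exists and that 'simulation_length' is one of the flattened interface names (the parameter name is an assumption):

    # Hypothetical call: override a single default before launching.
    operation = fire_simulation(project_id=1, simulation_length=100.0)
    print(operation.id, operation.status)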
Example 20
 def import_sensors(user, project, zip_path, sensors_type):
     ### Retrieve Adapter instance 
     group = dao.find_group('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
     importer = ABCAdapter.build_adapter(group)
     args = {'sensors_file': zip_path, 'sensors_type': sensors_type}
     ### Launch Operation
     FlowService().fire_operation(importer, user, project.id, **args)
Example 21
 def create_adapter(module='tvb.tests.framework.adapters.ndimensionarrayadapter',
                    class_name='NDimensionArrayAdapter'):
     """
     :returns: Adapter Class after initialization.
     """
     algorithm = dao.get_algorithm_by_module(module, class_name )
     return ABCAdapter.build_adapter(algorithm)
Example 22
    def execute_post(self, project_id, submit_url, step_key, algorithm,
                     **data):
        """ Execute HTTP POST on a generic step."""
        errors = None
        adapter_instance = ABCAdapter.build_adapter(algorithm)

        try:
            form = adapter_instance.get_form()(project_id=project_id)
            form.fill_from_post(data)
            dt_dict = None
            if form.validate():
                dt_dict = form.get_dict()
            if dt_dict is None:
                raise formencode.Invalid(
                    "Could not build a dict out of this form!", {},
                    None,
                    error_dict=form.get_errors_dict())
            adapter_instance.submit_form(form)
            result = self.flow_service.fire_operation(adapter_instance,
                                                      common.get_logged_user(),
                                                      project_id, **dt_dict)

            # Store input data in session, for informing user of it.
            step = self.flow_service.get_category_by_id(step_key)
            if not step.rawinput:
                self.context.add_adapter_to_session(None, None,
                                                    copy.deepcopy(data))

            if isinstance(adapter_instance, ABCDisplayer):
                if isinstance(result, dict):
                    result[common.
                           KEY_OPERATION_ID] = adapter_instance.operation_id
                    return result
                else:
                    common.set_error_message(
                        "Invalid result returned from Displayer! Dictionary is expected!"
                    )
            else:
                if isinstance(result, list):
                    result = "Launched %s operations." % len(result)
                common.set_important_message(str(result))
        except formencode.Invalid as excep:
            errors = excep.unpack_errors()
            common.set_error_message("Invalid form inputs")
            self.logger.warning("Invalid form inputs %s" % errors)
        except OperationException as excep1:
            self.logger.exception("Error while executing a Launch procedure:" +
                                  excep1.message)
            common.set_error_message(excep1.message)

        previous_step = self.context.get_current_substep()
        should_reset = previous_step is None or data.get(
            common.KEY_ADAPTER) != previous_step
        template_specification = self.get_template_for_adapter(
            project_id, step_key, algorithm, submit_url, should_reset)
        if (errors is not None) and (template_specification is not None):
            template_specification[common.KEY_ERRORS] = errors
        template_specification[
            common.KEY_OPERATION_ID] = adapter_instance.operation_id
        return template_specification
Example 23
    def import_cff(cff_path=None, test_user=None, test_project=None):
        """
        This method is used for importing a CFF data-set (load CFF_Importer, launch it).
        :param cff_path: absolute path where CFF file exists. When None, a default CFF will be used.
        :param test_user: optional persisted User instance, to use as Operation->launcher
        :param test_project: optional persisted Project instance, to use for launching Operation in it. 
        """
        ### Prepare Data
        if cff_path is None:
            cff_path = os.path.join(os.path.dirname(cff_dataset.__file__),
                                    'connectivities.cff')
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.cff_importer',
                               'CFF_Importer')
        importer = ABCAdapter.build_adapter(group)
        args = {
            'cff': cff_path,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }

        ### Launch Operation
        FlowService().fire_operation(importer, test_user, test_project.id,
                                     **args)
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation when resources become available.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = curent_operation.algorithm
        LOGGER.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                     " for Operation:" + str(curent_operation.id))
        PARAMS = parse_json_parameters(curent_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)

        ## Un-comment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, curent_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(curent_operation,
                                              adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        LOGGER.error("Could not execute operation " + str(operation_id))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst,
                                                  error_message=str(excep))
    def test_happy_flow_surface_import(self):
        """
        Verifies the happy flow for importing a surface.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())
        group = dao.find_group(
            "tvb.adapters.uploaders.projection_matrix_importer", "ProjectionMatrixSurfaceEEGImporter"
        )
        importer = ABCAdapter.build_adapter(group)

        file_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)), "projection_eeg_65_surface_16k.npy"
        )
        args = {
            "projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
        }

        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())

        self.assertEqual(dt_count_before + 1, dt_count_after)
    def _importSurface(self, import_file_path=None):
        """
        This method is used for importing data in GIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.gifti_surface_importer',
                               'GIFTISurfaceImporter')
        importer = ABCAdapter.build_adapter(group)

        args = {
            'data_file': import_file_path,
            DataTypeMetaData.KEY_SUBJECT: ""
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, surface.module + "." + surface.type)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")

        return surface
    def test_happy_flow_import(self):
        """
        Test that importing a connectivity ZIP generates at least one DataType in DB.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                       Connectivity())
        group = dao.find_group(
            'tvb.adapters.uploaders.zip_connectivity_importer',
            'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        zip_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)),
            'connectivity_regions_96.zip')
        args = {'uploaded': zip_path}

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                      Connectivity())
        self.assertTrue(dt_count_after == dt_count_before + 1)
Example 29
    def execute_post(self, project_id, submit_url, step_key, algorithm,
                     **data):
        """ Execute HTTP POST on a generic step."""
        errors = None
        adapter_instance = ABCAdapter.build_adapter(algorithm)

        try:
            result = self.flow_service.fire_operation(adapter_instance,
                                                      common.get_logged_user(),
                                                      project_id, **data)

            # Store input data in session, for informing user of it.
            step = self.flow_service.get_category_by_id(step_key)
            if not step.rawinput:
                self.context.add_adapter_to_session(None, None,
                                                    copy.deepcopy(data))

            if isinstance(adapter_instance, ABCDisplayer):
                if isinstance(result, dict):
                    result[common.
                           KEY_OPERATION_ID] = adapter_instance.operation_id
                    return result
                else:
                    common.set_error_message(
                        "Invalid result returned from Displayer! Dictionary is expected!"
                    )
            else:
                if isinstance(result, list):
                    result = "Launched %s operations." % len(result)
                common.set_important_message(str(result))
        except formencode.Invalid as excep:
            errors = excep.unpack_errors()
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer',
                               'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: "",
            DataTypeMetaData.KEY_STATE: "RAW"
        }

        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        time_series = TimeSeries()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, time_series.module + "." + time_series.type)
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None,
                        "TimeSeries should not be none")

        return time_series
    def get_series_array_discrete(self,
                                  datatype_group_gid,
                                  backPage,
                                  color_metric=None,
                                  size_metric=None):
        """
        Create new data for when the user chooses to refresh from the UI.
        """
        if color_metric == 'None':
            color_metric = None
        if size_metric == 'None':
            size_metric = None

        algorithm = self.flow_service.get_algorithm_by_module_and_class(
            IntrospectionRegistry.DISCRETE_PSE_ADAPTER_MODULE,
            IntrospectionRegistry.DISCRETE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                pse_context = adapter.prepare_parameters(
                    datatype_group_gid, backPage, color_metric, size_metric)
                return dict(series_array=pse_context.series_array,
                            has_started_ops=pse_context.has_started_ops)
            except LaunchException as ex:
                error_msg = urllib.parse.quote(ex.message)
        else:
            error_msg = urllib.parse.quote(
                "Discrete PSE is incompatible (most probably due to result size being too large)."
            )

        name = urllib.parse.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
Example 32
    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a list of BCT adapters;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user("BCT_User")
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "BCT-Project")
        # Make sure Connectivity is in DB
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        conn_index = TestFactory.import_zip_connectivity(
            self.test_user, self.test_project, zip_path)
        self.connectivity = ABCAdapter.load_traited_by_gid(conn_index.gid)

        # make weights matrix symmetric, or else some BCT algorithms will run infinitely:
        w = self.connectivity.weights
        self.connectivity.weights = w + w.T - numpy.diag(w.diagonal())

        algorithms = dao.get_generic_entity(Algorithm,
                                            'Brain Connectivity Toolbox',
                                            'group_description')
        assert algorithms is not None
        assert len(algorithms) > 5

        self.bct_adapters = []
        for algo in algorithms:
            self.bct_adapters.append(ABCAdapter.build_adapter(algo))
Example 33
    def test_datatypes_groups(self, test_adapter_factory):
        """
        Tests that the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        # TODO: re-write this to use groups correctly
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        algo = test_adapter_factory(TestAdapter3)
        adapter_instance = ABCAdapter.build_adapter(algo)
        data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        FlowService().fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
    def test_wrong_shape(self):
        """
        Verifies that importing a projection matrix with a different shape throws an exception.
        """
        group = dao.find_group(
            "tvb.adapters.uploaders.projection_matrix_importer", "ProjectionMatrixSurfaceEEGImporter"
        )
        importer = ABCAdapter.build_adapter(group)

        file_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)), "projection_eeg_62_surface_16k.mat"
        )
        args = {
            "projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
        }

        try:
            FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
            self.fail("This was expected not to run! 62 rows in proj matrix, but 65 sensors")
        except OperationException:
            pass
Example 35
    def create_operation(test_user=None,
                         test_project=None,
                         operation_status=STATUS_FINISHED):
        """
        Create persisted operation.
        :return: Operation entity after persistence.
        """
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                TVB_IMPORTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        view_model = adapter.get_view_model_class()()
        view_model.data_file = "."
        operation = Operation(view_model.gid.hex,
                              test_user.id,
                              test_project.id,
                              algorithm.id,
                              status=operation_status)
        dao.store_entity(operation)
        op_dir = StorageInterface().get_project_folder(test_project.name,
                                                       str(operation.id))
        h5.store_view_model(view_model, op_dir)
        return dao.get_operation_by_id(operation.id)
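A minimal usage sketch for the factory above, assuming it is reachable as a static helper (the enclosing class is not shown):

    # Hypothetical usage: persist a finished operation and check it came back from the DAO.
    operation = create_operation()
    assert operation.status == STATUS_FINISHED
    assert operation.id is not None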
    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 0

        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                            'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        # create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 1

        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 2

        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        assert count == 3

        return array_wrappers
Example 37
    def test_happy_flow_region_import(self):
        """
        Verifies the happy flow for importing a region.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                       ProjectionRegionEEG())
        group = dao.find_group(
            'tvb.adapters.uploaders.projection_matrix_importer',
            'ProjectionMatrixRegionEEGImporter')
        importer = ABCAdapter.build_adapter(group)

        zip_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)),
            'region_conn_74_eeg_1020_62.mat')
        args = {
            'projection_file': zip_path,
            'dataset_name': 'ProjectionMatrix',
            'connectivity': self.connectivity.gid,
            'sensors': self.sensors.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }

        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                      ProjectionRegionEEG())

        self.assertEqual(dt_count_before + 1, dt_count_after)
Example 38
    def parse_event_node(self):
        """
        Parse the stored event node to get required data and arguments.
        """
        kw_parameters = {}
        for one_arg in self.event_node.childNodes:
            if one_arg.nodeType != Node.ELEMENT_NODE:
                continue
            if one_arg.nodeName == ELEM_ADAPTER:
                # TODO: so far there is no need for it, but maybe we should handle
                # cases where the same module/class has different init parameters
                group = dao.find_group(one_arg.getAttribute(ATT_MODULE),
                                       one_arg.getAttribute(ATT_CLASS))
                adapter = ABCAdapter.build_adapter(group)
                result_uid = one_arg.getAttribute(ATT_UID)
                if result_uid:
                    kw_parameters[ATT_UID] = result_uid
                LOGGER.debug("Adapter used is %s", str(adapter.__class__))
                self.callable_object = adapter
                continue
            if one_arg.nodeName == ELEM_METHOD:
                self.call_method = one_arg.getAttribute(ATT_NAME)
                if one_arg.getAttribute(ATT_OPERATION_HIDDEN):
                    self.operation_visible = False
                continue
            if one_arg.nodeName == ELEM_ARGS:
                kw_parameters.update(_parse_arguments(one_arg))
                continue
            LOGGER.info("Ignored undefined node %s", str(one_arg.nodeName))

        self.arguments.update(kw_parameters)
    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)
        
        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation when resources become available.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = curent_operation.algorithm
        LOGGER.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                     " for Operation:" + str(curent_operation.id))
        PARAMS = parse_json_parameters(curent_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)

        ## Un-comment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, curent_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(curent_operation, adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        LOGGER.error("Could not execute operation " + str(operation_id))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst, error_message=str(excep))
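This launcher is typically run as a standalone script that receives the operation id on the command line; a minimal entry point could look like the sketch below (the wiring is an assumption, not shown above):

    if __name__ == "__main__":
        # Hypothetical CLI entry: the operation id is expected as the first argument.
        import sys
        do_operation_launch(sys.argv[1])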
Example 41
    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        # todo rewrite after neotraits TVB-2687
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except Exception:
            self.logger.exception("Could not load details for operation %s" %
                                  operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(
                Warning=
                "Algorithm changed dramatically. We can not offer more details"
            )
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters
    def _run_cff_importer(self, cff_path):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.cff_importer', 'CFF_Importer')
        importer = ABCAdapter.build_adapter(group)
        args = {'cff': cff_path, DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
    def _import_connectivity(self):
        group = dao.find_group('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path, Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
Example n. 45
 def import_surface_zip(user, project, zip_path, surface_type, zero_based):
     ### Retrieve Adapter instance 
     group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
     importer = ABCAdapter.build_adapter(group)
     args = {'uploaded': zip_path, 'surface_type': surface_type,
             'zero_based_triangles': zero_based}
     
     ### Launch Operation
     FlowService().fire_operation(importer, user, project.id, **args)
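 # Hedged usage sketch (not part of the original snippet): importing a cortical
 # surface archive shipped with tvb_data. The 'surfaceData/cortex_16384.zip' layout
 # and the 'Cortical Surface' type string are assumptions, as are the 'path' and
 # 'tvb_data' imports used in the connectivity import example below.
 def import_default_cortex(user, project):
     data_dir = path.abspath(path.dirname(tvb_data.__file__))
     zip_path = path.join(data_dir, 'surfaceData', 'cortex_16384.zip')
     import_surface_zip(user, project, zip_path, surface_type='Cortical Surface', zero_based=True)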
Example n. 46
 def _review_operation_inputs(operation_gid):
     """
     :returns: A list of DataTypes that are used as input parameters for the specified operation,
              and a dictionary with all operation parameters that differ from the default ones.
     """
     operation = dao.get_operation_by_gid(operation_gid)
     parameters = json.loads(operation.parameters)
     adapter = ABCAdapter.build_adapter(operation.algorithm.algo_group)
     return adapter.review_operation_inputs(parameters)
Example n. 47
    def import_surface_obj(user, project, obj_path, surface_type):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': obj_path,
                'surface_type': surface_type}

        ### Launch Operation
        FlowService().fire_operation(importer, user, project.id, **args)
    def import_test_connectivity96(test_user, test_project, subject=DataTypeMetaData.DEFAULT_SUBJECT):
        """
        Import a connectivity with 96 regions from tvb_data.
        """
        group = dao.find_group('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)

        data_dir = path.abspath(path.dirname(tvb_data.__file__))
        zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip')
        ### Launch Operation
        FlowService().fire_operation(importer, test_user, test_project.id, uploaded=zip_path, Data_Subject=subject)
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
 def build_adapter_from_declaration(cls, adapter_declaration):
     """
     Build an adapter from the declaration in the portlets XML.
     """
     adapter_import_path = adapter_declaration[ABCAdapter.KEY_TYPE]
     class_name = adapter_import_path.split('.')[-1]
     module = adapter_import_path.replace('.' + class_name, '')
     algo = dao.get_algorithm_by_module(module, class_name)
     if algo is not None:
         return ABCAdapter.build_adapter(algo)
     else:
         return None
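 # Hedged usage sketch (not part of the original snippet): a portlet declaration is a
 # dictionary whose ABCAdapter.KEY_TYPE entry holds the adapter's import path; the
 # path below and calling the helper unbound (cls=None) are illustrative only.
 declaration = {ABCAdapter.KEY_TYPE: 'tvb.adapters.uploaders.zip_connectivity_importer.ZIPConnectivityImporter'}
 adapter = build_adapter_from_declaration(None, declaration)
 if adapter is None:
     print("Adapter is not registered in the current database.")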
    def _import(self, import_file_name):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.connectivity_measure_importer', 'ConnectivityMeasureImporter')
        importer = ABCAdapter.build_adapter(group)
        path = os.path.join(os.path.dirname(test_data.__file__), import_file_name)

        args = {'data_file': path,
                'connectivity': self.connectivity.gid,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
Example n. 52
 def build_adapter_instance(self, group):
     """
     Having a module and a class name, create an instance of ABCAdapter.
     """
     try:
         return ABCAdapter.build_adapter(group)
     except IntrospectionException as excep:
         if group is None:
             self.logger.error('The given algorithm group is None.')
             self.logger.exception(excep)
             raise OperationException("Could not prepare the algorithm group.")
         self.logger.error('Not found: ' + group.classname + ' in: ' + group.module)
         self.logger.exception(excep)
         raise OperationException("Could not prepare " + group.classname)
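 # Hedged usage sketch (not part of the original snippet): callers deal only with the
 # wrapped OperationException raised above, not the low-level IntrospectionException.
 def build_adapter_or_none(self, group):
     try:
         return self.build_adapter_instance(group)
     except OperationException as op_excep:
         self.logger.warning("Adapter could not be built: %s" % op_excep)
         return None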
Example n. 53
    def setUp(self):
        """
        Set up any additionally needed parameters.
        """
        super(GenshiTestGroup, self).setUp()

        xml_group_path = os.path.join('tests', 'framework', 'interfaces', 'web', "test_group.xml")
        algo_group = dao.find_group('tvb.tests.framework.adapters.testgroupadapter', 'TestGroupAdapter', xml_group_path)
        self.xml_group_adapter = ABCAdapter.build_adapter(algo_group)
        input_tree = self.xml_group_adapter.get_input_tree()
        input_tree = ABCAdapter.prepare_param_names(input_tree)
        self.template_specification['inputList'] = input_tree
        self.template_specification[common.KEY_PARAMETERS_CONFIG] = False
        resulted_html = _template2string(self.template_specification)
        self.soup = BeautifulSoup(resulted_html)
 def submit_noise_configuration(self):
     """
     Collects the model parameters values from all the models used for the connectivity nodes.
     """
     context_noise_config = base.get_from_session(KEY_CONTEXT_NC)
     burst_configuration = base.get_from_session(base.KEY_BURST_CONFIG)
     _, simulator_group = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
     simulator_adapter = ABCAdapter.build_adapter(simulator_group)
     for param_name in simulator_adapter.noise_configurable_parameters():
         burst_configuration.update_simulation_parameter(param_name, str(context_noise_config.noise_values)) 
     ### Clean from session drawing context
     base.remove_from_session(KEY_CONTEXT_NC)
     ### Update in session BURST configuration for burst-page. 
     base.add2session(base.KEY_BURST_CONFIG, burst_configuration.clone())
     raise cherrypy.HTTPRedirect("/burst/")
 def build_adapter_from_declaration(cls, adapter_declaration):
     """
      Build an adapter from the declaration in the portlets XML.
     """
     adapter_import_path = adapter_declaration[ABCAdapter.KEY_TYPE]
     class_name = adapter_import_path.split(".")[-1]
     module = adapter_import_path.replace("." + class_name, "")
     if "initparam" in adapter_declaration:
         algo_group = dao.find_group(module, class_name, adapter_declaration["initparam"])
     else:
         algo_group = dao.find_group(module, class_name)
     if algo_group is not None:
         return ABCAdapter.build_adapter(algo_group), algo_group
     else:
         return None, None
    def setUp(self):
        """
        Reset the database before each test.
        """
        initialize_storage()
        self.datatypes_factory = DatatypesFactory()
        self.test_user = self.datatypes_factory.get_user()
        self.test_project = self.datatypes_factory.get_project()
        self.connectivity = self.datatypes_factory.create_connectivity(self.CONNECTIVITY_NODES)[1]

        algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.simulator_adapter = ABCAdapter.build_adapter(algorithm)
        self.operation = TestFactory.create_operation(algorithm, self.test_user, self.test_project,
                                                      model.STATUS_STARTED, json.dumps(SIMULATOR_PARAMETERS))

        SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid
    def draw_isocline_exploration(self, datatype_group_gid):

        algorithm = self.flow_service.get_algorithm_by_module_and_class(ISOCLINE_PSE_ADAPTER_MODULE,
                                                                        ISOCLINE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                return adapter.burst_preview(datatype_group_gid)
            except LaunchException as ex:
                self.logger.error(ex.message)
                error_msg = urllib.quote(ex.message)
        else:
            error_msg = urllib.quote("Isocline PSE requires a 2D range of floating point values.")

        name = urllib.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
    def launch_operation(self, operation_id, send_to_cluster=False, adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operation in the same workflow.
        """
        if operation_id is not None:
            operation = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                algorithm = operation.algorithm
                adapter_instance = ABCAdapter.build_adapter(algorithm)
            parsed_params = utils.parse_json_parameters(operation.parameters)

            if send_to_cluster:
                self._send_to_cluster([operation], adapter_instance, operation.user.username)
            else:
                self.initiate_prelaunch(operation, adapter_instance, {}, **parsed_params)
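    # Hedged usage sketch (not part of the original snippet): re-launching a stored
    # operation locally; the adapter is rebuilt from the operation's algorithm when
    # no instance is supplied.
    def relaunch_locally(self, operation_id):
        self.launch_operation(operation_id, send_to_cluster=False)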
    def draw_isocline_exploration(self, datatype_group_gid, width=None, height=None):

        if width is not None:
            width = int(width)
        if height is not None:
            height = int(height)

        algorithm = self.flow_service.get_algorithm_by_module_and_class(ISOCLINE_PSE_ADAPTER_MODULE,
                                                                        ISOCLINE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                return adapter.burst_preview(datatype_group_gid, width, height)
            except LaunchException as ex:
                self.logger.error(ex.message)
                error_msg = urllib.quote(ex.message)