def load_region_stimulus(self, region_stimulus_gid, from_step=None):
        """
        Loads the interface for the selected region stimulus.
        """
        selected_region_stimulus = ABCAdapter.load_entity_by_gid(region_stimulus_gid)
        temporal_eq = selected_region_stimulus.temporal
        spatial_eq = selected_region_stimulus.spatial
        connectivity = selected_region_stimulus.connectivity
        weights = selected_region_stimulus.weight

        temporal_eq_type = temporal_eq.__class__.__name__
        spatial_eq_type = spatial_eq.__class__.__name__
        default_dict = {'temporal': temporal_eq_type, 'spatial': spatial_eq_type,
                        'connectivity': connectivity.gid, 'weight': json.dumps(weights)}
        for param in temporal_eq.parameters:
            prepared_name = 'temporal_parameters_option_' + str(temporal_eq_type)
            prepared_name = prepared_name + '_parameters_parameters_' + str(param)
            default_dict[prepared_name] = str(temporal_eq.parameters[param])
        for param in spatial_eq.parameters:
            prepared_name = 'spatial_parameters_option_' + str(spatial_eq_type) + '_parameters_parameters_' + str(param)
            default_dict[prepared_name] = str(spatial_eq.parameters[param])

        input_list = self.get_creator_and_interface(REGION_STIMULUS_CREATOR_MODULE,
                                                    REGION_STIMULUS_CREATOR_CLASS, StimuliRegion())[1]
        input_list = ABCAdapter.fill_defaults(input_list, default_dict)
        context = common.get_from_session(KEY_REGION_CONTEXT)
        context.reset()
        context.update_from_interface(input_list)
        context.equation_kwargs[DataTypeMetaData.KEY_TAG_1] = selected_region_stimulus.user_tag_1
        context.set_active_stimulus(region_stimulus_gid)

        return self.do_step(from_step)
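
The flattened parameter names built above follow TVB's nested-interface naming scheme. A minimal sketch of the pattern, with a hypothetical helper name that is not part of the TVB API:

def _flat_param_name(kind, eq_type, param):
    # kind is 'temporal' or 'spatial'; eq_type is the equation class name.
    return kind + '_parameters_option_' + str(eq_type) + '_parameters_parameters_' + str(param)

# e.g. _flat_param_name('temporal', 'PulseTrain', 'onset')
# -> 'temporal_parameters_option_PulseTrain_parameters_parameters_onset'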
Example #2
    def update_default_values(portlet_interface, portlet_configuration):
        """
        :param portlet_interface: a list of AdapterConfiguration entities.
        :param portlet_configuration: a PortletConfiguration entity.
        
        Update the defaults from each AdapterConfiguration entity with the 
        values stored in the corresponding workflow step held in the 
        PortletConfiguration entity.
        """
        # Check for any defaults first in analyzer steps
        if portlet_configuration.analyzers:
            for adapter_idx in xrange(len(portlet_interface[:-1])):
                saved_configuration = portlet_configuration.analyzers[adapter_idx]
                replaced_defaults_dict = ABCAdapter.fill_defaults(
                    portlet_interface[adapter_idx].interface, saved_configuration.static_param
                )
                portlet_interface[adapter_idx].interface = replaced_defaults_dict

        # Check for visualization defaults
        if portlet_configuration.visualizer:
            saved_configuration = portlet_configuration.visualizer
            replaced_defaults_dict = ABCAdapter.fill_defaults(
                portlet_interface[-1].interface, saved_configuration.static_param
            )
            portlet_interface[-1].interface = replaced_defaults_dict
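
For reference, a simplified sketch of the fill_defaults contract assumed above: walk the interface tree and overwrite each entry's 'default' with the saved static value, matched by name. The real ABCAdapter.fill_defaults also recurses into nested options; this flat version is illustrative only:

def fill_defaults_sketch(input_tree, defaults):
    # Overwrite 'default' for every entry whose name has a saved value.
    for entry in input_tree:
        if entry['name'] in defaults:
            entry['default'] = defaults[entry['name']]
    return input_tree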
Example #3
    def read_datatype_attribute(self, entity_gid, dataset_name, flatten=False, datatype_kwargs='null', **kwargs):
        """
        Retrieve from a given DataType a property or a method result.

        :returns: JSON representation of the attribute.
        :param entity_gid: GID for DataType entity
        :param dataset_name: name of the DataType property/method
        :param flatten: result should be flattened before returning (mainly for WebGL data, e.g. vertices/triangles)
            Ignored if the attribute is not an ndarray
        :param datatype_kwargs: if passed, will contain a dictionary of type {'name' : 'gid'}, and for each such
            pair, a load_entity will be performed and kwargs will be updated to contain the result
        :param kwargs: extra parameters to be passed when dataset_name is a method.

        """
        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
        entity = ABCAdapter.load_entity_by_gid(entity_gid)
        datatype_kwargs = json.loads(datatype_kwargs)
        if datatype_kwargs:
            for key, value in datatype_kwargs.iteritems():
                kwargs[key] = ABCAdapter.load_entity_by_gid(value)
        dataset = getattr(entity, dataset_name)
        if not kwargs:
            # why the deep copy?
            result = copy.deepcopy(dataset)
        else:
            result = dataset(**kwargs)

        if isinstance(result, numpy.ndarray):
            # for ndarrays honor the flatten kwarg and convert to lists, as ndarrays are not JSON-serializable
            if flatten is True or flatten == "True":
                result = result.flatten()
            return result.tolist()
        else:
            return result
Example #4
 def read_datatype_attribute(self, entity_gid, dataset_name, flatten=False, datatype_kwargs='null', **kwargs):
     """
     Retrieve from a given DataType a property or a method result.
     :returns: JSON with a NumPy array
     :param entity_gid: GID for DataType entity
     :param dataset_name: name of the DataType property/method
     :param flatten: result should be flattened before returning (mainly for WebGL data, e.g. vertices/triangles)
     :param datatype_kwargs: if passed, will contain a dictionary of type {'name' : 'gid'}, and for each such
     pair, a load_entity will be performed and kwargs will be updated to contain the result
     :param kwargs: extra parameters to be passed when dataset_name is a method.
     """
     try:
         self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
         entity = ABCAdapter.load_entity_by_gid(entity_gid)
         if kwargs is None:
             kwargs = {}
         datatype_kwargs = json.loads(datatype_kwargs)
         if datatype_kwargs is not None:
             for key in datatype_kwargs:
                 kwargs[key] = ABCAdapter.load_entity_by_gid(datatype_kwargs[key])
         if len(kwargs) < 1:
             numpy_array = copy.deepcopy(getattr(entity, dataset_name))
         else:
             numpy_array = getattr(entity, dataset_name)(**kwargs)
         if (flatten is True) or (flatten == "True"):
             numpy_array = numpy_array.flatten()
         return numpy_array.tolist()
     except Exception, excep:
         self.logger.error("Could not retrieve complex entity field:" + str(entity_gid) + "/" + str(dataset_name))
         self.logger.exception(excep)
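
In both variants, datatype_kwargs is a JSON object mapping argument names to GIDs; each GID is resolved with load_entity_by_gid before the call. A hedged usage sketch (the names and the GID are placeholders, not real values):

import json

# Pass another DataType as an argument to a method-style attribute.
datatype_kwargs = json.dumps({'region_mapping': 'gid-of-a-region-mapping'})
# flow.read_datatype_attribute(surface_gid, 'get_data', datatype_kwargs=datatype_kwargs)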
Example #5
    def __init__(self, list_of_entities_to_store):
        """
        Expects a list of 'DataType' instances.
        """
        ABCAdapter.__init__(self)
        if (list_of_entities_to_store is None 
            or not isinstance(list_of_entities_to_store, list) 
            or len(list_of_entities_to_store) == 0):
            raise Exception("The adapter expects a list of entities")

        self.list_of_entities_to_store = list_of_entities_to_store
Example #6
    def setUp(self):
        """
        Set up any additionally needed parameters.
        """
        super(GenshiTestGroup, self).setUp()

        xml_group_path = os.path.join('tests', 'framework', 'interfaces', 'web', "test_group.xml")
        algo_group = dao.find_group('tvb.tests.framework.adapters.testgroupadapter', 'TestGroupAdapter', xml_group_path)
        self.xml_group_adapter = ABCAdapter.build_adapter(algo_group)
        input_tree = self.xml_group_adapter.get_input_tree()
        input_tree = ABCAdapter.prepare_param_names(input_tree)
        self.template_specification['inputList'] = input_tree
        self.template_specification[common.KEY_PARAMETERS_CONFIG] = False
        resulted_html = _template2string(self.template_specification)
        self.soup = BeautifulSoup(resulted_html)
Example #7
    def get_series_array_discrete(self, datatype_group_gid, backPage, color_metric=None, size_metric=None):
        """
        Create new data for when the user chooses to refresh from the UI.
        """
        if color_metric == 'None':
            color_metric = None
        if size_metric == 'None':
            size_metric = None

        algorithm = self.flow_service.get_algorithm_by_module_and_class(DISCRETE_PSE_ADAPTER_MODULE,
                                                                        DISCRETE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                pse_context = adapter.prepare_parameters(datatype_group_gid, backPage, color_metric, size_metric)
                return dict(series_array=pse_context.series_array,
                            has_started_ops=pse_context.has_started_ops)
            except LaunchException as ex:
                error_msg = urllib.quote(ex.message)
        else:
            error_msg = urllib.quote(
                "Discrete PSE is incompatible (most probably due to result size being too large).")

        name = urllib.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
Example #8
 def step_2(self, **kwargs):
     """
     Generate the html for the second step of the local connectivity page.
     :param kwargs: not actually used, but parameters are still submitted from UI since we just\
            use the same js function for this. TODO: do this in a smarter way
     """
     context = common.get_from_session(KEY_LCONN_CONTEXT)
     left_side_interface = self.get_select_existent_entities('Load Local Connectivity:', LocalConnectivity,
                                                             context.selected_entity)
     template_specification = dict(title="Surface - Local Connectivity")
     template_specification['mainContent'] = 'spatial/local_connectivity_step2_main'
     template_specification['existentEntitiesInputList'] = left_side_interface
     template_specification['loadExistentEntityUrl'] = LOAD_EXISTING_URL
     template_specification['resetToDefaultUrl'] = RELOAD_DEFAULT_PAGE_URL
     template_specification['next_step_url'] = '/spatial/localconnectivity/step_1'
     msg, _ = common.get_message_from_session()
     template_specification['displayedMessage'] = msg
     if context.selected_entity is not None:
         selected_local_conn = ABCAdapter.load_entity_by_gid(context.selected_entity)
         template_specification.update(self.display_surface(selected_local_conn.surface.gid))
         template_specification['no_local_connectivity'] = False
         min_value, max_value = selected_local_conn.get_min_max_values()
         template_specification['minValue'] = min_value
         template_specification['maxValue'] = max_value
     else:
         template_specification['no_local_connectivity'] = True
     template_specification[common.KEY_PARAMETERS_CONFIG] = False
     return self.fill_default_attributes(template_specification)
Example #9
    def get_surface_model_parameters_data(self, default_selected_model_param=None):
        """
        Returns a dictionary which contains all the data needed for drawing the
        model parameters.
        """
        context_model_parameters = base.get_from_session(KEY_CONTEXT_MPS)
        if default_selected_model_param is None:
            default_selected_model_param = context_model_parameters.prepared_model_parameter_names.values()[0]

        equation_displayer = EquationDisplayer()
        equation_displayer.trait.bound = interface.INTERFACE_ATTRIBUTES_ONLY
        input_list = equation_displayer.interface[interface.INTERFACE_ATTRIBUTES]
        input_list[0] = self._lock_midpoints(input_list[0])

        options = []
        for original_param, modified_param in context_model_parameters.prepared_model_parameter_names.items():
            attributes = deepcopy(input_list)
            self._fill_default_values(attributes, modified_param)
            option = {'name': original_param, 'value': modified_param, 'attributes': attributes}
            options.append(option)

        input_list = [{'name': 'model_param', 'type': 'select', 'default': default_selected_model_param,
                       'label': 'Model param', 'required': True, 'options': options}]
        input_list = ABCAdapter.prepare_param_names(input_list)
        return {base.KEY_PARAMETERS_CONFIG: False, 'inputList': input_list,
                'applied_equations': context_model_parameters.get_configure_info()}
Example #10
def cdata2local_connectivity(local_connectivity_data, meta, storage_path, expected_length=0):
    """
    From a CData entry in CFF, create LocalConnectivity entity.
    """
    ##### expected_length = cortex.region_mapping.shape[0]
    tmpdir = os.path.join(gettempdir(), local_connectivity_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for Local Connectivity import: " + tmpdir)
    _zipfile = ZipFile(local_connectivity_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    local_connectivity_path = _zipfile.extract(local_connectivity_data.src, tmpdir)
    
    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)
    
    local_connectivity = surfaces.LocalConnectivity()
    local_connectivity.storage_path = storage_path 
    local_connectivity_data = read_matlab_data(local_connectivity_path, constants.DATA_NAME_LOCAL_CONN)
    
    if local_connectivity_data.shape[0] < expected_length:
        padding = sparse.csc_matrix((local_connectivity_data.shape[0],
                                    expected_length - local_connectivity_data.shape[0]))
        local_connectivity_data = sparse.hstack([local_connectivity_data, padding])
            
        padding = sparse.csc_matrix((expected_length - local_connectivity_data.shape[0],
                                     local_connectivity_data.shape[1]))
        local_connectivity_data = sparse.vstack([local_connectivity_data, padding])
    
    local_connectivity.equation = None
    local_connectivity.matrix = local_connectivity_data        
    local_connectivity.surface = surface_data
    
    uid = meta[constants.KEY_UID] if constants.KEY_UID in meta else None
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    return local_connectivity, uid
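
The branch above pads a (presumably square) sparse matrix up to expected_length x expected_length by appending zero columns, then zero rows. A standalone sketch of just that step, assuming scipy is available:

from scipy import sparse

def pad_to_square(mat, expected_length):
    # Mirror the padding above: zero columns first, then zero rows.
    if mat.shape[0] < expected_length:
        cols = sparse.csc_matrix((mat.shape[0], expected_length - mat.shape[0]))
        mat = sparse.hstack([mat, cols])
        rows = sparse.csc_matrix((expected_length - mat.shape[0], mat.shape[1]))
        mat = sparse.vstack([mat, rows])
    return mat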
Example #11
def cdata2eeg_mapping(eeg_mapping_data, meta, storage_path, expected_shape=0):
    """
    Currently not used
    """
    tmpdir = os.path.join(gettempdir(), eeg_mapping_data.parent_cfile.get_unique_cff_name())
    LOG.debug("Using temporary folder for EEG_Mapping import: " + tmpdir)
    _zipfile = ZipFile(eeg_mapping_data.parent_cfile.src, 'r', ZIP_DEFLATED)
    eeg_projection_path = _zipfile.extract(eeg_mapping_data.src, tmpdir)
    eeg_projection_data = read_matlab_data(eeg_projection_path, constants.DATA_NAME_PROJECTION)
    if eeg_projection_data.shape[1] < expected_shape:
        padding = numpy.zeros((eeg_projection_data.shape[0], expected_shape - eeg_projection_data.shape[1]))
        eeg_projection_data = numpy.hstack((eeg_projection_data, padding))
        
    gid = dao.get_last_data_with_uid(meta[constants.KEY_SURFACE_UID], surfaces.CorticalSurface)
    surface_data = ABCAdapter.load_entity_by_gid(gid)
    
    projection_matrix = projections.ProjectionSurfaceEEG(storage_path=storage_path)
    projection_matrix.projection_data = eeg_projection_data
    projection_matrix.sources = surface_data
    projection_matrix.sensors = None
    ### TODO if we decide to use this method, we will need to find a manner to fill the sensors.
    return projection_matrix
Example #12
    def parse_event_node(self):
        """
        Parse the stored event node to get required data and arguments.
        """
        kw_parameters = {}
        for one_arg in self.event_node.childNodes:
            if one_arg.nodeType != Node.ELEMENT_NODE:
                continue
            if one_arg.nodeName == ELEM_ADAPTER:
                #TODO: so far there is no need for it, but we should maybe
                #handle cases where same module/class but different init parameter
                group = dao.find_group(one_arg.getAttribute(ATT_MODULE), one_arg.getAttribute(ATT_CLASS))
                adapter = ABCAdapter.build_adapter(group)
                result_uid = one_arg.getAttribute(ATT_UID)
                if result_uid:
                    kw_parameters[ATT_UID] = result_uid
                LOGGER.debug("Adapter used is %s", str(adapter.__class__))
                self.callable_object = adapter
                continue
            if one_arg.nodeName == ELEM_METHOD:
                self.call_method = one_arg.getAttribute(ATT_NAME)
                if one_arg.getAttribute(ATT_OPERATION_HIDDEN):
                    self.operation_visible = False
                continue
            if one_arg.nodeName == ELEM_ARGS:
                kw_parameters.update(_parse_arguments(one_arg))
                continue
            LOGGER.info("Ignored undefined node %s", str(one_arg.nodeName))

        self.arguments.update(kw_parameters)
Example #13
    def get_template_for_adapter(self, project_id, step_key, algo_group, submit_url, session_reset=True, is_burst=True):
        """ Get Input HTML Interface template or a given adapter """
        try:
            if session_reset:
                self.context.clean_from_session()

            group = None
            # Cache some values in session, for performance
            previous_tree = self.context.get_current_input_tree()
            previous_sub_step = self.context.get_current_substep()
            if not session_reset and previous_tree is not None and previous_sub_step == algo_group.id:
                adapter_interface = previous_tree
            else:
                group, adapter_interface = self.flow_service.prepare_adapter(project_id, algo_group)
                self.context.add_adapter_to_session(algo_group, adapter_interface)

            category = self.flow_service.get_category_by_id(step_key)
            title = "Fill parameters for step " + category.displayname.lower()
            if group:
                title = title + " - " + group.displayname

            current_defaults = self.context.get_current_default()
            if current_defaults is not None:
                #Change default values in tree, according to selected input
                adapter_interface = ABCAdapter.fill_defaults(adapter_interface, current_defaults)

            template_specification = dict(submitLink=submit_url, inputList=adapter_interface, title=title)
            self._populate_section(algo_group, template_specification, is_burst)
            return template_specification
        except OperationException, oexc:
            self.logger.error("Inconsistent Adapter")
            self.logger.exception(oexc)
            common.set_warning_message('Inconsistent Adapter! Please review the link (development problem)!')
Example #14
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        curent_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = curent_operation.algorithm
        LOGGER.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                     " for Operation:" + str(curent_operation.id))
        PARAMS = parse_json_parameters(curent_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)

        ## Un-comment bellow for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, curent_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(curent_operation, adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        LOGGER.error("Could not execute operation " + str(sys.argv[1]))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst, error_message=str(excep))
Example #15
 def test_build_adapter_instance(self):
     """
     Test standard flow for building an adapter instance.
     """
     algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
     adapter = ABCAdapter.build_adapter(algo_group)
     self.assertTrue(isinstance(adapter, ABCSynchronous), "Something went wrong with valid data!")
Example #16
 def create_adapter(module='tvb.tests.framework.adapters.ndimensionarrayadapter',
                    class_name='NDimensionArrayAdapter'):
     """
     :returns: Adapter Class after initialization.
     """
     algorithm = dao.get_algorithm_by_module(module, class_name)
     return ABCAdapter.build_adapter(algorithm)
Example #17
    def _read_datatype_attribute(self, entity_gid, dataset_name, datatype_kwargs='null', **kwargs):
        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
        entity = ABCAdapter.load_entity_by_gid(entity_gid)

        datatype_kwargs = json.loads(datatype_kwargs)
        if datatype_kwargs:
            for key, value in datatype_kwargs.iteritems():
                kwargs[key] = ABCAdapter.load_entity_by_gid(value)

        result = getattr(entity, dataset_name)
        if callable(result):
            if kwargs:
                result = result(**kwargs)
            else:
                result = result()
        return result
Example #18
 def import_sensors(user, project, zip_path, sensors_type):
     ### Retrieve Adapter instance 
     group = dao.find_group('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
     importer = ABCAdapter.build_adapter(group)
     args = {'sensors_file': zip_path, 'sensors_type': sensors_type}
     ### Launch Operation
     FlowService().fire_operation(importer, user, project.id, **args)
Example #19
 def _create_value_wrapper(self):
     """Persist ValueWrapper"""
     value_ = ValueWrapper(data_value=5.0, data_name="my_value")
     self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
     valuew = self.get_all_entities(ValueWrapper)
     self.assertEqual(1, len(valuew), "Should be one value wrapper")
     return ABCAdapter.load_entity_by_gid(valuew[0].gid)
Example #20
def fire_simulation(project_id=1, **kwargs):
    project = dao.get_project_by_id(project_id)
    flow_service = FlowService()

    # below the holy procedure to launch with the correct parameters taken from the defaults
    stored_adapter = flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    simulator_adapter = ABCAdapter.build_adapter(stored_adapter)
    flatten_interface = simulator_adapter.flaten_input_interface()
    itree_mngr = flow_service.input_tree_manager
    prepared_flatten_interface = itree_mngr.fill_input_tree_with_options(flatten_interface, project.id,
                                                                         stored_adapter.fk_category)
    launch_args = {}
    for entry in prepared_flatten_interface:
        value = entry['default']
        if isinstance(value, dict):
            value = str(value)
        if hasattr(value, 'tolist'):
            value = value.tolist()
        launch_args[entry['name']] = value
    launch_args.update(**kwargs)
    # end of magic

    launched_operation = flow_service.fire_operation(simulator_adapter, project.administrator,
                                                     project.id, **launch_args)[0]
    return launched_operation
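
A hedged usage sketch: launch with the harvested defaults, overriding one flat parameter. The parameter name below is an assumption about the flattened simulator interface, not a verified key:

# operation = fire_simulation(project_id=1, simulation_length='1000.0')
# print(operation.status)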
Example #21
    def test_happy_flow_surface_import(self):
        """
        Verifies the happy flow for importing a surface.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())
        group = dao.find_group(
            "tvb.adapters.uploaders.projection_matrix_importer", "ProjectionMatrixSurfaceEEGImporter"
        )
        importer = ABCAdapter.build_adapter(group)

        file_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)), "projection_eeg_65_surface_16k.npy"
        )
        args = {
            "projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
        }

        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())

        self.assertEqual(dt_count_before + 1, dt_count_after)
Example #22
 def launch_visualization(visualization, frame_width=None, frame_height=None, 
                          method_name=ABCAdapter.LAUNCH_METHOD, is_preview=True):
     """
     :param visualization: a visualization workflow step
     """
     dynamic_params = visualization.dynamic_param
     static_params = visualization.static_param
     parameters_dict = static_params
     current_project_id = 0
     ## Current operation id needed for export mechanism. So far just use
     ## the operation of the workflow_step from which the inputs are taken
     for param in dynamic_params:
         step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
         datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
         referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
         referred_operation_id = referred_workflow_step.fk_operation
         referred_operation = dao.get_operation_by_id(referred_operation_id)
         current_project_id = referred_operation.fk_launched_in
         if type(datatype_index) is IntType:
             ## Entry is the output of a previous step ##
             datatypes = dao.get_results_for_operation(referred_operation_id)
             parameters_dict[param] = datatypes[datatype_index].gid
         else:
             ## Entry is the input of a previous step ###
             parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index]
     algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
     adapter_instance = ABCAdapter.build_adapter(algorithm.algo_group)
     adapter_instance.current_project_id = current_project_id
     prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
     if frame_width is not None:
         prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)
     if isinstance(adapter_instance, ABCMPLH5Displayer) and is_preview is True:
         prepared_inputs[ABCMPLH5Displayer.SHOW_FULL_TOOLBAR] = False
     result = getattr(adapter_instance, method_name)(**prepared_inputs)
     return result, parameters_dict
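
For context, the dynamic_param structure consumed above maps each parameter name to the workflow step it should be read from; the values here are illustrative only:

dynamic_params = {
    'input_data': {
        WorkflowStepConfiguration.STEP_INDEX_KEY: 1,     # read from workflow step 1
        WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0  # ...and take its first result
    }
}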
Example #23
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group("tvb.adapters.uploaders.region_mapping_importer", "RegionMapping_Importer")
        importer = ABCAdapter.build_adapter(group)

        args = {
            "mapping_file": import_file_path,
            "surface": surface_gid,
            "connectivity": connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: "test",
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(fields=[FilterChain.datatype + ".create_date"], operations=[">"], values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping
Example #24
    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)
        
        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers
Example #25
    def test_wrong_shape(self):
        """
        Verifies that importing a different shape throws exception
        """
        group = dao.find_group(
            "tvb.adapters.uploaders.projection_matrix_importer", "ProjectionMatrixSurfaceEEGImporter"
        )
        importer = ABCAdapter.build_adapter(group)

        file_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)), "projection_eeg_62_surface_16k.mat"
        )
        args = {
            "projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
        }

        try:
            FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
            self.fail("This was expected not to run! 62 rows in proj matrix, but 65 sensors")
        except OperationException:
            pass
Example #26
    def get_configurable_interface(self):
        """
        Given an algorithm identifier, go through the adapter chain, and merge
        their input tree with the declared overwrites 
        """
        chain_adapters = self.reader.get_adapters_chain(self.algo_identifier)
        result = []
        for adapter_declaration in chain_adapters:

            adapter_instance, algorithm_group = self.build_adapter_from_declaration(adapter_declaration)

            algorithm_field = adapter_declaration[KEY_FIELD]
            if algorithm_field:
                default_algorithm = adapter_declaration[ABCAdapter.KEY_DEFAULT]
            else:
                default_algorithm = ''

            all_portlet_defined_params = self.reader.get_inputs(self.algo_identifier)
            specific_adapter_overwrites = [entry for entry in all_portlet_defined_params
                                           if ATT_OVERWRITE in entry and entry[ATT_OVERWRITE] ==
                                              adapter_declaration[ABCAdapter.KEY_NAME]]

            if default_algorithm:
                alg_inputs = adapter_instance.xml_reader.get_inputs(default_algorithm)
                prefix = ABCAdapter.form_prefix(algorithm_field, None, default_algorithm)
            else:
                alg_inputs = adapter_instance.get_input_tree()
                prefix = ''

            replace_values = self._prepare_input_tree(alg_inputs, specific_adapter_overwrites, prefix)
            adapter_configuration = AdapterConfiguration(replace_values, algorithm_group, prefix=prefix,
                                                         subalgorithm_field=algorithm_field,
                                                         subalgorithm_value=default_algorithm)
            result.append(adapter_configuration)
        return result
Example #27
    def execute_post(self, project_id, submit_url, step_key, algorithm, **data):
        """ Execute HTTP POST on a generic step."""
        errors = None
        adapter_instance = ABCAdapter.build_adapter(algorithm)

        try:
            result = self.flow_service.fire_operation(adapter_instance, common.get_logged_user(), project_id, **data)

            # Store input data in session, for informing user of it.
            step = self.flow_service.get_category_by_id(step_key)
            if not step.rawinput:
                self.context.add_adapter_to_session(None, None, copy.deepcopy(data))

            if isinstance(adapter_instance, ABCDisplayer):
                if isinstance(result, dict):
                    result[common.KEY_OPERATION_ID] = adapter_instance.operation_id
                    return result
                else:
                    common.set_error_message("Invalid result returned from Displayer! Dictionary is expected!")
            else:
                if isinstance(result, list):
                    result = "Launched %s operations." % len(result)
                common.set_important_message(str(result))
        except formencode.Invalid, excep:
            errors = excep.unpack_errors()
Example #28
 def _create_value_wrapper(self):
     """Persist ValueWrapper"""
     value_ = ValueWrapper(data_value=5.0, data_name="my_value")
     self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
     valuew = self.flow_service.get_available_datatypes(self.test_project.id,
                                                        "tvb.datatypes.mapped_values.ValueWrapper")[0]
     self.assertEqual(len(valuew), 1, "Should be only one value wrapper")
     return ABCAdapter.load_entity_by_gid(valuew[0][2])
Example #29
    def _run_cff_importer(self, cff_path):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.cff_importer', 'CFF_Importer')
        importer = ABCAdapter.build_adapter(group)
        args = {'cff': cff_path, DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
Example #30
 def _review_operation_inputs(operation_gid):
     """
     :returns: A list of DataTypes that are used as input parameters for the specified operation,
              and a dictionary with all operation parameters that differ from the default ones.
     """
     operation = dao.get_operation_by_gid(operation_gid)
     parameters = json.loads(operation.parameters)
     adapter = ABCAdapter.build_adapter(operation.algorithm.algo_group)
     return adapter.review_operation_inputs(parameters)
Example #31
    def get_template_from_context(self):
        """
        Return the parameters for the local connectivity in case one is stored in context. Load the entity
        and use it to populate the defaults from the interface accordingly.
        """
        context = common.get_from_session(KEY_LCONN_CONTEXT)
        selected_local_conn = ABCAdapter.load_entity_by_gid(context.selected_entity)
        cutoff = selected_local_conn.cutoff
        equation = selected_local_conn.equation
        surface = selected_local_conn.surface

        default_dict = {'surface': surface.gid, 'cutoff': cutoff}
        if equation is not None:
            equation_type = equation.__class__.__name__
            default_dict['equation'] = equation_type
            for param in equation.parameters:
                prepared_name = 'equation_parameters_option_' + str(equation_type)
                prepared_name = prepared_name + '_parameters_parameters_' + str(param)
                default_dict[prepared_name] = equation.parameters[param]
        else:
            msg = "There is no equation specified for this local connectivity. "
            msg += "The default equation is displayed in the spatial field."
            self.logger.warning(msg)
            common.set_info_message(msg)

        default_dict[DataTypeMetaData.KEY_TAG_1] = selected_local_conn.user_tag_1

        input_list = self.get_creator_and_interface(LOCAL_CONN_CREATOR_MODULE, LOCAL_CONN_CREATOR_CLASS,
                                                    LocalConnectivity(), lock_midpoint_for_eq=[1])[1]
        input_list = self._add_extra_fields_to_interface(input_list)
        input_list = InputTreeManager.fill_defaults(input_list, default_dict)

        template_specification = {
            'inputList': input_list,
            common.KEY_PARAMETERS_CONFIG: False,
            'equationViewerUrl': '/spatial/localconnectivity/get_equation_chart',
            'equationsPrefixes': json.dumps(self.plotted_equations_prefixes)
        }
        return template_specification
Example #32
    def import_surface_zip(user, project, zip_path, surface_type, zero_based):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer',
                               'ZIPSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        args = {
            'uploaded': zip_path,
            'surface_type': surface_type,
            'zero_based_triangles': zero_based
        }

        ### Launch Operation
        FlowService().fire_operation(importer, user, project.id, **args)
Example #33
 def setUp(self):
     """
     This method sets up the necessary paths for testing.
     """
     self.folder_path = os.path.dirname(__file__)
     #tvb_test path
     core_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
     self.old_path = cfg.CURRENT_DIR
     cfg.CURRENT_DIR = os.path.dirname(core_path)
     adapters_init.__xml_folders__ = [os.path.join('core', 'adapters')]
     self.introspector = Introspector("tvb_test")
     self.introspector.introspect(True)
     xml_group_path = os.path.join('core', 'adapters', "test_group.xml")
     algo_group = dao.find_group('tvb_test.adapters.testgroupadapter',
                                 'TestGroupAdapter', xml_group_path)
     self.xml_group_adapter = ABCAdapter.build_adapter(algo_group)
     self.xml_group_reader = self.xml_group_adapter.xml_reader
Example #34
 def prepare_adapter(self, project_id, stored_adapter):
     """
     Having a StoredAdapter, return the Tree Adapter Interface object, populated with datatypes from 'project_id'.
     """
     adapter_module = stored_adapter.module
     adapter_name = stored_adapter.classname
     try:
         # Prepare Adapter Interface, by populating with existent data,
         # in case of a parameter of type DataType.
         adapter_instance = ABCAdapter.build_adapter(stored_adapter)
         interface = adapter_instance.get_input_tree()
         interface = self.input_tree_manager.fill_input_tree_with_options(interface, project_id, stored_adapter.fk_category)
         interface = self.input_tree_manager.prepare_param_names(interface)
         return interface
     except Exception:
         self.logger.exception('Not found:' + adapter_name + ' in:' + adapter_module)
         raise OperationException("Could not prepare " + adapter_name)
Example #35
    def get_node_matrix(self, datatype_group_gid):

        algorithm = self.flow_service.get_algorithm_by_module_and_class(ISOCLINE_PSE_ADAPTER_MODULE,
                                                                        ISOCLINE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
                return adapter.prepare_node_data(datatype_group)
            except LaunchException as ex:
                self.logger.error(ex.message)
                error_msg = urllib.quote(ex.message)
        else:
            error_msg = urllib.quote("Isocline PSE requires a 2D range of floating point values.")

        name = urllib.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
Example #36
    def _import(self, import_file_path=None, expected_result_class=StructuralMRIIndex, connectivity_gid=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        # Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')

        form = NIFTIImporterForm()
        form.fill_from_post({
            'data_file': Part(import_file_path, HeaderMap({}), ''),
            'apply_corrections': 'True',
            'connectivity': connectivity_gid,
            'mappings_file': Part(self.TXT_FILE, HeaderMap({}), ''),
            'Data_Subject': 'bla bla'
        })
        form.data_file.data = import_file_path
        form.mappings_file.data = self.TXT_FILE
        view_model = form.get_view_model()()
        view_model.data_subject = 'bla bla'
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, view_model=view_model)

        dts, count = dao.get_values_of_datatype(self.test_project.id, expected_result_class, None)
        assert 1 == count, "Project should contain only one data type."

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        assert result is not None, "Result should not be none"
        return result
Example #37
 def submit_noise_configuration(self):
     """
     Collects the noise configuration values from all the models used for the connectivity nodes.
     """
     context_noise_config = base.get_from_session(KEY_CONTEXT_NC)
     burst_configuration = base.get_from_session(base.KEY_BURST_CONFIG)
     _, simulator_group = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
     simulator_adapter = ABCAdapter.build_adapter(simulator_group)
     for param_name in simulator_adapter.noise_configurable_parameters():
         burst_configuration.update_simulation_parameter(param_name, str(context_noise_config.noise_values))
     ### Clean from session drawing context
     base.remove_from_session(KEY_CONTEXT_NC)
     ### Update in session BURST configuration for burst-page.
     base.add2session(base.KEY_BURST_CONFIG, burst_configuration.clone())
     raise cherrypy.HTTPRedirect("/burst/")
Example #38
    def step_2(self):
        """
        Generate the required template dictionary for the second step.
        """
        current_region_stimulus = common.get_from_session(KEY_REGION_STIMULUS)
        region_stim_selector_form = StimulusRegionSelectorForm(common.get_current_project().id)
        region_stim_selector_form.region_stimulus.data = current_region_stimulus.gid.hex
        region_stim_selector_form.display_name.data = common.get_from_session(KEY_REGION_STIMULUS_NAME)

        template_specification = dict(title="Spatio temporal - Region stimulus")
        template_specification['mainContent'] = 'spatial/stimulus_region_step2_main'
        template_specification['next_step_url'] = '/spatial/stimulus/region/step_2_submit'
        template_specification['regionStimSelectorForm'] = self.render_adapter_form(region_stim_selector_form)

        default_weights = current_region_stimulus.weight
        if len(default_weights) == 0:
            selected_connectivity = ABCAdapter.load_entity_by_gid(current_region_stimulus.connectivity.hex)
            if selected_connectivity is None:
                common.set_error_message(self.MSG_MISSING_CONNECTIVITY)
                default_weights = numpy.array([])
            else:
                default_weights = StimuliRegion.get_default_weights(selected_connectivity.number_of_regions)

        template_specification['baseUrl'] = self.base_url
        self.plotted_equation_prefixes = {self.DISPLAY_NAME_FIELD: region_stim_selector_form.display_name.name}
        template_specification['fieldsWithEvents'] = json.dumps(self.plotted_equation_prefixes)
        template_specification['node_weights'] = json.dumps(default_weights.tolist())
        template_specification[common.KEY_PARAMETERS_CONFIG] = False
        template_specification.update(self.display_connectivity(current_region_stimulus.connectivity.hex))
        return self.fill_default_attributes(template_specification)
Example #39
    def test_h5_import(self):
        """
            This method tests import of TVB data in h5 format. Single data type / import
        """
        self._import(self.h5_file_path)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id,
            self.datatype.module + "." + self.datatype.type)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        data_type_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(data_type_entity is not None,
                        "Datatype should not be none")
        self.assertEqual(self.datatype.gid, data_type_entity.gid,
                         "Imported datatype should have the same gid")
Example #40
    def launch_operation(self, operation_id, send_to_cluster=False, adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operation in the same workflow.
        """
        if operation_id is not None:
            operation = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                algorithm = operation.algorithm
                group = dao.get_algo_group_by_id(algorithm.fk_algo_group)
                adapter_instance = ABCAdapter.build_adapter(group)
            PARAMS = parse_json_parameters(operation.parameters)

            if send_to_cluster:
                self._send_to_cluster([operation], adapter_instance)
            else:
                self.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
Example #41
    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface
Example #42
    def fill_from_trait(self, trait):
        super(SpatialAverageMonitorForm, self).fill_from_trait(trait)
        connectivity_index = ABCAdapter.load_entity_by_gid(self.session_stored_simulator.connectivity)

        if self.session_stored_simulator.is_surface_simulation is False:
            self.default_mask.choices.pop(SpatialAverage.REGION_MAPPING)

            if connectivity_index.has_cortical_mask is False:
                self.default_mask.choices.pop(SpatialAverage.CORTICAL)

            if connectivity_index.has_hemispheres_mask is False:
                self.default_mask.choices.pop(SpatialAverage.HEMISPHERES)

        else:
            self.default_mask.data = SpatialAverage.REGION_MAPPING
            self.default_mask.disabled = True
Example #43
 def launch_visualization(visualization, frame_width=None, frame_height=None,
                          method_name=ABCAdapter.LAUNCH_METHOD, is_preview=True):
     """
     :param visualization: a visualization workflow step
     """
     dynamic_params = visualization.dynamic_param
     static_params = visualization.static_param
     parameters_dict = static_params
     current_project_id = 0
     ## Current operation id needed for export mechanism. So far just use
     ## the operation of the workflow_step from which the inputs are taken
     for param in dynamic_params:
         step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY]
         datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]
         referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index)
         referred_operation_id = referred_workflow_step.fk_operation
         referred_operation = dao.get_operation_by_id(referred_operation_id)
         current_project_id = referred_operation.fk_launched_in
         if type(datatype_index) is IntType:
             ## Entry is the output of a previous step ##
             datatypes = dao.get_results_for_operation(referred_operation_id)
             parameters_dict[param] = datatypes[datatype_index].gid
         else:
             ## Entry is the input of a previous step ###
             parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index]
     algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm)
     adapter_instance = ABCAdapter.build_adapter(algorithm.algo_group)
     adapter_instance.current_project_id = current_project_id
     prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict)
     if frame_width is not None:
         prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height)
     if isinstance(adapter_instance, ABCMPLH5Displayer) and is_preview is True:
         prepared_inputs[ABCMPLH5Displayer.SHOW_FULL_TOOLBAR] = False
     result = getattr(adapter_instance, method_name)(**prepared_inputs)
     return result, parameters_dict
Example #44
    def _get_new_form_view_model(operation, xml_parameters):
        # type: (Operation) -> ViewModel
        ad = ABCAdapter.build_adapter(operation.algorithm)
        view_model = ad.get_view_model_class()()

        if xml_parameters:
            params = json.loads(xml_parameters)
            declarative_attrs = type(view_model).declarative_attrs

            for param in params:
                new_param_name = param
                if param[0] == "_":
                    new_param_name = param[1:]
                new_param_name = new_param_name.lower()
                if new_param_name in declarative_attrs:
                    setattr(view_model, new_param_name, params[param])
        return view_model
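
A minimal sketch of the name normalization above, with a hypothetical helper name: strip a single leading underscore, then lowercase, before matching against the view model's declarative attributes:

def _normalize(param):
    return (param[1:] if param.startswith('_') else param).lower()

assert _normalize('_Simulation_Length') == 'simulation_length'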
Example #45
    def build(test_user, test_project):
        view_model = BaseBCTModel()
        view_model.connectivity = get_filtered_datatypes(test_project.id, ConnectivityIndex, page_size=1)[0][0][2]

        adapter = ABCAdapter.build_adapter_from_class(TransitivityBinaryDirected)
        op = OperationService().fire_operation(adapter, test_user, test_project.id, view_model=view_model)
        # wait for the operation to finish
        tries = 5
        while not op.has_finished and tries > 0:
            sleep(5)
            tries = tries - 1
            op = dao.get_operation_by_id(op.id)

        value_wrapper = try_get_last_datatype(test_project.id, ValueWrapperIndex)
        count = dao.count_datatypes(test_project.id, ValueWrapperIndex)
        assert 1 == count
        return value_wrapper
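
The busy-wait above can be factored into a small helper; a sketch under the same assumptions (dao and the operation entity come from the surrounding test context):

import time

def wait_for_operation(op, retries=5, delay=5):
    # Poll until the operation finishes or the retries run out.
    while not op.has_finished and retries > 0:
        time.sleep(delay)
        retries -= 1
        op = dao.get_operation_by_id(op.id)
    return op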
Example #46
    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return review_operation_inputs_from_adapter(adapter, operation)

        except Exception:
            self.logger.exception("Could not load details for operation %s" % operation_gid)
            if operation.view_model_gid:
                changed_parameters = dict(Warning="Algorithm changed dramatically. We can not offer more details")
            else:
                changed_parameters = dict(Warning="GID parameter is missing. Old implementation of the operation.")
            return [], changed_parameters
Example #47
    def get_template_for_adapter(self, project_id, step_key, algo_group, submit_url, session_reset=True):
        """ Get Input HTML Interface template for a given adapter """
        try:
            if session_reset:
                self.context.clean_from_session()

            group = None
            # Cache some values in session, for performance
            previous_tree = self.context.get_current_input_tree()
            previous_sub_step = self.context.get_current_substep()
            if not session_reset and previous_tree is not None and previous_sub_step == algo_group.id:
                adapter_interface = previous_tree
            else:
                group, adapter_interface = self.flow_service.prepare_adapter(project_id, algo_group)
                self.context.add_adapter_to_session(algo_group, adapter_interface)

            category = self.flow_service.get_category_by_id(step_key)
            title = "Fill parameters for step " + category.displayname.lower()
            if group:
                title = title + " - " + group.displayname

            current_defaults = self.context.get_current_default()
            if current_defaults is not None:
                # Change default values in tree, according to selected input
                adapter_interface = ABCAdapter.fill_defaults(adapter_interface, current_defaults)

            template_specification = dict(submitLink=submit_url, inputList=adapter_interface, title=title)
            self._populate_section(algo_group, template_specification)
            return template_specification
        except OperationException, oexc:
            self.logger.error("Inconsistent Adapter")
            self.logger.exception(oexc)
            base.set_warning_message('Inconsistent Adapter! Please review the link (development problem)!')
Example #48
    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet_id and a project_id, first build the portlet
        entity, then get its configurable interface.
        
        :param portlet_configuration: a portlet configuration entity. It holds at the
            least the portlet_id, and in case any default parameters were saved
            they can be rebuilt from the analyzers // visualizer parameters
        :param project_id: the id of the current project   
            
        :returns: the portlet interface will be of the following form::
            [{'interface': adapter_interface, 
            'prefix': prefix_for_parameter_names, 
            'subalg': {algorithm_field_name: default_algorithm_value},
            'algo_group': algorithm_group,
            'alg_ui_name': displayname},
            ......]
            A list of dictionaries for each adapter that makes up the portlet.
            
        """
        portlet_entity = dao.get_portlet_by_id(
            portlet_configuration.portlet_id)
        if portlet_entity is None:
            raise InvalidPortletConfiguration(
                "No portlet entity located in database with id=%s. "
                "Portlet configuration %s is not valid." %
                (portlet_configuration.portlet_id, portlet_configuration))
        portlet_configurer = PortletConfigurer(portlet_entity)
        portlet_interface = portlet_configurer.get_configurable_interface()
        self.logger.debug("Created interface for portlet " +
                          str([portlet_entity]))

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            interface = FlowService().prepare_parameters(
                interface, project_id, adapter_conf.group.fk_category)
            interface = ABCAdapter.prepare_param_names(interface,
                                                       adapter_conf.prefix)
            adapter_conf.interface = interface

        portlet_configurer.update_default_values(portlet_interface,
                                                 portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)

        return portlet_interface
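
A minimal, self-contained illustration of the shape of the returned list; the stub class below is an assumption standing in for AdapterConfiguration, matching only the attributes used above:

from dataclasses import dataclass, field

@dataclass
class AdapterConfigurationStub:
    # Only the attributes touched above: the input tree and its prefix.
    interface: list = field(default_factory=list)
    prefix: str = ""

portlet_interface = [AdapterConfigurationStub(interface=[{"name": "weights"}],
                                              prefix="Adapter1_")]
for adapter_conf in portlet_interface:
    print(adapter_conf.prefix, [entry["name"] for entry in adapter_conf.interface])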
Example #49
    def get_configurable_interface(self):
        """
        Given an algorithm identifier, go through the adapter chain and merge
        each adapter's input tree with the declared overwrites.
        """
        chain_adapters = self.reader.get_adapters_chain(self.algo_identifier)
        result = []
        for adapter_declaration in chain_adapters:

            adapter_instance, algorithm_group = self.build_adapter_from_declaration(
                adapter_declaration)

            algorithm_field = adapter_declaration[KEY_FIELD]
            if algorithm_field:
                default_algorithm = adapter_declaration[ABCAdapter.KEY_DEFAULT]
            else:
                default_algorithm = ''

            all_portlet_defined_params = self.reader.get_inputs(
                self.algo_identifier)
            specific_adapter_overwrites = [
                entry for entry in all_portlet_defined_params
                if ATT_OVERWRITE in entry and entry[ATT_OVERWRITE] ==
                adapter_declaration[ABCAdapter.KEY_NAME]
            ]

            if default_algorithm:
                alg_inputs = adapter_instance.xml_reader.get_inputs(
                    default_algorithm)
                prefix = ABCAdapter.form_prefix(algorithm_field, None,
                                                default_algorithm)
            else:
                alg_inputs = adapter_instance.get_input_tree()
                prefix = ''

            replace_values = self._prepare_input_tree(
                alg_inputs, specific_adapter_overwrites, prefix)
            adapter_configuration = AdapterConfiguration(
                replace_values,
                algorithm_group,
                prefix=prefix,
                subalgorithm_field=algorithm_field,
                subalgorithm_value=default_algorithm)
            result.append(adapter_configuration)
        return result
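
The overwrite-filtering step above, restated as a runnable toy; the constant values and sample data are invented for illustration:

ATT_OVERWRITE = "overwrite"   # assumed value of the marker key
KEY_NAME = "name"             # assumed value of ABCAdapter.KEY_NAME

all_portlet_defined_params = [{"overwrite": "SimulatorAdapter", "value": 1},
                              {"overwrite": "OtherAdapter", "value": 2}]
adapter_declaration = {KEY_NAME: "SimulatorAdapter"}

specific_adapter_overwrites = [
    entry for entry in all_portlet_defined_params
    if ATT_OVERWRITE in entry and entry[ATT_OVERWRITE] == adapter_declaration[KEY_NAME]]
print(specific_adapter_overwrites)  # -> [{'overwrite': 'SimulatorAdapter', 'value': 1}]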
Example #50
    def load_surface_stimulus(self, surface_stimulus_gid, from_step):
        """
        Loads the interface for the selected surface stimulus.
        """
        surface_stim_index = ABCAdapter.load_entity_by_gid(
            surface_stimulus_gid)
        surface_stim_h5_path = h5.path_for_stored_index(surface_stim_index)
        existent_surface_stim = SurfaceStimulusCreatorModel()
        with StimuliSurfaceH5(surface_stim_h5_path) as surface_stim_h5:
            surface_stim_h5.load_into(existent_surface_stim)

        existent_surface_stim.surface = uuid.UUID(
            surface_stim_index.fk_surface_gid)

        common.add2session(KEY_SURFACE_STIMULI, existent_surface_stim)
        common.add2session(KEY_SURFACE_STIMULI_NAME,
                           surface_stim_index.user_tag_1)
        return self.do_step(from_step)
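
A toy version of the load-into-view-model pattern above, with invented stand-ins for the H5 wrapper and model; only the call shape matches the real code:

class ToyH5:
    # Mimics the context-manager + load_into shape of StimuliSurfaceH5.
    def __init__(self, data):
        self.data = data
    def __enter__(self):
        return self
    def __exit__(self, *exc):
        return False
    def load_into(self, model):
        for key, value in self.data.items():
            setattr(model, key, value)

class ToyModel:
    pass

model = ToyModel()
with ToyH5({"spatial": "Gaussian"}) as stim_h5:
    stim_h5.load_into(model)
print(model.spatial)  # -> Gaussian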
Example #51
    def draw_isocline_exploration(self, datatype_group_gid):

        algorithm = self.flow_service.get_algorithm_by_module_and_class(
            IntrospectionRegistry.ISOCLINE_PSE_ADAPTER_MODULE,
            IntrospectionRegistry.ISOCLINE_PSE_ADAPTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        if self._is_compatible(algorithm, datatype_group_gid):
            try:
                return adapter.burst_preview(datatype_group_gid)
            except LaunchException as ex:
                self.logger.error(ex.message)
                error_msg = urllib.parse.quote(ex.message)
        else:
            error_msg = urllib.parse.quote(
                "Isocline PSE requires a 2D range of floating point values.")

        name = urllib.parse.quote(adapter._ui_name)
        raise cherrypy.HTTPRedirect(REDIRECT_MSG % (name, error_msg))
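
The URL-encoding step above uses the standard library directly; shown standalone:

import urllib.parse

error_msg = urllib.parse.quote("Isocline PSE requires a 2D range of floating point values.")
print(error_msg)  # -> Isocline%20PSE%20requires%20a%202D%20range%20of%20floating%20point%20values.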
Example #52
    def _import(self, import_file_name):
        ### Retrieve Adapter instance
        group = dao.find_group(
            'tvb.adapters.uploaders.connectivity_measure_importer',
            'ConnectivityMeasureImporter')
        importer = ABCAdapter.build_adapter(group)
        path = os.path.join(os.path.dirname(test_data.__file__),
                            import_file_name)

        args = {
            'data_file': path,
            'connectivity': self.connectivity.gid,
            DataTypeMetaData.KEY_SUBJECT: "John"
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
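
A hedged sketch of a test that could drive this helper; the file name, index class, and count check are assumptions modelled on the other snippets on this page:

    def test_happy_flow_import(self):
        # File name is an assumption for illustration only.
        self._import("connectivity_measure.mat")
        # Count pattern borrowed from the ValueWrapperIndex snippet above;
        # ConnectivityMeasureIndex is assumed to be the resulting index class.
        count = dao.count_datatypes(self.test_project.id, ConnectivityMeasureIndex)
        assert count > 0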
Example #53
    def display_connectivity(connectivity_gid):
        """
        Generates the html for displaying the connectivity matrix.
        """
        connectivity = ABCAdapter.load_entity_by_gid(connectivity_gid)
        if connectivity is None:
            raise MissingDataException(
                RegionStimulusController.MSG_MISSING_CONNECTIVITY + "!!")
        current_project = common.get_current_project()
        conn_path = FilesHelper().get_project_folder(
            current_project, str(connectivity.fk_from_operation))
        connectivity_viewer_params = ConnectivityViewer.get_connectivity_parameters(
            connectivity, conn_path)

        template_specification = dict()
        template_specification['isSingleMode'] = True
        template_specification.update(connectivity_viewer_params)
        return template_specification
Example #54
    def create_stimulus(self):
        """
        Creates a stimulus from the given data.
        """
        try:
            current_surface_stim = common.get_from_session(KEY_SURFACE_STIMULI)
            surface_stimulus_creator = ABCAdapter.build_adapter_from_class(SurfaceStimulusCreator)
            self.flow_service.fire_operation(surface_stimulus_creator, common.get_logged_user(),
                                             common.get_current_project().id, view_model=current_surface_stim)
            common.set_important_message("The operation for creating the stimulus was successfully launched.")

        except (NameError, ValueError, SyntaxError):
            common.set_error_message("The operation failed due to invalid parameter input.")
            return False
        except Exception as ex:
            common.set_error_message(ex)
            return False
        return True
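
A runnable toy of the success/failure contract above: True on a clean launch, False when one of the caught exception types is raised:

def launch(creator):
    try:
        creator()
        return True
    except (NameError, ValueError, SyntaxError):
        return False

print(launch(lambda: None))        # -> True
print(launch(lambda: int("bad")))  # -> False (ValueError is caught)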
Example #55
    def _get_entity(self, expected_data, filters=None):
        """
        Checks there is exactly one datatype with required specifications and returns it

        :param expected_data: a class whose entity is to be returned
        :param filters: optional, the returned entity will also have the required filters
        :return: an object of class `expected_data`
        """
        dt_full_name = expected_data.__module__ + "." + expected_data.__name__
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, dt_full_name, filters)[0]
        self.assertEqual(
            1, len(data_types), "Project should contain only one data type: " +
            str(expected_data.type))

        entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(entity is not None, "Instance should not be none")
        return entity
Example #56
def import_h5(file_path, project_id):
    service = OperationService()

    # This project ID needs to exist in the DB; it can be taken from the web interface:
    project = dao.get_project_by_id(project_id)

    adapter_instance = ABCAdapter.build_adapter_from_class(TVBImporter)

    # Prepare the input arguments as if they were coming from a web UI submit:
    launch_args = {"data_file": file_path}

    print("We will try to import file at path " + file_path)
    # launch an operation and have the results stored both in DB and on disk
    launched_operations = service.fire_operation(adapter_instance,
                                                 project.administrator,
                                                 project.id, **launch_args)

    print("Operation launched. Check the web UI")
Example #57
    def _prelaunch(self,
                   operation,
                   view_model,
                   uid=None,
                   available_disk_space=0):
        """
        Before going with the usual prelaunch, get from input parameters the 'subject'.
        """
        self.generic_attributes.subject = view_model.data_subject

        trait_upload_field_names = list(
            self.get_form_class().get_upload_information().keys())
        if view_model.encrypted_aes_key is not None:
            for upload_field_name in trait_upload_field_names:
                self._decrypt_content(view_model, upload_field_name)

        return ABCAdapter._prelaunch(self, operation, view_model, uid,
                                     available_disk_space)
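
The decrypt-before-prelaunch step, restated as a self-contained toy; the dict-based view model and the decrypt callback are invented for illustration:

def decrypt_uploads(view_model, upload_fields, decrypt_one):
    # Decrypt every declared upload field, but only when an AES key
    # accompanied the submission.
    if view_model.get("encrypted_aes_key") is not None:
        for field_name in upload_fields:
            view_model[field_name] = decrypt_one(view_model[field_name])
    return view_model

vm = {"encrypted_aes_key": "k1", "data_file": "cipher-bytes"}
print(decrypt_uploads(vm, ["data_file"], lambda blob: "plain-bytes"))
# -> {'encrypted_aes_key': 'k1', 'data_file': 'plain-bytes'}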
Example #58
    def display_connectivity(connectivity_gid):
        """
        Generates the html for displaying the connectivity matrix.
        """
        connectivity = ABCAdapter.load_entity_by_gid(connectivity_gid)

        current_project = common.get_current_project()
        file_handler = FilesHelper()
        conn_path = file_handler.get_project_folder(
            current_project, str(connectivity.fk_from_operation))

        connectivity_viewer_params = ConnectivityViewer.get_connectivity_parameters(
            connectivity, conn_path)

        template_specification = dict()
        template_specification['isSingleMode'] = True
        template_specification.update(connectivity_viewer_params)
        return template_specification
Example #59
    def start_operation(operation_id):
        LOGGER.info("Start processing operation id:{}".format(operation_id))

        operation = dao.get_operation_by_id(operation_id)
        operation.queue_full = False
        dao.store_entity(operation)

        thread = OperationExecutor(operation_id)
        CURRENT_ACTIVE_THREADS.append(thread)

        adapter_instance = ABCAdapter.build_adapter(operation.algorithm)
        if adapter_instance.launch_mode is AdapterLaunchModeEnum.SYNC_DIFF_MEM:
            thread.run()
            operation = dao.get_operation_by_id(operation_id)
            if operation.additional_info and operation.status == STATUS_ERROR:
                raise TVBException(operation.additional_info)
        else:
            thread.start()
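
The launch-mode branch above, reduced to a runnable toy: synchronous adapters execute on the calling thread (so memory-heavy work is not duplicated), everything else on a background thread. Only the stdlib is used:

import threading

def run_or_start(work, synchronous):
    thread = threading.Thread(target=work)
    if synchronous:
        thread.run()    # runs work() on the calling thread, blocking
    else:
        thread.start()  # runs work() on the new thread
    return thread

run_or_start(lambda: print("sync"), synchronous=True)
run_or_start(lambda: print("async"), synchronous=False).join()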
Example #60
    def apply_equation(self, **kwargs):
        """
        Applies an equation for computing a model parameter.
        """
        submitted_data = ABCAdapter.collapse_arrays(kwargs, ['model_param'])
        model_param, equation = self._compute_equation(submitted_data)
        context_model_parameters = common.get_from_session(KEY_CONTEXT_MPS)
        context_model_parameters.apply_equation(model_param, equation)
        common.add2session(KEY_CONTEXT_MPS, context_model_parameters)
        template_specification = self.get_surface_model_parameters_data(
            model_param)
        template_specification = self._add_entra_equation_entries(
            template_specification, kwargs['min_x'], kwargs['max_x'])
        template_specification[
            'equationViewerUrl'] = '/spatial/modelparameters/surface/get_equation_chart'
        template_specification['equationsPrefixes'] = json.dumps(
            self.plotted_equations_prefixes)
        return self.fill_default_attributes(template_specification)
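
Illustrative only: a toy of the "collapse" step above, gathering form fields that share a prefix into one nested dict; the real ABCAdapter.collapse_arrays is more involved:

def collapse_flat(kwargs, prefixes):
    collapsed = {}
    for key, value in kwargs.items():
        for prefix in prefixes:
            if key.startswith(prefix + "_"):
                # Strip the prefix and group the remainder under it.
                collapsed.setdefault(prefix, {})[key[len(prefix) + 1:]] = value
                break
        else:
            collapsed[key] = value
    return collapsed

print(collapse_flat({"model_param_K": "0.5", "min_x": 0}, ["model_param"]))
# -> {'model_param': {'K': '0.5'}, 'min_x': 0}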