Beispiel #1
0
    def compute_data_for_gradient_view(self, local_connectivity_gid,
                                       selected_triangle):
        """
        When the user loads an existent local connectivity and he picks a vertex from the used surface, this
        method computes the data needed for drawing a gradient view corresponding to that vertex.

        Returns a json which contains the data needed for drawing a gradient view for the selected vertex.

        :param local_connectivity_gid: GID of the LocalConnectivity index to load
        :param selected_triangle: index (int, or numeric string) of the picked surface triangle
        """
        lconn_index = load_entity_by_gid(local_connectivity_gid)
        triangle_index = int(selected_triangle)

        surface_indx = load_entity_by_gid(lconn_index.fk_surface_gid)
        surface_h5 = h5.h5_file_for_index(surface_indx)
        assert isinstance(surface_h5, SurfaceH5)
        # Use the first vertex of the picked triangle as the reference vertex.
        vertex_index = int(surface_h5.triangles[triangle_index][0])

        lconn_h5 = h5.h5_file_for_index(lconn_index)
        assert isinstance(lconn_h5, LocalConnectivityH5)
        lconn_matrix = lconn_h5.matrix.load()
        # The matrix is sparse: densify just the row of the chosen vertex.
        picked_data = list(lconn_matrix[vertex_index].toarray().squeeze())
        lconn_h5.close()

        result = []
        number_of_split_slices = surface_h5.number_of_split_slices.load()
        if number_of_split_slices <= 1:
            result.append(picked_data)
        else:
            # Large surfaces are stored split into slices; partition the data
            # per slice so it matches the rendering buffers.
            for slice_number in range(number_of_split_slices):
                start_idx, end_idx = surface_h5.get_slice_vertex_boundaries(
                    slice_number)
                result.append(picked_data[start_idx:end_idx])

        surface_h5.close()
        result = {'data': json.dumps(result)}
        return result
Beispiel #2
0
    def launch(self, view_model):
        # type: (TractViewerModel) -> dict
        """
        Build the display parameters for the tract visualizer page.
        """
        tracts_index = load.load_entity_by_gid(view_model.tracts)
        region_volume_mapping_index = load.load_entity_by_gid(
            tracts_index.fk_region_volume_map_gid)

        shell_surface_index = None
        if view_model.shell_surface:
            # NOTE(review): this uses self.load_entity_by_gid while the lookups
            # above use the module-level load.load_entity_by_gid -- confirm the
            # difference is intentional.
            shell_surface_index = self.load_entity_by_gid(
                view_model.shell_surface)

        # When no shell surface was selected, a default FACE surface of the
        # current project is resolved here.
        shell_surface_index = ensure_shell_surface(self.current_project_id,
                                                   shell_surface_index, FACE)

        tracts_starts = URLGenerator.build_h5_url(tracts_index.gid,
                                                  'get_line_starts')
        tracts_vertices = URLGenerator.build_binary_datatype_attribute_url(
            tracts_index.gid, 'get_vertices')

        params = dict(title="Tract Visualizer",
                      shellObject=self.prepare_shell_surface_params(
                          shell_surface_index, SurfaceURLGenerator),
                      urlTrackStarts=tracts_starts,
                      urlTrackVertices=tracts_vertices)

        connectivity = self.load_traited_by_gid(
            region_volume_mapping_index.fk_connectivity_gid)
        params.update(
            self.build_params_for_selectable_connectivity(connectivity))

        return self.build_display_result(
            "tract/tract_view",
            params,
            pages={"controlPage": "tract/tract_viewer_controls"})
Beispiel #3
0
    def index(self):
        """
        Render the region model parameters page for the current simulation.

        :raises ValueError: when no connectivity has been selected yet
        """
        current_user_id = self.simulator_context.logged_user.id
        # In case the number of dynamics gets big we should add a filter in the ui.
        dynamics = dao.get_dynamics_for_user(current_user_id)

        if not dynamics:
            return self.no_dynamics_page()

        sim_config = self.simulator_context.simulator
        connectivity = sim_config.connectivity

        if connectivity is None:
            msg = 'You have to select a connectivity before setting up the region Model. '
            common.set_error_message(msg)
            raise ValueError(msg)

        current_project = common.get_current_project()
        file_handler = FilesHelper()
        conn_idx = load.load_entity_by_gid(connectivity)
        # Folder of the operation that produced the connectivity; the viewer
        # reads auxiliary files from there.
        conn_path = file_handler.get_project_folder(current_project, str(conn_idx.fk_from_operation))

        params = ConnectivityViewer.get_connectivity_parameters(conn_idx, conn_path)
        burst_config = self.simulator_context.burst_config

        params.update({
            'title': 'Model parameters',
            'mainContent': 'burst/model_param_region',
            'isSingleMode': True,
            'submit_parameters_url': '/burst/modelparameters/regions/submit_model_parameters',
            'dynamics': dynamics,
            'dynamics_json': self._dynamics_json(dynamics),
            'initial_dynamic_ids': burst_config.dynamic_ids
        })

        return self.fill_default_attributes(params, 'regionmodel')
Beispiel #4
0
    def launch(self, view_model):
        # type: (ConnectivityViewerModel) -> dict
        """
        Given the input connectivity data and the surface data,
        build the HTML response to be displayed.
        """
        connectivity, colors, rays = self._load_input_data(view_model)

        global_params, global_pages = self._compute_connectivity_global_params(
            connectivity)
        if view_model.surface_data is not None:
            surface_index = load_entity_by_gid(view_model.surface_data.hex)
            surface_h5 = h5.h5_file_for_index(surface_index)
            url_vertices, url_normals, _, url_triangles, _ = SurfaceURLGenerator.get_urls_for_rendering(
                surface_h5)
            # BUGFIX: close the H5 file once the URLs are built; the original
            # left the handle open (every sibling usage closes it).
            surface_h5.close()
        else:
            url_vertices, url_normals, url_triangles = [], [], []

        global_params["urlVertices"] = json.dumps(url_vertices)
        global_params["urlTriangles"] = json.dumps(url_triangles)
        global_params["urlNormals"] = json.dumps(url_normals)
        global_params['isSingleMode'] = False

        # Merge 2D and 3D viewer parameters on top of the global ones.
        result_params, result_pages = Connectivity2DViewer(
        ).compute_parameters(connectivity, colors, rays, view_model.step)
        result_params.update(global_params)
        result_pages.update(global_pages)
        _params, _pages = Connectivity3DViewer().compute_parameters(
            connectivity, colors, rays)
        result_params.update(_params)
        result_pages.update(_pages)

        return self.build_display_result("connectivity/main_connectivity",
                                         result_params, result_pages)
    def display_surface(surface_gid, region_mapping_gid=None):
        """
        Generates the HTML for displaying the surface with the given ID.

        :param surface_gid: GID of the surface to render
        :param region_mapping_gid: optional region mapping used for coloring
        :raises MissingDataException: when no surface exists for the given GID
        :return: dict of JSON-encoded URL lists and the brain center
        """
        surface = load_entity_by_gid(surface_gid)
        if surface is None:
            raise MissingDataException(
                SpatioTemporalController.MSG_MISSING_SURFACE + "!!")
        common.add2session(PARAM_SURFACE, surface_gid)
        surface_h5 = h5.h5_file_for_index(surface)
        url_vertices_pick, url_normals_pick, url_triangles_pick = SurfaceURLGenerator.get_urls_for_pick_rendering(
            surface_h5)
        url_vertices, url_normals, _, url_triangles, _ = SurfaceURLGenerator.get_urls_for_rendering(
            surface_h5, region_mapping_gid)
        # BUGFIX: read the center while the H5 file is still open; the original
        # called surface_h5.center() after surface_h5.close().
        brain_center = surface_h5.center()
        surface_h5.close()

        return {
            'urlVerticesPick': json.dumps(url_vertices_pick),
            'urlTrianglesPick': json.dumps(url_triangles_pick),
            'urlNormalsPick': json.dumps(url_normals_pick),
            'urlVertices': json.dumps(url_vertices),
            'urlTriangles': json.dumps(url_triangles),
            'urlNormals': json.dumps(url_normals),
            'brainCenter': json.dumps(brain_center)
        }
Beispiel #6
0
    def _convert_to_array(self, input_data, row):
        """
        Method used when the type of an input is array, to parse or read.

        If the user set an equation for computing a model parameter then the
        value of that parameter will be a dictionary which contains all the data
        needed for computing that parameter for each vertex from the used surface.

        :param input_data: raw submitted value (plain array string, or the repr
            of an equation-descriptor dict)
        :param row: interface row describing this input; may carry a dtype
        :return: per-vertex computed values, [] for an unknown equation type,
            None on failure, or the parsed numeric array
        """
        if KEY_EQUATION in str(input_data) and KEY_FOCAL_POINTS in str(
                input_data) and KEY_SURFACE_GID in str(input_data):
            try:
                # SECURITY NOTE(review): eval on submitted data can execute
                # arbitrary code; consider ast.literal_eval -- confirm the
                # payload is always a plain literal dict.
                input_data = eval(str(input_data))
                # TODO move at a different level
                equation_type = input_data.get(KEY_DTYPE)
                if equation_type is None:
                    self.log.warning(
                        "Cannot figure out type of equation from input dictionary: %s. "
                        "Returning []." % input_data)
                    return []
                eq_class = get_class_by_name(equation_type)
                equation = eq_class.from_json(input_data[KEY_EQUATION])
                focal_points = json.loads(input_data[KEY_FOCAL_POINTS])
                surface_gid = input_data[KEY_SURFACE_GID]
                surface = load_entity_by_gid(surface_gid)
                return surface.compute_equation(focal_points, equation)
            except Exception:
                # Best-effort: any failure in equation evaluation is logged
                # and the parameter is dropped.
                self.log.exception(
                    "The parameter %s was ignored. None value was returned.",
                    row['name'])
                return None

        dtype = None
        if KEY_DTYPE in row:
            dtype = row[KEY_DTYPE]
        return string2array(str(input_data), ",", dtype)
Beispiel #7
0
    def launch(self, view_model):
        """
        Method to be called when user submits changes on the
        Connectivity matrix in the Visualizer.

        :return: list with the new connectivity index (plus, on the branch
            path, the re-mapped region-mapping indexes)
        """
        # note: is_branch is missing instead of false because browsers only send checked boxes in forms.
        original_connectivity_index = load_entity_by_gid(
            view_model.original_connectivity.hex)
        original_conn_ht = h5.load_from_index(original_connectivity_index)
        assert isinstance(original_conn_ht, Connectivity)

        if not view_model.is_branch:
            # Cut: keep only the interest area as a standalone connectivity.
            new_conn_ht = self._cut_connectivity(
                original_conn_ht, view_model.new_weights,
                view_model.interest_area_indexes, view_model.new_tracts)
            return [h5.store_complete(new_conn_ht, self.storage_path)]

        else:
            # Branch: derive a new connectivity and carry over the region
            # mappings related to the original one.
            result = []
            new_conn_ht = self._branch_connectivity(
                original_conn_ht, view_model.new_weights,
                view_model.interest_area_indexes, view_model.new_tracts)
            new_conn_index = h5.store_complete(new_conn_ht, self.storage_path)
            result.append(new_conn_index)
            result.extend(
                self._store_related_region_mappings(
                    view_model.original_connectivity.gid, new_conn_ht))
            return result
Beispiel #8
0
    def _import(self,
                import_file_path=None,
                expected_result_class=StructuralMRIIndex,
                connectivity_gid=None):
        """
        This method is used for importing data in NIFIT format
        :param import_file_path: absolute path of the file to be imported
        :param expected_result_class: index class the import is expected to produce
        :param connectivity_gid: optional connectivity GID to link the import to
        :return: the imported datatype index
        """
        view_model = NIFTIImporterModel()
        view_model.data_file = import_file_path
        view_model.mappings_file = self.TXT_FILE
        view_model.apply_corrections = True
        view_model.connectivity = connectivity_gid
        view_model.data_subject = "Bla Bla"

        TestFactory.launch_importer(NIFTIImporter, view_model, self.test_user,
                                    self.test_project, False)

        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                expected_result_class, None)
        # BUGFIX: the original `assert 1, count == "..."` always passed (it
        # asserted the constant 1); assert the actual count instead.
        assert count == 1, "Project should contain only one data type."

        result = load_entity_by_gid(dts[0][2])
        assert result is not None, "Result should not be none"
        return result
Beispiel #9
0
    def _convert_to_array(self, input_data, row):
        """
        Method used when the type of an input is array, to parse or read.

        If the user set an equation for computing a model parameter then the
        value of that parameter will be a dictionary which contains all the data
        needed for computing that parameter for each vertex from the used surface.

        :param input_data: raw submitted value (plain array string, or the repr
            of an equation-descriptor dict)
        :param row: interface row describing this input; may carry a dtype
        :return: per-vertex computed values, [] for an unknown equation type,
            None on failure, or the parsed numeric array
        """
        if KEY_EQUATION in str(input_data) and KEY_FOCAL_POINTS in str(input_data) and KEY_SURFACE_GID in str(input_data):
            try:
                # SECURITY NOTE(review): eval on submitted data can execute
                # arbitrary code; consider ast.literal_eval -- confirm the
                # payload is always a plain literal dict.
                input_data = eval(str(input_data))
                # TODO move at a different level
                equation_type = input_data.get(KEY_DTYPE)
                if equation_type is None:
                    self.log.warning("Cannot figure out type of equation from input dictionary: %s. "
                                     "Returning []." % input_data)
                    return []
                eq_class = get_class_by_name(equation_type)
                equation = eq_class.from_json(input_data[KEY_EQUATION])
                focal_points = json.loads(input_data[KEY_FOCAL_POINTS])
                surface_gid = input_data[KEY_SURFACE_GID]
                surface = load_entity_by_gid(surface_gid)
                return surface.compute_equation(focal_points, equation)
            except Exception:
                # Best-effort: failures are logged and the parameter dropped.
                self.log.exception("The parameter %s was ignored. None value was returned.", row['name'])
                return None

        dtype = None
        if KEY_DTYPE in row:
            dtype = row[KEY_DTYPE]
        return string2array(str(input_data), ",", dtype)
Beispiel #10
0
 def load_entity_by_gid(data_gid):
     """
     Load a generic DataType, specified by GID.

     :param data_gid: GID as uuid.UUID or hex string
     :return: the matching DataType entity, as returned by the DAO lookup
     """
     if isinstance(data_gid, uuid.UUID):
         data_gid = data_gid.hex
     # BUGFIX: the original returned load_entity_by_gid(data_gid) -- i.e. it
     # called itself unconditionally, recursing forever. Delegate to the DAO
     # lookup instead (dao is used the same way elsewhere in this file).
     return dao.get_datatype_by_gid(data_gid)
Beispiel #11
0
def gather_all_references_of_view_model(gid, base_dir, ref_files):
    """
    Recursively collect the file paths of a view model H5 and of everything it
    references, appending them to ref_files.

    :param gid: GID of the view model whose H5 file should be located
    :param base_dir: directory where the H5 files live
    :param ref_files: accumulator list of file paths (mutated in place)
    """
    vm_path = determine_filepath(gid, base_dir)
    ref_files.append(vm_path)
    view_model_class = H5File.determine_type(vm_path)
    view_model = view_model_class()

    with ViewModelH5(vm_path, view_model) as vm_h5:
        references = vm_h5.gather_references()
        uuids = vm_h5.gather_references_by_uuid()

        # Plain references point at other view models: recurse on each.
        # (NB: the loop variable shadows the gid parameter; harmless here,
        # since the parameter is not needed past this point.)
        for _, gid in references:
            if not gid:
                continue
            if isinstance(gid, (list, tuple)):
                for list_gid in gid:
                    gather_all_references_of_view_model(
                        list_gid, base_dir, ref_files)
            else:
                gather_all_references_of_view_model(gid, base_dir, ref_files)

        uuid_files = []
        # UUID references point at stored datatypes: collect their H5 paths
        # plus everything those files reference in turn.
        for _, gid in uuids:
            if not gid:
                continue
            index = load_entity_by_gid(gid.hex)
            h5_file = h5_file_for_index(index)
            uuid_files.append(h5_file.path)
            gather_all_references_by_index(h5_file, uuid_files)
        ref_files.extend(uuid_files)
Beispiel #12
0
    def index(self):
        """
        Render the noise configuration page for the current simulation setup.
        """
        des = SerializationManager(self.simulator_context.simulator)
        conn_idx = load.load_entity_by_gid(des.conf.connectivity)
        model = des.conf.model
        integrator = des.conf.integrator

        state_vars = model.state_variables
        noise_values = self.init_noise_config_values(model, integrator,
                                                     conn_idx)
        # Group the flat noise values per state variable for the UI widgets.
        initial_noise = self.group_noise_array_by_state_var(
            noise_values, state_vars, conn_idx.number_of_regions)

        current_project = common.get_current_project()
        file_handler = FilesHelper()
        # Folder of the operation that produced the connectivity.
        conn_path = file_handler.get_project_folder(
            current_project, str(conn_idx.fk_from_operation))

        params = ConnectivityViewer.get_connectivity_parameters(
            conn_idx, conn_path)
        params.update({
            'title': 'Noise configuration',
            'mainContent': 'burst/noise',
            'isSingleMode': True,
            'submit_parameters_url': '/burst/noise/submit',
            'stateVars': state_vars,
            'stateVarsJson': json.dumps(state_vars),
            'noiseInputValues': initial_noise[0],
            'initialNoiseValues': json.dumps(initial_noise)
        })
        return self.fill_default_attributes(params, 'regionmodel')
Beispiel #13
0
 def step_2(self, **kwargs):
     """
     Generate the html for the second step of the local connectivity page.
     :param kwargs: not actually used, but parameters are still submitted from UI since we just\
            use the same js function for this.
     """
     current_lconn = common.get_from_session(KEY_LCONN)
     left_side_form = LocalConnectivitySelectorForm(
         project_id=common.get_current_project().id)
     if current_lconn is not None:
         # BUGFIX: the original dereferenced current_lconn.gid unconditionally
         # here, BEFORE the None-check below, so the page crashed with an
         # AttributeError whenever no local connectivity was in session.
         # Pre-select the existing entity only when one is present.
         left_side_form.existentEntitiesSelect.data = current_lconn.gid.hex
     template_specification = dict(title="Surface - Local Connectivity")
     template_specification[
         'mainContent'] = 'spatial/local_connectivity_step2_main'
     template_specification[
         'existentEntitiesInputList'] = self.render_adapter_form(
             left_side_form)
     template_specification['loadExistentEntityUrl'] = LOAD_EXISTING_URL
     template_specification['resetToDefaultUrl'] = RELOAD_DEFAULT_PAGE_URL
     template_specification[
         'next_step_url'] = '/spatial/localconnectivity/step_1'
     msg, _ = common.get_message_from_session()
     template_specification['displayedMessage'] = msg
     if current_lconn is not None:
         selected_local_conn = load_entity_by_gid(current_lconn.gid)
         template_specification.update(
             self.display_surface(selected_local_conn.fk_surface_gid))
         template_specification['no_local_connectivity'] = False
         template_specification[
             'minValue'] = selected_local_conn.matrix_non_zero_min
         template_specification[
             'maxValue'] = selected_local_conn.matrix_non_zero_max
     else:
         template_specification['no_local_connectivity'] = True
     template_specification[common.KEY_PARAMETERS_CONFIG] = False
     return self.fill_default_attributes(template_specification)
Beispiel #14
0
    def load_local_connectivity(self, local_connectivity_gid, from_step=None):
        """
        Loads an existing local connectivity.

        :param local_connectivity_gid: GID of the LocalConnectivity to load
        :param from_step: wizard step the request came from (1 or 2); the
            corresponding page is re-rendered. NOTE(review): int(from_step)
            raises TypeError for the default None -- confirm callers always
            pass a step.
        """
        lconn_index = load_entity_by_gid(local_connectivity_gid)
        existent_lconn = LocalConnectivityCreatorModel()
        lconn_h5_path = h5.path_for_stored_index(lconn_index)
        with LocalConnectivityH5(lconn_h5_path) as lconn_h5:
            lconn_h5.load_into(existent_lconn)

        existent_lconn.surface = uuid.UUID(lconn_index.fk_surface_gid)

        # Keep the loaded model in session for the following wizard steps.
        common.add2session(KEY_LCONN, existent_lconn)
        existent_lconn.display_name = lconn_index.user_tag_1

        if existent_lconn.equation:
            msg = "Successfully loaded existent entity gid=%s" % (
                local_connectivity_gid, )
        else:
            msg = "There is no equation specified for this local connectivity. "
            msg += "The default equation is displayed into the spatial field."
        common.set_message(msg)

        if int(from_step) == 1:
            return self.step_1()
        if int(from_step) == 2:
            return self.step_2()
Beispiel #15
0
    def test_import_region_mapping(self):
        """
        This method tests import of a NIFTI file compressed in GZ format.
        """
        # First bring a connectivity into the project to link against.
        conn_zip = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_76.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            conn_zip, "John")
        connectivity = TestFactory.get_entity(self.test_project,
                                              ConnectivityIndex)

        # Import the GZ-compressed NIFTI as a region volume mapping.
        rvm_index = self._import(self.GZ_NII_FILE, RegionVolumeMappingIndex,
                                 connectivity.gid)
        rvm = h5.load_from_index(rvm_index)

        # Region indices must lie in [-1, number_of_regions).
        assert -1 <= rvm.array_data.min()
        assert rvm.array_data.max() < connectivity.number_of_regions
        assert connectivity.gid == rvm_index.fk_connectivity_gid

        vol_index = load_entity_by_gid(rvm_index.fk_volume_gid)
        assert vol_index is not None

        vol = h5.load_from_index(vol_index)
        assert numpy.equal(self.DEFAULT_ORIGIN, vol.origin).all()
        assert numpy.equal([3.0, 3.0, 3.0], vol.voxel_size).all()
        assert self.UNKNOWN_STR == vol.voxel_unit
Beispiel #16
0
    def store_datatype(self, datatype, current_file=None):
        """
        This method stores data type into DB.

        :param datatype: the DataType index instance to persist
        :param current_file: current location of the datatype's H5 file; moved
            to the final storage path when different
        :return: the stored entity (the pre-existing one when the GID is
            already present in DB)
        :raises ImportException: when H5 datasets are missing or a DB
            integrity constraint (duplicate name/gid) is violated
        """
        try:
            self.logger.debug("Store datatype: %s with Gid: %s" %
                              (datatype.__class__.__name__, datatype.gid))
            # Now move storage file into correct folder if necessary
            if current_file is not None:
                final_path = h5.path_for_stored_index(datatype)
                if final_path != current_file:
                    shutil.move(current_file, final_path)
            # Idempotent store: reuse an entity with the same GID if present.
            stored_entry = load.load_entity_by_gid(datatype.gid)
            if not stored_entry:
                stored_entry = dao.store_entity(datatype)

            return stored_entry
        except MissingDataSetException as e:
            self.logger.exception(e)
            error_msg = "Datatype %s has missing data and could not be imported properly." % (
                datatype, )
            raise ImportException(error_msg)
        except IntegrityError as excep:
            self.logger.exception(excep)
            error_msg = "Could not import data with gid: %s. There is already a one with " \
                        "the same name or gid." % datatype.gid
            raise ImportException(error_msg)
Beispiel #17
0
 def load_traited_by_gid(data_gid):
     # type: (uuid.UUID) -> HasTraits
     """
     Load a generic HasTraits instance, specified by GID.
     """
     # Resolve the index for the hex GID, then materialize it from its H5 file.
     return h5.load_from_index(load_entity_by_gid(data_gid.hex))
Beispiel #18
0
    def review_operation_inputs(self, parameters, flat_interface):
        """
        Find out which of the submitted parameters are actually DataTypes and
        return a list holding all the dataTypes in parameters.
        :returns: list of dataTypes and changed parameters.
        """
        inputs_datatypes = []
        changed_parameters = dict()

        for field_dict in flat_interface:
            eq_flat_interface_name = self._find_field_submitted_name(
                parameters, field_dict[KEY_NAME])

            if eq_flat_interface_name is not None:
                is_datatype = False
                if field_dict.get(KEY_DATATYPE):
                    # Field explicitly declared as a datatype: resolve its GID.
                    eq_datatype = load_entity_by_gid(
                        parameters.get(str(eq_flat_interface_name)))
                    if eq_datatype is not None:
                        inputs_datatypes.append(eq_datatype)
                        is_datatype = True
                elif isinstance(field_dict[KEY_TYPE], basestring):
                    # Field typed by class name: it may still refer to a
                    # MappedType, in which case its value is also a GID.
                    try:
                        class_entity = get_class_by_name(field_dict[KEY_TYPE])
                        if issubclass(class_entity, MappedType):
                            data_gid = parameters.get(str(
                                field_dict[KEY_NAME]))
                            data_type = load_entity_by_gid(data_gid)
                            if data_type:
                                inputs_datatypes.append(data_type)
                                is_datatype = True
                    except ImportError:
                        # Unknown class names are simply not datatypes.
                        pass

                if is_datatype:
                    changed_parameters[field_dict[
                        KEY_LABEL]] = inputs_datatypes[-1].display_name
                else:
                    # Plain parameter: record it only when it differs from the
                    # declared default (or when no default exists).
                    if field_dict[KEY_NAME] in parameters and (
                            KEY_DEFAULT not in field_dict
                            or str(field_dict[KEY_DEFAULT]) != str(
                                parameters[field_dict[KEY_NAME]])):
                        changed_parameters[field_dict[KEY_LABEL]] = str(
                            parameters[field_dict[KEY_NAME]])

        return inputs_datatypes, changed_parameters
Beispiel #19
0
 def set_connectivity(self, **param):
     """Update the region stimulus in session with the connectivity posted from the UI."""
     region_stimulus = common.get_from_session(KEY_REGION_STIMULUS)
     conn_field = RegionStimulusCreatorForm().connectivity
     conn_field.fill_from_post(param)
     region_stimulus.connectivity = conn_field.value
     connectivity_index = load_entity_by_gid(conn_field.value)
     # Reset the weights to the default vector sized for this connectivity.
     region_stimulus.weight = StimuliRegion.get_default_weights(
         connectivity_index.number_of_regions)
Beispiel #20
0
    def _read_datatype_attribute(self, entity_gid, dataset_name, datatype_kwargs='null', **kwargs):
        """
        Read the attribute dataset_name from the datatype identified by
        entity_gid; when it is callable, invoke it with the given kwargs.
        """
        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + dataset_name + "/" + str(kwargs))
        index = load_entity_by_gid(entity_gid)
        datatype = h5.load_from_index(index)

        extra_refs = json.loads(datatype_kwargs)
        if extra_refs:
            # Referenced datatypes arrive as GIDs and are resolved here.
            for ref_name, ref_gid in six.iteritems(extra_refs):
                kwargs[ref_name] = load_entity_by_gid(ref_gid)

        attribute = getattr(datatype, dataset_name)
        if not callable(attribute):
            return attribute
        return attribute(**kwargs) if kwargs else attribute()
Beispiel #21
0
    def read_from_h5_file(self, entity_gid, method_name, flatten=False, datatype_kwargs='null', **kwargs):
        """
        Call method_name on the H5 file of the datatype identified by
        entity_gid and return its (optionally flattened) result.
        """
        self.logger.debug("Starting to read HDF5: " + entity_gid + "/" + method_name + "/" + str(kwargs))
        index = load_entity_by_gid(entity_gid)
        entity_h5 = h5.h5_file_for_index(index)

        extra_refs = json.loads(datatype_kwargs)
        if extra_refs:
            # Referenced datatypes arrive as GIDs and are resolved here.
            for ref_name, ref_gid in six.iteritems(extra_refs):
                kwargs[ref_name] = load_entity_by_gid(ref_gid)

        method = getattr(entity_h5, method_name)
        result = method(**kwargs) if kwargs else method()

        entity_h5.close()
        return self._prepare_result(result, flatten)
Beispiel #22
0
def gather_all_references_by_index(h5_file, ref_files):
    """
    Recursively collect the H5 file paths of every datatype referenced,
    directly or transitively, from h5_file, appending them to ref_files.
    """
    for _, ref_gid in h5_file.gather_references():
        if not ref_gid:
            continue
        referenced_index = load_entity_by_gid(ref_gid)
        referenced_h5 = h5_file_for_index(referenced_index)
        ref_files.append(referenced_h5.path)
        gather_all_references_by_index(referenced_h5, ref_files)
Beispiel #23
0
def function_sensors_to_surface(sensors_gid, surface_to_map_gid):
    """
    Map EEG sensors onto the head surface (skin-air).

    EEG sensor locations are typically only given on a unit sphere, that is,
    they are effectively only identified by their orientation with respect
    to a coordinate system. This method is used to map these unit vector
    sensor "locations" to a specific location on the surface of the skin.

    Assumes coordinate systems are aligned, i.e. common x,y,z and origin.
    """
    # Resolve both GIDs to their in-memory datatype representations.
    sensors_dt = h5.load_from_index(load_entity_by_gid(sensors_gid))
    surface_dt = h5.load_from_index(load_entity_by_gid(surface_to_map_gid))
    return sensors_dt.sensors_to_surface(surface_dt).tolist()
Beispiel #24
0
 def load_entity_by_gid(self, data_gid):
     # type: (typing.Union[uuid.UUID, str]) -> DataType
     """
     Load a generic DataType, specified by GID.

     Side effect: when ``self.generic_attributes`` has no parent burst
     recorded yet, adopt the one carried by the loaded datatype.
     """
     entity_index = load_entity_by_gid(data_gid)
     if entity_index and self.generic_attributes.parent_burst is None:
         entity_index_burst = entity_index.fk_parent_burst
         self.generic_attributes.parent_burst = entity_index_burst
     return entity_index
Beispiel #25
0
    def __gather_datatypes_for_copy(self, data, dt_path_list):
        """
        Depth-first walk over the H5 references of ``data``, appending the
        storage path of every reachable datatype to ``dt_path_list``.
        """
        storage_path = h5.path_for_stored_index(data)
        dt_path_list.append(storage_path)
        with H5File.from_file(storage_path) as h5_file:
            for _, referenced_gid in h5_file.gather_references():
                if not referenced_gid:
                    continue
                referenced_index = load.load_entity_by_gid(referenced_gid)
                self.__gather_datatypes_for_copy(referenced_index, dt_path_list)
Beispiel #26
0
    def compute_conn_branch_conditions(is_branch, simulator):
        """
        For a branch simulation, build a FilterChain restricting candidate
        connectivities to the same number of regions as the simulator's one.
        Returns None for non-branch simulations or when no region count is set.
        """
        if not is_branch:
            return None

        connectivity = load.load_entity_by_gid(simulator.connectivity)
        region_count = connectivity.number_of_regions
        if not region_count:
            return None
        return FilterChain(fields=[FilterChain.datatype + '.number_of_regions'],
                           operations=["=="],
                           values=[region_count])
    def _import_from_file(self, import_file):
        """
        This method tests import of region mapping from TXT file
        """
        rm_index = TestFactory.import_region_mapping(self.test_user, self.test_project, import_file,
                                                     self.surface.gid, self.connectivity.gid, False)

        surface_index = load_entity_by_gid(rm_index.fk_surface_gid)
        assert surface_index is not None

        assert load_entity_by_gid(rm_index.fk_connectivity_gid) is not None

        # The mapping carries one region assignment per surface vertex.
        mapping_values = h5.load_from_index(rm_index).array_data
        assert mapping_values is not None
        assert 16384 == len(mapping_values)
        assert surface_index.number_of_vertices == len(mapping_values)
    def review_operation_inputs(self, parameters, flat_interface):
        """
        Find out which of the submitted parameters are actually DataTypes and
        return a list holding all the dataTypes in parameters.
        :returns: list of dataTypes and changed parameters.
        """
        inputs_datatypes = []
        changed_parameters = dict()

        for field_dict in flat_interface:
            eq_flat_interface_name = self._find_field_submitted_name(parameters, field_dict[KEY_NAME])

            if eq_flat_interface_name is not None:
                is_datatype = False
                if field_dict.get(KEY_DATATYPE):
                    eq_datatype = load_entity_by_gid(parameters.get(str(eq_flat_interface_name)))
                    if eq_datatype is not None:
                        inputs_datatypes.append(eq_datatype)
                        is_datatype = True
                elif type(field_dict[KEY_TYPE]) in (str, unicode):
                    # The declared type is a dotted class name: import it and
                    # treat the value as a DataType GID when it is a MappedType.
                    point_separator = field_dict[KEY_TYPE].rfind('.')
                    if point_separator > 0:
                        module = field_dict[KEY_TYPE][:point_separator]
                        classname = field_dict[KEY_TYPE][(point_separator + 1):]
                        try:
                            module = __import__(module, [], locals(), globals())
                            class_entity = eval("module." + classname)
                            if issubclass(class_entity, MappedType):
                                data_gid = parameters.get(str(field_dict[KEY_NAME]))
                                data_type = load_entity_by_gid(data_gid)
                                if data_type:
                                    inputs_datatypes.append(data_type)
                                    is_datatype = True
                        # FIX: "except ImportError, _:" is Python-2-only syntax;
                        # this form is valid on both Python 2 and 3.
                        except ImportError:
                            pass

                if is_datatype:
                    changed_parameters[field_dict[KEY_LABEL]] = inputs_datatypes[-1].display_name
                else:
                    if field_dict[KEY_NAME] in parameters and (KEY_DEFAULT not in field_dict
                                    or str(field_dict[KEY_DEFAULT]) != str(parameters[field_dict[KEY_NAME]])):
                        changed_parameters[field_dict[KEY_LABEL]] = str(parameters[field_dict[KEY_NAME]])

        # BUG FIX: the docstring promises both collections as a result, but the
        # original never returned them (callers always got None).
        return inputs_datatypes, changed_parameters
Beispiel #29
0
    def prepare_cortex_fragment(simulator, rendering_rules, form_action_url,
                                project_id):
        """
        Build the region-mapping form for the simulator's cortical surface,
        attach it to ``rendering_rules`` and render them to a dict.
        """
        surface_idx = load_entity_by_gid(simulator.surface.surface_gid)
        rm_form = SimulatorRMFragment(surface_idx, simulator.connectivity)
        prepared_form = AlgorithmService().prepare_adapter_form(form_instance=rm_form,
                                                                project_id=project_id)
        prepared_form.fill_from_trait(simulator.surface)

        rendering_rules.form = prepared_form
        rendering_rules.form_action_url = form_action_url
        return rendering_rules.to_dict()
 def invoke_adapter(self, algo_id, method_name, entity_gid, **kwargs):
     """
     Instantiate the adapter behind ``algo_id``, point its storage path at the
     project folder of the entity's parent operation, then call
     ``method_name`` on it with ``entity_gid`` (plus any extra ``kwargs``).
     """
     algorithm = self.algorithm_service.get_algorithm_by_identifier(algo_id)
     adapter = ABCAdapter.build_adapter(algorithm)
     entity = load_entity_by_gid(entity_gid)
     adapter.storage_path = self.files_helper.get_project_folder(
         entity.parent_operation.project, str(entity.fk_from_operation))
     bound_method = getattr(adapter, method_name)
     return bound_method(entity_gid, **kwargs) if kwargs else bound_method(entity_gid)
Beispiel #31
0
 def load_dts(vm_h5, ref_files):
     """
     Collect the H5 file paths of all datatypes referenced by ``vm_h5`` (and,
     recursively, of their own references) and extend ``ref_files`` with them.
     """
     collected = []
     for _, referenced_gid in vm_h5.gather_datatypes_references():
         if not referenced_gid:
             continue
         referenced_index = load_entity_by_gid(referenced_gid)
         referenced_h5 = h5_file_for_index(referenced_index)
         collected.append(referenced_h5.path)
         gather_all_references_by_index(referenced_h5, collected)
     ref_files.extend(collected)
 def _is_compatible(self, algorithm, datatype_group_gid):
     """
     Check if PSE view filters are compatible with current DataType.
     :param algorithm: Algorithm instance to get filters from it.
     :param datatype_group_gid: Current DataTypeGroup to validate against.
     :returns: True when DataTypeGroup can be displayed with current algorithm, False when incompatible.
     """
     group = load_entity_by_gid(datatype_group_gid)
     filters = FilterChain.from_json(algorithm.datatype_filter)
     if not group:
         return False
     if not filters:
         return True
     return bool(filters.get_python_filter_equivalent(group))
    def review_operation_inputs(self, parameters, flat_interface):
        """
        Find out which of the submitted parameters are actually DataTypes and
        return a list holding all the dataTypes in parameters.
        :returns: list of dataTypes and changed parameters.
        """
        found_datatypes = []
        changed_parameters = {}

        for field_dict in flat_interface:
            submitted_name = self._find_field_submitted_name(parameters, field_dict[KEY_NAME])
            if submitted_name is None:
                continue

            is_datatype = False
            if field_dict.get(KEY_DATATYPE):
                # Field explicitly declared as a datatype: resolve its GID.
                datatype = load_entity_by_gid(parameters.get(str(submitted_name)))
                if datatype is not None:
                    found_datatypes.append(datatype)
                    is_datatype = True
            elif isinstance(field_dict[KEY_TYPE], basestring):
                # Field type given as a dotted class name: it counts as a
                # datatype only when the class is a MappedType subclass.
                try:
                    declared_class = get_class_by_name(field_dict[KEY_TYPE])
                    if issubclass(declared_class, MappedType):
                        data_gid = parameters.get(str(field_dict[KEY_NAME]))
                        data_type = load_entity_by_gid(data_gid)
                        if data_type:
                            found_datatypes.append(data_type)
                            is_datatype = True
                except ImportError:
                    pass

            if is_datatype:
                changed_parameters[field_dict[KEY_LABEL]] = found_datatypes[-1].display_name
            elif field_dict[KEY_NAME] in parameters:
                submitted_value = str(parameters[field_dict[KEY_NAME]])
                # Record only values differing from the declared default.
                if KEY_DEFAULT not in field_dict or str(field_dict[KEY_DEFAULT]) != submitted_value:
                    changed_parameters[field_dict[KEY_LABEL]] = submitted_value

        return found_datatypes, changed_parameters
    def _convert_to_array(self, input_data, row):
        """
        Method used when the type of an input is array, to parse or read.

        If the user set an equation for computing a model parameter then the
        value of that parameter will be a dictionary which contains all the data
        needed for computing that parameter for each vertex from the used surface.

        :param input_data: raw submitted value (a manual string, an upload
            path, or the repr of an equation-description dict)
        :param row: interface-description dict for this input field
        :returns: computed/parsed array-like data, the raw input, or None
        """
        # Equation-driven parameter: the value is the repr of a dict holding
        # the equation spec, its focal points and the surface GID.
        if KEY_EQUATION in str(input_data) and KEY_FOCAL_POINTS in str(input_data) and KEY_SURFACE_GID in str(input_data):
            try:
                # NOTE(review): eval of submitted text — acceptable only for
                # trusted input; ast.literal_eval would be safer to confirm.
                input_data = eval(str(input_data))
                # TODO move at a different level
                equation_type = input_data.get(KEY_DTYPE, None)
                if equation_type is None:
                    self.log.warning("Cannot figure out type of equation from input dictionary: %s. "
                                     "Returning []." % (str(input_data, )))
                    return []
                # Import the equation class dynamically from its dotted name.
                splitted_class = equation_type.split('.')
                module = '.'.join(splitted_class[:-1])
                classname = splitted_class[-1]
                eq_module = __import__(module, globals(), locals(), [classname])
                eq_class = eval('eq_module.' + classname)
                equation = eq_class.from_json(input_data[KEY_EQUATION])
                focal_points = json.loads(input_data[KEY_FOCAL_POINTS])
                surface_gid = input_data[KEY_SURFACE_GID]
                surface = load_entity_by_gid(surface_gid)
                # Evaluate the equation per vertex on the referenced surface.
                return surface.compute_equation(focal_points, equation)
            except Exception:
                # Deliberate best-effort: a malformed equation dict is logged
                # and the parameter is dropped rather than failing the launch.
                self.log.exception("The parameter '" + str(row['name']) + "' was ignored. None value was returned.")
                return None

        # Plain array input: how to parse depends on the declared quantifier.
        if xml_reader.ATT_QUATIFIER in row:
            quantifier = row[xml_reader.ATT_QUATIFIER]
            dtype = None
            if KEY_DTYPE in row:
                dtype = row[KEY_DTYPE]
            if quantifier == xml_reader.QUANTIFIER_MANUAL:
                # Comma-separated values typed by the user.
                return string2array(str(input_data), ",", dtype)
            elif quantifier == xml_reader.QUANTIFIER_UPLOAD:
                # input_data is a path to an uploaded, space-separated file.
                input_str = open(input_data, 'r').read()
                return string2array(input_str, " ", dtype)
            elif quantifier == xml_reader.QUANTIFIER_FUNTION:
                # Functional quantifier: pass the raw value through unchanged.
                return input_data

        return None
 def load_entity_by_gid(data_gid):
     """
     Load a generic DataType, specified by GID.

     NOTE(review): this delegates to a same-named ``load_entity_by_gid``
     expected to be imported at module scope. If this def were itself at
     module scope the call would recurse infinitely — confirm that the
     enclosing context (a class, per the indentation) shields the name.
     """
     return load_entity_by_gid(data_gid)
Beispiel #36
0
    def _load_entity(self, row, datatype_gid, kwargs, metadata_out):
        """
        Load specific DataType entities, as specified in DATA_TYPE table.
        Check if the GID is for the correct DataType sub-class, otherwise throw an exception.
        Updates metadata_out with the metadata of this entity

        :param row: interface-description dict (expected type, filters, field/method hints)
        :param datatype_gid: GID of the entity to load
        :param kwargs: submitted parameters, mined for method arguments
        :param metadata_out: dict updated in place with burst/tag/subject info
        :raises InvalidParameterException: when a required value is missing,
            the type does not match, or the row's filters reject the entity
        """

        entity = load_entity_by_gid(datatype_gid)
        if entity is None:
            ## Validate required DT one more time, after actual retrieval from DB:
            if row.get(KEY_REQUIRED):
                raise InvalidParameterException("Empty DataType value for required parameter %s [%s]" % (
                    row[KEY_LABEL], row[KEY_NAME]))

            return None

        # The declared type may be a dotted class name; resolve it lazily.
        expected_dt_class = row[KEY_TYPE]
        if isinstance(expected_dt_class, basestring):
            expected_dt_class = get_class_by_name(expected_dt_class)
        if not isinstance(entity, expected_dt_class):
            raise InvalidParameterException("Expected param %s [%s] of type %s but got type %s." % (
                row[KEY_LABEL], row[KEY_NAME], expected_dt_class.__name__, entity.__class__.__name__))

        result = entity

        ## Step 2 of updating Meta-data from parent DataType.
        if entity.fk_parent_burst:
            ## Link just towards the last Burst identified.
            metadata_out[DataTypeMetaData.KEY_BURST] = entity.fk_parent_burst

        if entity.user_tag_1 and DataTypeMetaData.KEY_TAG_1 not in metadata_out:
            metadata_out[DataTypeMetaData.KEY_TAG_1] = entity.user_tag_1

        # Subjects accumulate as a comma-separated list without duplicates.
        current_subject = metadata_out[DataTypeMetaData.KEY_SUBJECT]
        if current_subject == DataTypeMetaData.DEFAULT_SUBJECT:
            metadata_out[DataTypeMetaData.KEY_SUBJECT] = entity.subject
        else:
            if entity.subject != current_subject and entity.subject not in current_subject.split(','):
                metadata_out[DataTypeMetaData.KEY_SUBJECT] = current_subject + ',' + entity.subject
        ##  End Step 2 - Meta-data Updates

        ## Validate current entity to be compliant with specified ROW filters.
        dt_filter = row.get(KEY_CONDITION)
        if dt_filter is not None and entity is not None and not dt_filter.get_python_filter_equivalent(entity):
            ## If a filter is declared, check that the submitted DataType is in compliance to it.
            raise InvalidParameterException("Field %s [%s] did not pass filters." % (row[KEY_LABEL],
                                                                                     row[KEY_NAME]))

        # In case a specific field in entity is to be used, use it
        if KEY_FIELD in row:
            # note: this cannot be replaced by getattr(entity, row[KEY_FIELD])
            # at least BCT has 'fields' like scaled_weights()
            result = eval('entity.' + row[KEY_FIELD])
        if ATT_METHOD in row:
            # The 'shape' attribute of an arraywrapper is overridden by us
            # the following check is made only to improve performance
            # (to find data in the dictionary with O(1)) on else the data is found in O(n)
            prefix = row[KEY_NAME] + "_" + row[ATT_PARAMETERS]
            if hasattr(entity, 'shape'):
                param_dict = {}
                # One kwarg per dimension beyond the first (Python 2 xrange).
                for i in xrange(1, len(entity.shape)):
                    param_key = prefix + "_" + str(i - 1)
                    if param_key in kwargs:
                        param_dict[param_key] = kwargs[param_key]
            else:
                param_dict = dict((k, v) for k, v in kwargs.items() if k.startswith(prefix))
            result = getattr(entity, row[ATT_METHOD])(param_dict)
        return result
    def _load_entity(self, row, datatype_gid, kwargs, metadata_out):
        """
        Load specific DataType entities, as specified in DATA_TYPE table.
        Check if the GID is for the correct DataType sub-class, otherwise throw an exception.
        Updates metadata_out with the metadata of this entity

        :param row: XML-interface-description dict (expected type, filter
            conditions, field/method hints)
        :param datatype_gid: GID of the entity to load
        :param kwargs: submitted parameters, mined for method arguments
        :param metadata_out: dict updated in place with burst/tag/subject info
        :raises InvalidParameterException: when a required value is missing,
            the type does not match, or the row's conditions reject the entity
        """

        entity = load_entity_by_gid(datatype_gid)
        if entity is None:
            ## Validate required DT one more time, after actual retrieval from DB:
            if row.get(xml_reader.ATT_REQUIRED, False):
                raise InvalidParameterException("Empty DataType value for required parameter %s [%s]" % (
                    row[KEY_LABEL], row[KEY_NAME]))

            return None

        # The declared type may be a dotted class name; import and resolve it.
        expected_dt_class = row[KEY_TYPE]
        if isinstance(expected_dt_class, (str, unicode)):
            classname = expected_dt_class.split('.')[-1]
            data_class = __import__(expected_dt_class.replace(classname, ''), globals(), locals(), [classname])
            data_class = eval("data_class." + classname)
            expected_dt_class = data_class
        if not isinstance(entity, expected_dt_class):
            raise InvalidParameterException("Expected param %s [%s] of type %s but got type %s." % (
                row[KEY_LABEL], row[KEY_NAME], expected_dt_class.__name__, entity.__class__.__name__))

        result = entity

        ## Step 2 of updating Meta-data from parent DataType.
        if entity.fk_parent_burst:
            ## Link just towards the last Burst identified.
            metadata_out[DataTypeMetaData.KEY_BURST] = entity.fk_parent_burst

        if entity.user_tag_1 and DataTypeMetaData.KEY_TAG_1 not in metadata_out:
            metadata_out[DataTypeMetaData.KEY_TAG_1] = entity.user_tag_1

        # Subjects accumulate as a comma-separated list without duplicates.
        current_subject = metadata_out[DataTypeMetaData.KEY_SUBJECT]
        if current_subject == DataTypeMetaData.DEFAULT_SUBJECT:
            metadata_out[DataTypeMetaData.KEY_SUBJECT] = entity.subject
        else:
            if entity.subject != current_subject and entity.subject not in current_subject.split(','):
                metadata_out[DataTypeMetaData.KEY_SUBJECT] = current_subject + ',' + entity.subject
        ##  End Step 2 - Meta-data Updates

        ## Validate current entity to be compliant with specified ROW filters.
        dt_filter = row.get(xml_reader.ELEM_CONDITIONS, False)
        if (dt_filter is not None) and (dt_filter is not False) and \
                (entity is not None) and not dt_filter.get_python_filter_equivalent(entity):
            ## If a filter is declared, check that the submitted DataType is in compliance to it.
            raise InvalidParameterException("Field %s [%s] did not pass filters." % (row[KEY_LABEL],
                                                                                     row[KEY_NAME]))

        # In case a specific field in entity is to be used, use it
        if xml_reader.ATT_FIELD in row:
            # eval allows "fields" that are actually method calls (see sibling
            # implementation note about BCT's scaled_weights()).
            val = eval("entity." + row[xml_reader.ATT_FIELD])
            result = val
        if ATT_METHOD in row:
            param_dict = dict()
            #The 'shape' attribute of an arraywrapper is overridden by us
            #the following check is made only to improve performance
            # (to find data in the dictionary with O(1)) on else the data is found in O(n)
            if hasattr(entity, 'shape'):
                # One kwarg per dimension beyond the first (Python 2 xrange).
                for i in xrange(len(entity.shape)):
                    if not i:
                        continue
                    param_key = (row[xml_reader.ATT_NAME] + "_" + row[ATT_PARAMETERS] + "_" + str(i - 1))
                    if param_key in kwargs:
                        param_dict[param_key] = kwargs[param_key]
            else:
                param_dict = dict((k, v) for k, v in kwargs.items()
                                  if k.startswith(row[xml_reader.ATT_NAME] + "_" + row[ATT_PARAMETERS]))
            val = eval("entity." + row[ATT_METHOD] + "(param_dict)")
            result = val
        return result