def __init__(self, obj_file):
    """
    Create a surface from an obj file
    """
    self.logger = get_logger(__name__)

    try:
        obj = ObjParser()
        obj.read(obj_file)

        self.triangles = []
        self.vertices = obj.vertices
        self.normals = [(0.0, 0.0, 0.0)] * len(self.vertices)
        self.have_normals = len(obj.normals)

        for face in obj.faces:
            triangles = self._triangulate(face)
            for v_idx, t_idx, n_idx in triangles:
                self.triangles.append(v_idx)
                if n_idx != -1:
                    # last normal index wins
                    # alternative: self.normals[v_idx] += obj.normals[n_idx]
                    # The correct behaviour is to duplicate the vertex
                    # self.vertices.append(self.vertices[v_idx])
                    # self.tex_coords.append(self.tex_coords[v_idx])
                    self.normals[v_idx] = obj.normals[n_idx]

        # checks
        if not self.vertices or not self.triangles:
            raise ParseException("No geometry data in file.")

        self._to_numpy()
    except ValueError as ex:
        self.logger.exception("Error in obj")
        raise ParseException(str(ex))
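# A minimal sketch of the accumulate-and-renormalise alternative mentioned in the
# comments above: instead of letting the last normal index win, sum every face
# normal that references a vertex and renormalise the result. The toy data below
# is invented for illustration; it is not part of the parser.
import numpy as np

face_normals = np.array([[0.0, 0.0, 1.0], [0.0, 1.0, 0.0]])
# (v_idx, n_idx) pairs as they would come out of _triangulate (t_idx omitted)
references = [(0, 0), (1, 0), (2, 0), (0, 1), (2, 1), (3, 1)]

accumulated = np.zeros((4, 3))
for v_idx, n_idx in references:
    accumulated[v_idx] += face_normals[n_idx]

lengths = np.linalg.norm(accumulated, axis=1, keepdims=True)
vertex_normals = accumulated / np.where(lengths == 0.0, 1.0, lengths)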
def __init__(self, data_file):
    self.logger = get_logger(__name__)

    if data_file is None:
        raise ParseException("Please select NIFTI file which contains data to import")
    if not os.path.exists(data_file):
        raise ParseException("Provided file %s does not exist" % data_file)

    try:
        self.nifti_image = nibabel.load(data_file)
    except nibabel.spatialimages.ImageFileError as e:
        self.logger.exception(e)
        msg = "File: %s does not have a valid NIFTI-1 format." % data_file
        raise ParseException(msg)

    nifti_image_hdr = self.nifti_image.get_header()

    # Check if there is a time dimension (4th dimension).
    nifti_data_shape = nifti_image_hdr.get_data_shape()
    self.nr_dims = len(nifti_data_shape)
    self.has_time_dimension = self.nr_dims > 3
    self.time_dim_size = nifti_data_shape[3] if self.has_time_dimension else 1

    # Extract sample unit measure
    self.units = nifti_image_hdr.get_xyzt_units()

    # Zooms usually hold voxel sizes for x, y, z, plus the step for time and any other dimensions
    self.zooms = nifti_image_hdr.get_zooms()
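# A hedged sketch of how the header fields read above are typically combined
# downstream: when a 4th (time) dimension exists, zooms[3] holds its step and
# the second element of get_xyzt_units() names its unit. The conversion table
# and function name below are illustrative, not part of the parser.
UNITS_TO_MS = {"sec": 1000.0, "msec": 1.0, "usec": 0.001}

def sample_period_ms(zooms, units, has_time_dimension):
    if not has_time_dimension or len(zooms) < 4:
        return 1.0
    return zooms[3] * UNITS_TO_MS.get(units[1], 1.0)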
def parse(self, data_file, data_file_part2=None, surface_type=OPTION_READ_METADATA, should_center=False):
    """
    Parse GIFTI file(s) and return a Surface or a TimeSeries for it.
    :param surface_type: one of "Cortex" "Head" "ReadFromMetaData"
    :param data_file_part2: a file containing the second part of the surface
    """
    self.logger.debug("Start to parse GIFTI file: %s" % data_file)
    if data_file is None:
        raise ParseException("Please select GIFTI file which contains data to import")
    if not os.path.exists(data_file):
        raise ParseException("Provided file %s does not exist" % data_file)
    if data_file_part2 is not None and not os.path.exists(data_file_part2):
        raise ParseException("Provided file part %s does not exist" % data_file_part2)

    try:
        gifti_image = giftiio.read(data_file)
        data_arrays = gifti_image.darrays
        self.logger.debug("File parsed successfully")
        if data_file_part2 is not None:
            data_arrays_part2 = giftiio.read(data_file_part2).darrays
        else:
            data_arrays_part2 = None
    except Exception as excep:
        self.logger.exception(excep)
        msg = "File: %s does not have a valid GIFTI format." % data_file
        raise ParseException(msg)
def _parse_surface(self, data_arrays, data_arrays_part2, surface_type, should_center):
    meta_dict = self._get_meta_dict(data_arrays[0])
    anatomical_structure_primary = meta_dict.get(self.ASP_ATTR)
    gid = meta_dict.get(self.UNIQUE_ID_ATTR)
    subject = meta_dict.get(self.SUBJECT_ATTR)
    title = meta_dict.get(self.NAME_ATTR)

    # Now try to determine what type of surface we have.
    # If a surface type is not explicitly given we use the type specified in the metadata.
    if surface_type == OPTION_READ_METADATA:
        surface_type = anatomical_structure_primary
    if surface_type is None:
        raise ParseException("Please specify the type of the surface")

    surface = make_surface(surface_type)
    if surface is None:
        raise ParseException("Could not determine surface type! %s" % surface_type)

    # Now fill the TVB data type with metadata
    if gid is not None:
        gid = gid.replace("{", "").replace("}", "")
        surface.gid = gid
    if subject is not None:
        surface.subject = subject
    if title is not None:
        surface.title = title

    surface.storage_path = self.storage_path
    surface.zero_based_triangles = True

    # Now fill the TVB data type with geometry data
    vertices = data_arrays[0].data
    triangles = data_arrays[1].data
    vertices_in_lh = len(vertices)

    # If a second file is present, append that data
    if data_arrays_part2 is not None:
        # offset the triangle indices into the combined vertex array
        offset = len(vertices)
        vertices = np.vstack([vertices, data_arrays_part2[0].data])
        triangles = np.vstack([triangles, offset + data_arrays_part2[1].data])

    if should_center:
        vertices = center_vertices(vertices)

    # Set the hemisphere mask for cortical surfaces
    if isinstance(surface, CorticalSurface):
        # if there was a 2nd file then len(vertices) != vertices_in_lh
        surface.hemisphere_mask = np.zeros(len(vertices), dtype=bool)  # np.bool was removed in NumPy 1.24
        surface.hemisphere_mask[vertices_in_lh:] = 1

    surface.vertices = vertices
    surface.triangles = triangles
    return surface
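# An illustration of the hemisphere mask built above: vertices from the first
# file occupy indices [0, vertices_in_lh) and stay False (left hemisphere);
# everything appended from the second file becomes True (right). Toy sizes only.
import numpy as np

vertices_in_lh, total_vertices = 3, 5
hemisphere_mask = np.zeros(total_vertices, dtype=bool)
hemisphere_mask[vertices_in_lh:] = True
# hemisphere_mask -> [False, False, False, True, True]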
def _find_value(self, node_data, candidate_keys):
    """
    Find a value in node data using a list of candidate keys
    :return: string value, or raise ParseException
    """
    for key in candidate_keys:
        if key in node_data:
            return node_data[key]
        else:
            # Try to parse a simple number
            try:
                return float(key)
            except ValueError:
                pass

            # Try to parse a chain of operations between multiple keys
            try:
                split_keys = re.split(self.OPERATORS, key)
                operators = re.findall(self.OPERATORS, key)
                if len(split_keys) < 2 or len(split_keys) != len(operators) + 1:
                    continue

                value = self._find_value(node_data, [split_keys[0]])
                for i in range(0, len(operators)):
                    expression = "value" + operators[i] + str(self._find_value(node_data, [split_keys[i + 1]]))
                    value = eval(expression)
                return value
            except ParseException:
                continue

    msg = "Could not find any of the labels %s in value %s" % (str(candidate_keys), str(node_data))
    self.logger.error(msg)
    raise ParseException(msg)
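# A standalone walk-through of the operator-chain branch above, assuming
# self.OPERATORS is a character class such as r"[-+*/]" (the real pattern lives
# on the parser class). The key and node data are invented; the eval-based
# left-to-right folding mirrors the method itself.
import re

OPERATORS = r"[-+*/]"
key = "dn_x+dn_offset"
node_data = {"dn_x": 10.0, "dn_offset": 2.5}

split_keys = re.split(OPERATORS, key)     # ['dn_x', 'dn_offset']
operators = re.findall(OPERATORS, key)    # ['+']
value = node_data[split_keys[0]]
for op, operand in zip(operators, split_keys[1:]):
    value = eval("value" + op + str(node_data[operand]))
# value == 12.5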
def _find_region_idxs(self, region_label):
    """
    Find IDX in Connectivity based on the label read from the XML
    :param region_label: label as read from the XML (including PREFIX_TVB_ID)
    :return: (left_hemisphere_idx, right_hemisphere_idx)
    :raise: ParseException when no match is found
    """
    short_name = region_label.lower().replace(self.PREFIX_TVB_ID, "")
    left_idx = [self.connectivity_labels.index(prefix + short_name)
                for prefix in self.PREFIX_LEFT if prefix + short_name in self.connectivity_labels]
    right_idx = [self.connectivity_labels.index(prefix + short_name)
                 for prefix in self.PREFIX_RIGHT if prefix + short_name in self.connectivity_labels]

    if len(left_idx) > 0 and len(right_idx) > 0:
        return left_idx[0], right_idx[0]

    raise ParseException("Could not match regionID '%s' from XML with the chosen connectivity" % region_label)
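# A hedged usage sketch of the lookup above with invented prefixes and labels;
# the real PREFIX_* constants and connectivity labels live on the parser.
connectivity_labels = ["l_amygdala", "r_amygdala", "l_insula", "r_insula"]
PREFIX_TVB_ID, PREFIX_LEFT, PREFIX_RIGHT = "tvb_", ["l_"], ["r_"]

region_label = "TVB_Amygdala"
short_name = region_label.lower().replace(PREFIX_TVB_ID, "")
left_idx = [connectivity_labels.index(p + short_name)
            for p in PREFIX_LEFT if p + short_name in connectivity_labels]
right_idx = [connectivity_labels.index(p + short_name)
             for p in PREFIX_RIGHT if p + short_name in connectivity_labels]
# left_idx == [0], right_idx == [1]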
def __init__(self, data_file):
    self.logger = get_logger(__name__)

    if data_file is None:
        raise ParseException("Please select NIFTI file which contains data to import")
    if not os.path.exists(data_file):
        raise ParseException("Provided file %s does not exist" % data_file)

    try:
        self.nifti_image = nib.load(data_file)
    except nib.spatialimages.ImageFileError as e:
        self.logger.exception(e)
        msg = "File: %s does not have a valid NIFTI-1 format." % data_file
        raise ParseException(msg)
def parse(self, data_file):
    """
    Parse GIFTI file and return a TimeSeries for it.
    """
    self.logger.debug("Start to parse GIFTI file: %s" % data_file)
    if data_file is None:
        raise ParseException("Please select GIFTI file which contains data to import")
    if not os.path.exists(data_file):
        raise ParseException("Provided file %s does not exist" % data_file)

    try:
        gifti_image = giftiio.read(data_file)
        self.logger.debug("File parsed successfully")
    except Exception as excep:
        self.logger.exception(excep)
        msg = "File: %s does not have a valid GIFTI format." % data_file
        raise ParseException(msg)
def parse(self, data_file):
    """
    Parse a NIFTI file and return a TimeSeries for it.
    """
    if data_file is None:
        raise ParseException("Please select NIFTI file which contains data to import")
    if not os.path.exists(data_file):
        raise ParseException("Provided file %s does not exist" % data_file)

    try:
        nifti_image = nib.load(data_file)
    except nib.spatialimages.ImageFileError as e:
        self.logger.exception(e)
        msg = "File: %s does not have a valid NIFTI-1 format." % data_file
        raise ParseException(msg)
def parse(self, network):
    """
    Populate Connectivity DataType from a NetworkX object.
    Tested with results from the Connectome Mapper Toolkit.

    :param network: NetworkX graph
    :return: Connectivity object
    """
    graph_size = len(network.nodes())

    weights_matrix = numpy.zeros((graph_size, graph_size))
    tract_matrix = numpy.zeros((graph_size, graph_size))
    labels_vector, positions, cortical, hemisphere = [], [], [], []

    try:
        # Node keys are 1-based in Connectome Mapper output
        for node in range(1, graph_size + 1):
            node_data = network.nodes[node]

            pos = self._find_value(node_data, self.KEY_NODE_COORDINATES)
            positions.append(list(pos))

            label = self._find_value(node_data, self.KEY_NODE_LABEL)
            labels_vector.append(str(label))

            if self.REGION_CORTICAL == self._find_value(node_data, self.KEY_NODE_REGION):
                cortical.append(1)
            else:
                cortical.append(0)

            if self.HEMISPHERE_RIGHT == self._find_value(node_data, self.KEY_NODE_HEMISPHERE):
                hemisphere.append(True)
            else:
                hemisphere.append(False)

        # Iterate over edges
        for start, end in network.edges():
            weights_matrix[start - 1][end - 1] = self._find_value(network.adj[start][end], self.KEY_EDGE_WEIGHT)
            tract_matrix[start - 1][end - 1] = self._find_value(network.adj[start][end], self.KEY_EDGE_TRACT)

        result = Connectivity()
        result.storage_path = self.storage_path
        result.region_labels = labels_vector
        result.centres = positions
        result.set_metadata({'description': 'Array Columns: labels, X, Y, Z'}, 'centres')
        result.hemispheres = hemisphere
        result.cortical = cortical
        result.weights = weights_matrix
        result.tract_lengths = tract_matrix
        return result
    except KeyError as err:
        self.logger.exception("Could not parse Connectivity")
        raise ParseException(err)
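# A minimal sketch of the node and edge attributes parse() expects, with
# hypothetical attribute names standing in for the KEY_* constants on the
# parser (the real keys follow Connectome Mapper conventions). Node keys are
# 1-based, matching the range loop above.
import networkx

network = networkx.Graph()
network.add_node(1, dn_position=(0.0, 1.0, 2.0), dn_name="region_1",
                 dn_region="cortical", dn_hemisphere="right")
network.add_node(2, dn_position=(3.0, 4.0, 5.0), dn_name="region_2",
                 dn_region="subcortical", dn_hemisphere="left")
network.add_edge(1, 2, number_of_fibers=10.0, fiber_length_mean=120.0)
# connectivity = parser.parse(network)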
def _find_value(self, node_data, candidate_keys):
    """
    Find a value in node data using a list of candidate keys
    :return: string value, or raise ParseException
    """
    for key in candidate_keys:
        if key in node_data:
            return node_data[key]

    msg = "Could not find any of the labels %s" % str(candidate_keys)
    self.logger.error(msg)
    raise ParseException(msg)
def launch(self, view_model):
    # type: (ObjSurfaceImporterModel) -> [SurfaceIndex]
    """
    Execute import operations:
    """
    try:
        surface = make_surface(view_model.surface_type)
        if surface is None:
            raise ParseException("Could not determine surface type! %s" % view_model.surface_type)

        surface.zero_based_triangles = True

        with open(view_model.data_file) as f:
            obj = ObjSurface(f)

        if view_model.should_center:
            vertices = center_vertices(obj.vertices)
        else:
            vertices = obj.vertices

        surface.vertices = vertices
        surface.triangles = obj.triangles

        if obj.have_normals:
            self.log.debug("OBJ came with normals included")
            surface.vertex_normals = obj.normals
        else:
            self.log.warning("OBJ came without normals. We will try to compute them...")
            surface.number_of_vertices = surface.vertices.shape[0]
            surface.number_of_triangles = surface.triangles.shape[0]
            surface.compute_triangle_normals()
            surface.compute_vertex_normals()

        validation_result = surface.validate()
        if validation_result.warnings:
            self.add_operation_additional_info(validation_result.summary())

        return h5.store_complete(surface, self.storage_path)
    except ParseException as excep:
        self.log.exception(excep)
        raise LaunchException(excep)
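# A generic sketch of the normals fallback hinted at above: average the normals
# of all triangles incident to each vertex, then renormalise. This is a common
# recipe, not necessarily what surface.compute_vertex_normals() does internally.
import numpy as np

def approximate_vertex_normals(vertices, triangles):
    corners = vertices[triangles]                        # (n_tri, 3, 3)
    face_n = np.cross(corners[:, 1] - corners[:, 0],
                      corners[:, 2] - corners[:, 0])     # per-triangle normals
    acc = np.zeros_like(vertices)
    for i in range(3):                                   # scatter-add onto vertices
        np.add.at(acc, triangles[:, i], face_n)
    norms = np.linalg.norm(acc, axis=1, keepdims=True)
    return acc / np.where(norms == 0.0, 1.0, norms)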
def parse(self, data_file, data_file_part2=None, surface_type=OPTION_READ_METADATA, should_center=False):
    """
    Parse GIFTI file(s) and return a Surface or a TimeSeries for it.
    :param surface_type: one of "Cortex" "Head" "ReadFromMetaData"
    :param data_file_part2: a file containing the second part of the surface
    """
    self.logger.debug("Start to parse GIFTI file: %s" % data_file)
    if data_file is None:
        raise ParseException("Please select GIFTI file which contains data to import")
    if not os.path.exists(data_file):
        raise ParseException("Provided file %s does not exist" % data_file)
    if data_file_part2 is not None and not os.path.exists(data_file_part2):
        raise ParseException("Provided file part %s does not exist" % data_file_part2)

    try:
        gifti_image = nibabel.load(data_file)
        data_arrays = gifti_image.darrays
        self.logger.debug("File parsed successfully")
        if data_file_part2 is not None:
            data_arrays_part2 = nibabel.load(data_file_part2).darrays
        else:
            data_arrays_part2 = None
    except Exception as excep:
        self.logger.exception(excep)
        msg = "File: %s does not have a valid GIFTI format." % data_file
        raise ParseException(msg)

    self.logger.debug("Determine data type stored in GIFTI file")

    # First check if it's a surface
    if self._is_surface_gifti(data_arrays):
        # If a second part exists, it must be of the same type
        if data_arrays_part2 is not None and not self._is_surface_gifti(data_arrays_part2):
            raise ParseException("Second file must be a surface too")
        return self._parse_surface(data_arrays, data_arrays_part2, surface_type, should_center)
    elif self._is_timeseries_gifti(data_arrays):
        return self._parse_timeseries(data_arrays)
    else:
        raise ParseException("Could not map data from GIFTI file to a TVB data type")
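# A hedged usage sketch of the dispatching parse() above: the same entry point
# returns either a Surface or a TimeSeries depending on the GIFTI intent codes.
# The constructor arguments, file names and centering choice are illustrative.
parser = GIFTIParser(storage_path="/tmp/tvb", operation_id=42)
result = parser.parse("lh.pial.gii", data_file_part2="rh.pial.gii",
                      surface_type=OPTION_READ_METADATA, should_center=True)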
def launch(self, surface_type, data_file, should_center=False):
    """
    Execute import operations:
    """
    try:
        surface = make_surface(surface_type)
        if surface is None:
            raise ParseException("Could not determine surface type! %s" % surface_type)

        surface.storage_path = self.storage_path
        surface.set_operation_id(self.operation_id)
        surface.zero_based_triangles = True

        with open(data_file) as f:
            obj = ObjSurface(f)

        if should_center:
            vertices = center_vertices(obj.vertices)
        else:
            vertices = obj.vertices

        surface.vertices = vertices
        surface.triangles = obj.triangles

        if obj.have_normals:
            self.log.debug("OBJ came with normals included")
            surface.vertex_normals = obj.normals
        else:
            self.log.warning("OBJ came without normals. We will try to compute them...")

        validation_result = surface.validate()
        if validation_result.warnings:
            self.add_operation_additional_info(validation_result.summary())

        return [surface]
    except ParseException as excep:
        self.log.exception(excep)
        raise LaunchException(excep)
class GIFTIParser(object):
    """
    This class reads content of a GIFTI file and builds / returns a Surface instance
    filled with details.
    """
    UNIQUE_ID_ATTR = "UniqueID"
    SUBJECT_ATTR = "SubjectID"
    ASP_ATTR = "AnatomicalStructurePrimary"
    DATE_ATTR = "Date"
    DESCRIPTION_ATTR = "Description"
    NAME_ATTR = "Name"
    TIME_STEP_ATTR = "TimeStep"

    def __init__(self, storage_path, operation_id):
        self.logger = get_logger(__name__)
        self.storage_path = storage_path
        self.operation_id = operation_id

    @staticmethod
    def _get_meta_dict(data_array):
        data_array_meta = data_array.meta
        if data_array_meta is None or data_array_meta.data is None:
            return {}
        return dict((meta_pair.name, meta_pair.value) for meta_pair in data_array_meta.data)

    @staticmethod
    def _is_surface_gifti(data_arrays):
        return (len(data_arrays) == 2
                and intent_codes.code["NIFTI_INTENT_POINTSET"] == data_arrays[0].intent
                and data_type_codes.code["NIFTI_TYPE_FLOAT32"] == data_arrays[0].datatype
                and intent_codes.code["NIFTI_INTENT_TRIANGLE"] == data_arrays[1].intent
                and data_type_codes.code["NIFTI_TYPE_INT32"] == data_arrays[1].datatype)

    @staticmethod
    def _is_timeseries_gifti(data_arrays):
        return (len(data_arrays) > 1
                and intent_codes.code["NIFTI_INTENT_TIME_SERIES"] == data_arrays[0].intent
                and data_type_codes.code["NIFTI_TYPE_FLOAT32"] == data_arrays[0].datatype)

    def _parse_surface(self, data_arrays, data_arrays_part2, surface_type, should_center):
        meta_dict = self._get_meta_dict(data_arrays[0])
        anatomical_structure_primary = meta_dict.get(self.ASP_ATTR)
        gid = meta_dict.get(self.UNIQUE_ID_ATTR)
        subject = meta_dict.get(self.SUBJECT_ATTR)
        title = meta_dict.get(self.NAME_ATTR)

        # Now try to determine what type of surface we have.
        # If a surface type is not explicitly given we use the type specified in the metadata.
        if surface_type == OPTION_READ_METADATA:
            surface_type = anatomical_structure_primary
        if surface_type is None:
            raise ParseException("Please specify the type of the surface")

        surface = make_surface(surface_type)
        if surface is None:
            raise ParseException("Could not determine surface type! %s" % surface_type)

        # Now fill the TVB data type with metadata
        if gid is not None:
            gid = gid.replace("{", "").replace("}", "")
            surface.gid = gid
        if subject is not None:
            surface.subject = subject
        if title is not None:
            surface.title = title

        surface.storage_path = self.storage_path
        surface.set_operation_id(self.operation_id)
        surface.zero_based_triangles = True

        # Now fill the TVB data type with geometry data
        vertices = data_arrays[0].data
        triangles = data_arrays[1].data
        vertices_in_lh = len(vertices)

        # If a second file is present, append that data
        if data_arrays_part2 is not None:
            # offset the triangle indices into the combined vertex array
            offset = len(vertices)
            vertices = np.vstack([vertices, data_arrays_part2[0].data])
            triangles = np.vstack([triangles, offset + data_arrays_part2[1].data])

        if should_center:
            vertices = center_vertices(vertices)

        # Set the hemisphere mask for cortical surfaces
        if isinstance(surface, CorticalSurface):
            # if there was a 2nd file then len(vertices) != vertices_in_lh
            surface.hemisphere_mask = np.zeros(len(vertices), dtype=bool)  # np.bool was removed in NumPy 1.24
            surface.hemisphere_mask[vertices_in_lh:] = 1

        surface.vertices = vertices
        surface.triangles = triangles
        return surface

    def _parse_timeseries(self, data_arrays):
        # Create the TVB time series to be filled
        time_series = TimeSeriesSurface()
        time_series.storage_path = self.storage_path
        time_series.set_operation_id(self.operation_id)
        time_series.start_time = 0.0
        time_series.sample_period = 1.0

        # First process the first data_array and extract important data from its metadata
        meta_dict = self._get_meta_dict(data_arrays[0])
        gid = meta_dict.get(self.UNIQUE_ID_ATTR)
        sample_period = meta_dict.get(self.TIME_STEP_ATTR)
        time_series.subject = meta_dict.get(self.SUBJECT_ATTR)
        time_series.title = meta_dict.get(self.NAME_ATTR)

        if gid:
            time_series.gid = gid.replace("{", "").replace("}", "")
        if sample_period:
            time_series.sample_period = float(sample_period)

        # todo: make sure that write_time_slice is not required here
        # Now read time series data
        for data_array in data_arrays:
            time_series.write_data_slice([data_array.data])

        # Close file after writing data
        time_series.close_file()
        return time_series

    def parse(self, data_file, data_file_part2=None, surface_type=OPTION_READ_METADATA, should_center=False):
        """
        Parse GIFTI file(s) and return a Surface or a TimeSeries for it.
        :param surface_type: one of "Cortex" "Head" "ReadFromMetaData"
        :param data_file_part2: a file containing the second part of the surface
        """
        self.logger.debug("Start to parse GIFTI file: %s" % data_file)
        if data_file is None:
            raise ParseException("Please select GIFTI file which contains data to import")
        if not os.path.exists(data_file):
            raise ParseException("Provided file %s does not exist" % data_file)
        if data_file_part2 is not None and not os.path.exists(data_file_part2):
            raise ParseException("Provided file part %s does not exist" % data_file_part2)

        try:
            gifti_image = giftiio.read(data_file)
            data_arrays = gifti_image.darrays
            self.logger.debug("File parsed successfully")
            if data_file_part2 is not None:
                data_arrays_part2 = giftiio.read(data_file_part2).darrays
            else:
                data_arrays_part2 = None
        except Exception as excep:
            self.logger.exception(excep)
            msg = "File: %s does not have a valid GIFTI format." % data_file
            raise ParseException(msg)

        self.logger.debug("Determine data type stored in GIFTI file")

        # First check if it's a surface
        if self._is_surface_gifti(data_arrays):
            # If a second part exists, it must be of the same type
            if data_arrays_part2 is not None and not self._is_surface_gifti(data_arrays_part2):
                raise ParseException("Second file must be a surface too")
            return self._parse_surface(data_arrays, data_arrays_part2, surface_type, should_center)
        elif self._is_timeseries_gifti(data_arrays):
            return self._parse_timeseries(data_arrays)
        else:
            raise ParseException("Could not map data from GIFTI file to a TVB data type")
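# A small illustration of the two intents checked by _is_surface_gifti, built
# with nibabel's GIFTI API (toy geometry). If the assembled image were written
# out and read back, _is_surface_gifti(image.darrays) should hold.
import numpy as np
from nibabel.gifti import GiftiImage, GiftiDataArray

vertices = GiftiDataArray(np.zeros((4, 3), dtype=np.float32),
                          intent="NIFTI_INTENT_POINTSET")
faces = GiftiDataArray(np.zeros((2, 3), dtype=np.int32),
                       intent="NIFTI_INTENT_TRIANGLE")
image = GiftiImage(darrays=[vertices, faces])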
class GIFTIParser(object):
    """
    This class reads content of a GIFTI file and builds / returns a Surface instance
    filled with details.
    """
    UNIQUE_ID_ATTR = "UniqueID"
    SUBJECT_ATTR = "SubjectID"
    ASP_ATTR = "AnatomicalStructurePrimary"
    DATE_ATTR = "Date"
    DESCRIPTION_ATTR = "Description"
    NAME_ATTR = "Name"
    TIME_STEP_ATTR = "TimeStep"

    def __init__(self, storage_path, operation_id):
        self.logger = get_logger(__name__)
        self.storage_path = storage_path
        self.operation_id = operation_id

    def parse(self, data_file):
        """
        Parse GIFTI file and return a Surface or a TimeSeries for it.
        """
        self.logger.debug("Start to parse GIFTI file: %s" % data_file)
        if data_file is None:
            raise ParseException("Please select GIFTI file which contains data to import")
        if not os.path.exists(data_file):
            raise ParseException("Provided file %s does not exist" % data_file)

        try:
            gifti_image = giftiio.read(data_file)
            self.logger.debug("File parsed successfully")
        except Exception as excep:
            self.logger.exception(excep)
            msg = "File: %s does not have a valid GIFTI format." % data_file
            raise ParseException(msg)

        # Now try to determine what data is stored in the GIFTI file
        data_arrays = gifti_image.darrays

        self.logger.debug("Determine data type stored in GIFTI file")

        # First check if it's a surface
        if (len(data_arrays) == 2
                and intent_codes.code["NIFTI_INTENT_POINTSET"] == data_arrays[0].intent
                and data_type_codes.code["NIFTI_TYPE_FLOAT32"] == data_arrays[0].datatype
                and intent_codes.code["NIFTI_INTENT_TRIANGLE"] == data_arrays[1].intent
                and data_type_codes.code["NIFTI_TYPE_INT32"] == data_arrays[1].datatype):

            # Now try to determine what type of surface we have
            data_array_meta = data_arrays[0].meta
            surface = None
            gid = None
            subject = None
            title = None

            if data_array_meta is not None and data_array_meta.data is not None and len(data_array_meta.data) > 0:
                anatomical_structure_primary = None

                for meta_pair in data_array_meta.data:
                    if meta_pair.name == self.ASP_ATTR:
                        anatomical_structure_primary = meta_pair.value
                    elif meta_pair.name == self.UNIQUE_ID_ATTR:
                        gid = meta_pair.value.replace("{", "").replace("}", "")
                    elif meta_pair.name == self.SUBJECT_ATTR:
                        subject = meta_pair.value
                    elif meta_pair.name == self.NAME_ATTR:
                        title = meta_pair.value

                # Based on info found in meta, create the correct surface type
                if anatomical_structure_primary == "Head":
                    surface = SkinAir()
                elif anatomical_structure_primary is not None and anatomical_structure_primary.startswith("Cortex"):
                    # guard against a missing ASP attribute before calling startswith
                    surface = CorticalSurface()

            if surface is None:
                raise ParseException("Could not determine type of the surface")

            # Now fill the TVB data type with info
            if gid is not None:
                surface.gid = gid

            surface.storage_path = self.storage_path
            surface.set_operation_id(self.operation_id)
            surface.zero_based_triangles = True

            surface.vertices = data_arrays[0].data
            surface.triangles = data_arrays[1].data

            if subject is not None:
                surface.subject = subject
            if title is not None:
                surface.title = title

            return surface

        elif (len(data_arrays) > 1
                and intent_codes.code["NIFTI_INTENT_TIME_SERIES"] == data_arrays[0].intent
                and data_type_codes.code["NIFTI_TYPE_FLOAT32"] == data_arrays[0].datatype):

            # Create the TVB time series to be filled
            time_series = TimeSeriesSurface()
            time_series.storage_path = self.storage_path
            time_series.set_operation_id(self.operation_id)
            time_series.start_time = 0.0
            time_series.sample_period = 1.0

            # First process the first data_array and extract important data from its metadata
            data_array_meta = data_arrays[0].meta
            if data_array_meta is not None and data_array_meta.data is not None and len(data_array_meta.data) > 0:
                for meta_pair in data_array_meta.data:
                    if meta_pair.name == self.UNIQUE_ID_ATTR:
                        gid = meta_pair.value.replace("{", "").replace("}", "")
                        if gid is not None and len(gid) > 0:
                            time_series.gid = gid
                    elif meta_pair.name == self.SUBJECT_ATTR:
                        time_series.subject = meta_pair.value
                    elif meta_pair.name == self.NAME_ATTR:
                        time_series.title = meta_pair.value
                    elif meta_pair.name == self.TIME_STEP_ATTR:
                        time_series.sample_period = float(meta_pair.value)

            # Now read time series data
            for data_array in data_arrays:
                time_series.write_data_slice([data_array.data])

            # Close file after writing data
            time_series.close_file()
            return time_series
        else:
            raise ParseException("Could not map data from GIFTI file to a TVB data type")