def get_subgraph_masks(cache, structure_ids, target_hemisphere_id=3):
    # construct new masks for subgraph
    # useful if further subdivision is wanted or to map back to 3D CCF space
    row_mask = Mask.from_cache(cache, structure_ids=structure_ids,
                               hemisphere_id=2)
    col_mask = Mask.from_cache(cache, structure_ids=structure_ids,
                               hemisphere_id=target_hemisphere_id)
    return row_mask, col_mask
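
# Usage sketch for get_subgraph_masks, assuming a VoxelModelCache manifest on
# disk and the voxel_array indexing used elsewhere in this file; the structure
# ids (385=VISp, 409=VISl) and manifest path are illustrative placeholders.
def demo_subgraph_masks(manifest_file='voxel_model_manifest.json'):
    cache = VoxelModelCache(manifest_file=manifest_file)
    voxel_array, source_mask, target_mask = cache.get_voxel_connectivity_array()

    structure_ids = [385, 409]
    row_mask, col_mask = get_subgraph_masks(cache, structure_ids)

    # pull the matching sub-block out of the full connectivity matrix
    rows = source_mask.get_structure_indices(structure_ids=structure_ids,
                                             hemisphere_id=2)
    cols = target_mask.get_structure_indices(structure_ids=structure_ids,
                                             hemisphere_id=3)
    subgraph = voxel_array[rows, cols]

    # the new masks map subgraph rows/columns back to 3D CCF space
    return col_mask.map_masked_to_annotation(subgraph.sum(axis=0))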
def filter_projection(cache, cortical_projection):
    # TODO: bilateral for now, should improve in future
    cortical_mask = Mask.from_cache(cache, structure_ids=[315], hemisphere_id=3)
    cortical_structures = get_cortical_structure_ids(cache)

    result = np.zeros_like(cortical_projection)
    for sid in cortical_structures:
        logging.debug("Filtering target %d", sid)

        # split the structure mask at the midline into left/right hemispheres
        left_mask = cache.get_structure_mask(sid)[0]
        right_mask = left_mask.copy()
        left_mask[..., left_mask.shape[-1] // 2:] = 0
        right_mask[..., :right_mask.shape[-1] // 2] = 0

        for mask, hid in zip((right_mask, left_mask), (2, 1)):
            key = cortical_mask.get_structure_indices(structure_ids=[sid],
                                                      hemisphere_id=hid)
            result += masked_filter(cortical_projection, mask, key=key,
                                    kind='median', axis=1,
                                    bounds=((-2, 2), (-2, 2), (-2, 2)))
    return result
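
# The hemisphere split in filter_projection relies on the CCF convention that
# the midline falls at the center of the last axis. A minimal self-contained
# sketch of the same trick on a toy volume (the shape is arbitrary):
def demo_hemisphere_split():
    volume = np.ones((4, 4, 8))
    left = volume.copy()
    right = volume.copy()
    left[..., left.shape[-1] // 2:] = 0    # zero out the right half
    right[..., :right.shape[-1] // 2] = 0  # zero out the left half
    assert left.sum() == right.sum() == volume.sum() / 2
    return left, right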
def test_repr(mcc):
    # ------------------------------------------------------------------------
    # test structure_ids <= 3
    structure_ids = list(range(3))
    mask = Mask.from_cache(mcc, structure_ids=structure_ids, hemisphere_id=3)

    s = "Mask(hemisphere_id=3, structure_ids=[0, 1, 2])"
    assert repr(mask) == s

    # ------------------------------------------------------------------------
    # test structure_ids > 3
    structure_ids = list(range(10))
    mask = Mask.from_cache(mcc, structure_ids=structure_ids, hemisphere_id=3)

    s = "Mask(hemisphere_id=3, structure_ids=[0, ..., 9])"
    assert repr(mask) == s
def get_target_mask(self, target, hemisphere):
    """Builds the target Mask used to transform projection data from
    linear arrays to 3D volumes, and stores it on self.tgt_mask.
    """
    target_ids = self._get_structure_id(target)
    self.tgt_mask = Mask.from_cache(
        self.cache, structure_ids=target_ids,
        hemisphere_id=self.hemispheres[hemisphere])
def get_injection_ratio_contained(experiment):
    # np.ndarray mask covering one hemisphere
    mask = np.ones_like(experiment.injection_density)
    mask[..., :mask.shape[2] // 2] = 0
    assert experiment.get_injection_ratio_contained(mask) == 0.5

    # Mask object
    mask = Mask(mcc, [2, 3], hemisphere_id=3)
    assert isinstance(experiment.get_injection_ratio_contained(mask), float)

    # wrong np.ndarray size
    assert_raises(ValueError, experiment.get_injection_ratio_contained,
                  np.ones((2, 2)))
def mask_volume(experiment):
    # np.ndarray mask
    mask = np.ones_like(experiment.injection_density)
    mask[..., :mask.shape[2] // 2] = 0

    n_nnz = len(mask.nonzero()[0])
    assert experiment.mask_volume("injection_density", mask).shape == (n_nnz,)

    # Mask object
    mask = Mask(mcc, [2, 3], hemisphere_id=3)
    assert len(experiment.mask_volume("injection_density", mask).shape) == 1

    # invalid volume name
    assert_raises(ValueError, experiment.mask_volume, "data", mask)

    # wrong np.ndarray size
    assert_raises(ValueError, experiment.mask_volume, "injection_density",
                  np.ones((2, 2)))
def get_full_map(cache, structures, experiments_exclude):
    # get ids
    structure_ids = [s['id'] for s in
                     cache.get_structure_tree().get_structures_by_acronym(structures)]

    # get summary structure ids
    ss_ids = get_ss_ids(cache)
    target_mask = Mask.from_cache(cache, structure_ids=structure_ids,
                                  hemisphere_id=3)  # bilateral (unused below)

    # initialize by pulling data
    full_map = {s: dict() for s in structures}
    for sid, structure_map in zip(structure_ids, full_map.values()):
        # get data
        model_data = VoxelData(cache,
                               injection_structure_ids=[sid],
                               injection_hemisphere_id=2,
                               flip_experiments=True,
                               normalized_injection=False)
        experiments = cache.get_experiments(cre=False,
                                            injection_structure_ids=[sid])
        logging.debug("%d experiments found for structure %d",
                      len(experiments), sid)

        eids = {e['id'] for e in experiments} - set(experiments_exclude)
        model_data.get_experiment_data(eids)

        regions = get_summary_structures(model_data.injection_mask, ss_ids)

        structure_map.update(dict(regions=regions, n_regions=len(regions)))
        structure_map["region_map"] = get_nexps_regions(cache, model_data, regions)
        structure_map["centroid_stats"] = get_centroid_stats(model_data)
        structure_map["injection_stats"] = get_injection_stats(model_data)

        pull_out_region_volume_stats(structure_map)
        pull_out_region_exps_stats(structure_map)
        pull_out_centroid_stats(structure_map)
        pull_out_injection_stats(structure_map)

    return full_map
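
# Hypothetical driver for get_full_map; the manifest path and structure
# acronyms are placeholders, and experiments_exclude would normally be read
# from a JSON file as in main() below.
def demo_full_map(manifest_file='voxel_model_manifest.json'):
    cache = VoxelModelCache(manifest_file=manifest_file)
    return get_full_map(cache, structures=['VISp', 'MOp'],
                        experiments_exclude=[])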
def get_projection(cache, region_id, full=False, filtered=False):
    def get_centroid_voxel():
        region_mask = cache.get_structure_mask(region_id)[0]
        region_mask[..., :region_mask.shape[2] // 2] = 0  # ipsi
        return np.argwhere(region_mask).mean(axis=0).astype(int)

    # get voxel array
    voxel_array, source_mask, target_mask = cache.get_voxel_connectivity_array()

    # get cortical targets
    col_idx = target_mask.get_structure_indices(structure_ids=[315],
                                                hemisphere_id=3)
    if full:
        logging.debug("Filling region")
        row_idx = source_mask.get_structure_indices(structure_ids=[region_id])
        projection = voxel_array[row_idx, col_idx]

        if filtered:
            logging.debug("Filtering target")
            projection = filter_projection(cache, projection)

        projection = projection.sum(axis=0)  # we are normalizing anyway
    else:
        # single voxel in center
        logging.debug("Computing region centroid")
        row_idx = source_mask.get_flattened_voxel_index(get_centroid_voxel())
        projection = voxel_array[row_idx, col_idx]

        if filtered:
            logging.error("filtered keyword only implemented for full region")

    # scale to be in [0, 1]
    projection /= projection.max()
    projection.clip(min=0, max=1, out=projection)

    cortical_mask = Mask.from_cache(cache, structure_ids=[315], hemisphere_id=3)
    return cortical_mask.map_masked_to_annotation(projection)
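
# Example use of get_projection, assuming VISp (id 385) as the source region;
# the returned volume is in annotation (CCF) space, so a top-down view can be
# taken by a max over the dorsoventral axis (axis=1 here is an assumption
# about the annotation orientation).
def demo_projection(cache, region_id=385):
    volume = get_projection(cache, region_id, full=True, filtered=False)
    top_down = volume.max(axis=1)
    return volume, top_down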
def scoring_dict(self):
    def get_nnz_assigned(key):
        assigned = np.unique(key)
        if assigned[0] == 0:
            return assigned[1:]
        return assigned

    # target is whole brain
    target_mask = Mask.from_cache(cache=self.cache, hemisphere_id=3)
    ipsi_key = target_mask.get_key(structure_ids=self.structure_ids,
                                   hemisphere_id=2)
    contra_key = target_mask.get_key(structure_ids=self.structure_ids,
                                     hemisphere_id=1)

    reg_kwargs = dict(ipsi_key=ipsi_key,
                      contra_key=contra_key,
                      ipsi_regions=get_nnz_assigned(ipsi_key),
                      contra_regions=get_nnz_assigned(contra_key))

    return dict(voxel=self.voxel_scorer(),
                regional=self.regional_scorer(**reg_kwargs))
def main():
    input_data = ju.read(INPUT_JSON)

    structures = input_data.get('structures')

    manifest_file = input_data.get('manifest_file')
    manifest_file = os.path.join(TOP_DIR, manifest_file)

    log_level = input_data.get('log_level', logging.DEBUG)
    logging.getLogger().setLevel(log_level)

    # experiments to exclude
    experiments_exclude = ju.read(EXPERIMENTS_EXCLUDE_JSON)

    # load hyperparameter dict
    suffix = 'log' if LOG else 'standard'
    hyperparameter_json = os.path.join(OUTPUT_DIR,
                                       'hyperparameters-%s.json' % suffix)
    hyperparameters = ju.read(hyperparameter_json)

    # get caching object
    cache = VoxelModelCache(manifest_file=manifest_file)

    # get structure ids
    structure_ids = [get_structure_id(cache, s) for s in structures]

    # mask for reordering source
    annotation = cache.get_annotation_volume()[0]
    cumm_source_mask = np.zeros(annotation.shape, dtype=int)

    offset = 1  # start @ 1 so that nonzero can be used
    weights, nodes = [], []
    for sid, sac in zip(structure_ids, structures):
        logging.debug("Building model for structure: %s", sac)
        data, reg = fit_structure(cache, sid, experiments_exclude,
                                  hyperparameters[sac], model_option=suffix)
        w = reg.get_weights(data.injection_mask.coordinates)

        # assign ordering to full source
        ordering = np.arange(offset, w.shape[0] + offset, dtype=int)
        offset += w.shape[0]

        # get source mask
        data.injection_mask.fill_volume_where_masked(cumm_source_mask, ordering)

        # append to list
        weights.append(w)
        nodes.append(reg.nodes)

    # stack
    weights = padded_diagonal_fill(weights)
    nodes = np.vstack(nodes)

    # need to reorder weights (subtract 1 to get proper index)
    permutation = cumm_source_mask[cumm_source_mask.nonzero()] - 1
    weights = weights[permutation, :]

    # regionalized
    logging.debug('regionalizing voxel weights')
    ontological_order = get_ordered_summary_structures(cache)
    source_mask = Mask.from_cache(cache, structure_ids=structure_ids,
                                  hemisphere_id=2)

    # the projection mask is shared across structure models; reuse the last `data`
    source_key = source_mask.get_key(structure_ids=ontological_order)
    ipsi_key = data.projection_mask.get_key(structure_ids=ontological_order,
                                            hemisphere_id=2)
    contra_key = data.projection_mask.get_key(structure_ids=ontological_order,
                                              hemisphere_id=1)

    ipsi_model = RegionalizedModel(weights, nodes, source_key, ipsi_key,
                                   ordering=ontological_order, dataframe=True)
    contra_model = RegionalizedModel(weights, nodes, source_key, contra_key,
                                     ordering=ontological_order, dataframe=True)

    def get_metric(s):
        return pd.concat((getattr(ipsi_model, s), getattr(contra_model, s)),
                         keys=('ipsi', 'contra'), axis=1)

    # write results
    output_dir = os.path.join(TOP_DIR, 'connectivity',
                              'voxel-%s-model' % suffix)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # regionalized
    logging.debug('saving to directory: %s', output_dir)
    get_metric('connection_density').to_csv(
        os.path.join(output_dir, 'connection_density.csv'))
    get_metric('connection_strength').to_csv(
        os.path.join(output_dir, 'connection_strength.csv'))
    get_metric('normalized_connection_density').to_csv(
        os.path.join(output_dir, 'normalized_connection_density.csv'))
    get_metric('normalized_connection_strength').to_csv(
        os.path.join(output_dir, 'normalized_connection_strength.csv'))

    # voxel
    ju.write(os.path.join(output_dir, 'target_mask_params.json'),
             dict(structure_ids=structure_ids, hemisphere_id=3))
    ju.write(os.path.join(output_dir, 'source_mask_params.json'),
             dict(structure_ids=structure_ids, hemisphere_id=2))
    np.savetxt(os.path.join(output_dir, 'weights.csv.gz'),
               weights.astype(np.float32), delimiter=',')
    np.savetxt(os.path.join(output_dir, 'nodes.csv.gz'),
               nodes.astype(np.float32), delimiter=',')
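
# assumed entry point: run the full pipeline when executed as a script
if __name__ == '__main__':
    main()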
def bi_mask(mcc, structure_ids):
    return Mask.from_cache(mcc, structure_ids=structure_ids, hemisphere_id=3)
def contra_mask(mcc, structure_ids):
    return Mask.from_cache(mcc, structure_ids=structure_ids, hemisphere_id=1)
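
# For symmetry with bi_mask/contra_mask above, an assumed ipsilateral
# counterpart; throughout this code hemisphere_id 1 is contralateral (left),
# 2 is ipsilateral (right), and 3 is bilateral.
def ipsi_mask(mcc, structure_ids):
    return Mask.from_cache(mcc, structure_ids=structure_ids, hemisphere_id=2)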