def main(connectivity_path, output_path, coords_path, distances_namespace,
         destination, bin_size, cache_size, verbose):
    """
    Measures vertex distribution with respect to septo-temporal distance.

    :param connectivity_path:
    :param output_path:
    :param coords_path:
    :param distances_namespace:
    :param destination:
    :param bin_size:
    :param cache_size:
    :param verbose:
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if rank == 0:
        logger.info('reading %s distances...' % destination)

    destination_soma_distances = bcast_cell_attributes(
        coords_path, destination, namespace=distances_namespace, comm=comm, root=0)

    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in destination_soma_distances:
        destination_soma_distance_U[k] = v['U Distance'][0]
        destination_soma_distance_V[k] = v['V Distance'][0]

    del destination_soma_distances

    sources = []
    for (src, dst) in read_projection_names(connectivity_path):
        if dst == destination:
            sources.append(src)

    source_soma_distances = {}
    for s in sources:
        if rank == 0:
            logger.info('reading %s distances...' % s)
        source_soma_distances[s] = bcast_cell_attributes(
            coords_path, s, namespace=distances_namespace, comm=comm, root=0)

    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        for k, v in source_soma_distances[s]:
            this_source_soma_distance_U[k] = v['U Distance'][0]
            this_source_soma_distance_V[k] = v['V Distance'][0]
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V

    del source_soma_distances

    if rank == 0:
        logger.info('reading connections %s -> %s...' % (str(sources), destination))
    gg = [NeuroH5ProjectionGen(connectivity_path, source, destination,
                               cache_size=cache_size, comm=comm)
          for source in sources]

    dist_bins = defaultdict(dict)
    dist_u_bins = defaultdict(dict)
    dist_v_bins = defaultdict(dict)

    for prj_gen_tuple in utils.zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if not all([prj_gen_elt[0] == destination_gid for prj_gen_elt in prj_gen_tuple]):
            raise Exception('destination %s: destination_gid %i not matched across multiple '
                            'projection generators: %s' %
                            (destination, destination_gid,
                             [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            logger.info('reading connections of gid %i' % destination_gid)
            for (source, (this_destination_gid, rest)) in zip(sources, prj_gen_tuple):
                this_source_soma_distance_U = source_soma_distance_U[source]
                this_source_soma_distance_V = source_soma_distance_V[source]
                this_dist_bins = dist_bins[source]
                this_dist_u_bins = dist_u_bins[source]
                this_dist_v_bins = dist_v_bins[source]
                (source_indexes, attr_dict) = rest
                dst_U = destination_soma_distance_U[destination_gid]
                dst_V = destination_soma_distance_V[destination_gid]
                for source_gid in source_indexes:
                    # septo-temporal (U) and transverse (V) displacement;
                    # total distance is the L1 norm of the two components
                    dist_u = dst_U - this_source_soma_distance_U[source_gid]
                    dist_v = dst_V - this_source_soma_distance_V[source_gid]
                    dist = abs(dist_u) + abs(dist_v)

                    update_bins(this_dist_bins, bin_size, dist)
                    update_bins(this_dist_u_bins, bin_size, dist_u)
                    update_bins(this_dist_v_bins, bin_size, dist_v)
    comm.barrier()

    if rank == 0:
        logger.info('merging distance dictionaries...')
    add_bins_op = MPI.Op.Create(add_bins, commute=True)
    for source in sources:
        dist_bins[source] = comm.reduce(dist_bins[source], op=add_bins_op, root=0)
        dist_u_bins[source] = comm.reduce(dist_u_bins[source], op=add_bins_op, root=0)
        dist_v_bins[source] = comm.reduce(dist_v_bins[source], op=add_bins_op, root=0)
    comm.barrier()

    if rank == 0:
        if output_path is None:
            output_path = connectivity_path
        logger.info('writing output to %s...' % output_path)
        for source in sources:
            dist_histoCount, dist_bin_edges = finalize_bins(dist_bins[source], bin_size)
            dist_u_histoCount, dist_u_bin_edges = finalize_bins(dist_u_bins[source], bin_size)
            dist_v_histoCount, dist_v_bin_edges = finalize_bins(dist_v_bins[source], bin_size)
            np.savetxt('%s Distance U Bin Count.dat' % source, dist_u_histoCount)
            np.savetxt('%s Distance U Bin Edges.dat' % source, dist_u_bin_edges)
            np.savetxt('%s Distance V Bin Count.dat' % source, dist_v_histoCount)
            np.savetxt('%s Distance V Bin Edges.dat' % source, dist_v_bin_edges)
            np.savetxt('%s Distance Bin Count.dat' % source, dist_histoCount)
            np.savetxt('%s Distance Bin Edges.dat' % source, dist_bin_edges)
    comm.barrier()
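
# The helpers update_bins, add_bins and finalize_bins used above are defined
# elsewhere in this repository. The following is a minimal sketch of the
# semantics this script assumes (the _sketch names are hypothetical, not the
# actual implementations); numpy is assumed imported as np, as elsewhere here.

def update_bins_sketch(bins, bin_size, value):
    """Increment the count of the histogram bin containing value."""
    bin_index = int(value // bin_size)
    bins[bin_index] = bins.get(bin_index, 0) + 1


def add_bins_sketch(bins1, bins2, datatype=None):
    """Merge two bin-count dictionaries; commutative, so it can back an MPI reduce op."""
    for bin_index, count in bins2.items():
        bins1[bin_index] = bins1.get(bin_index, 0) + count
    return bins1


def finalize_bins_sketch(bins, bin_size):
    """Convert a sparse bin-count dictionary into dense (counts, edges) arrays."""
    min_bin, max_bin = min(bins), max(bins)
    counts = np.asarray([bins.get(i, 0) for i in range(min_bin, max_bin + 1)])
    edges = np.asarray([i * bin_size for i in range(min_bin, max_bin + 2)])
    return counts, edges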
def main(config, config_prefix, weights_path, weights_namespace, weights_name,
         connections_path, destination, sources, io_size, chunk_size,
         value_chunk_size, write_size, cache_size, verbose, dry_run):
    """
    :param config: str (path to .yaml config file)
    :param config_prefix: str
    :param weights_path: str (path to .h5 file)
    :param weights_namespace: str
    :param weights_name: str
    :param connections_path: str (path to .h5 file)
    :param destination: str
    :param sources: list of str
    :param io_size: int
    :param chunk_size: int
    :param value_chunk_size: int
    :param write_size: int
    :param cache_size: int
    :param verbose: bool
    :param dry_run: bool
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(os.path.basename(__file__))

    comm = MPI.COMM_WORLD
    rank = comm.rank

    env = Env(comm=comm, config_file=config, config_prefix=config_prefix)

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%i ranks have been allocated' % comm.size)

    if (not dry_run) and (rank == 0):
        if not os.path.isfile(weights_path):
            input_file = h5py.File(connections_path, 'r')
            output_file = h5py.File(weights_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    comm.barrier()

    seed_offset = int(env.model_config['Random Seeds']['GC Log-Normal Weights 1'])

    pop_ranges, pop_size = read_population_ranges(connections_path, comm=comm)

    count = 0
    gid_count = 0
    start_time = time.time()

    connection_gen_list = [NeuroH5ProjectionGen(connections_path, source, destination,
                                                namespaces=['Synapses'],
                                                comm=comm, io_size=io_size)
                           for source in sources]

    weights_dict = {}
    for attr_gen_package in utils.zip_longest(*connection_gen_list):
        local_time = time.time()
        source_syn_dict = defaultdict(list)
        source_gid_array = None
        conn_attr_dict = None
        destination_gid = attr_gen_package[0][0]
        if not all([attr_gen_items[0] == destination_gid for attr_gen_items in attr_gen_package]):
            raise Exception('Rank: %i; destination: %s; destination_gid %i not matched across '
                            'multiple attribute generators: %s' %
                            (rank, destination, destination_gid,
                             str([attr_gen_items[0] for attr_gen_items in attr_gen_package])))
        if destination_gid is not None:
            seed = int(destination_gid + seed_offset)
            for this_destination_gid, (source_gid_array, conn_attr_dict) in attr_gen_package:
                for j in range(len(source_gid_array)):
                    this_source_gid = source_gid_array[j]
                    this_syn_id = conn_attr_dict['Synapses']['syn_id'][j]
                    source_syn_dict[this_source_gid].append(this_syn_id)
            # mu, sigma and clip are module-level parameters of this script
            # (defined elsewhere, not shown in this section)
            weights_dict[destination_gid] = \
                synapses.generate_log_normal_weights(weights_name, mu, sigma, seed,
                                                     source_syn_dict, clip=clip)
            logger.info('Rank %i; destination: %s; destination gid %i; sources: %s; '
                        'generated log-normal weights for %i inputs in %.2f s' %
                        (rank, destination, destination_gid, list(sources),
                         len(weights_dict[destination_gid]['syn_id']),
                         time.time() - local_time))
            count += 1
        else:
            logger.info('Rank: %i received destination_gid as None' % rank)
        gid_count += 1

        if (write_size > 0) and (gid_count % write_size == 0):
            if not dry_run:
                append_cell_attributes(weights_path, destination, weights_dict,
                                       namespace=weights_namespace, comm=comm,
                                       io_size=io_size, chunk_size=chunk_size,
                                       value_chunk_size=value_chunk_size)
            del source_syn_dict
            del source_gid_array
            del conn_attr_dict
            weights_dict.clear()
            gc.collect()

    if not dry_run:
        append_cell_attributes(weights_path, destination, weights_dict,
                               namespace=weights_namespace, comm=comm,
                               io_size=io_size, chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
    global_count = comm.gather(count, root=0)
    if rank == 0:
        logger.info('destination: %s; %i ranks generated log-normal weights for %i cells in %.2f s' %
                    (destination, comm.size, np.sum(global_count), time.time() - start_time))
    MPI.Finalize()
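
# A minimal sketch of what synapses.generate_log_normal_weights is assumed to
# produce: a dict with a 'syn_id' array and a parallel weight array keyed by
# weights_name, with one log-normally distributed weight per source gid shared
# by all synapses made by that source. Illustrative only; the actual
# implementation lives in the synapses module, and clip is assumed here to be
# an optional (min, max) pair.

def generate_log_normal_weights_sketch(weights_name, mu, sigma, seed,
                                       source_syn_dict, clip=None):
    local_random = np.random.RandomState(seed)
    source_gids = sorted(source_syn_dict)
    # draw one weight per presynaptic source gid
    source_weights = local_random.lognormal(mu, sigma, len(source_gids))
    syn_id_list = []
    weight_list = []
    for source_gid, weight in zip(source_gids, source_weights):
        for syn_id in source_syn_dict[source_gid]:
            syn_id_list.append(syn_id)
            weight_list.append(weight)
    weights = np.asarray(weight_list, dtype=np.float32)
    if clip is not None:
        np.clip(weights, clip[0], clip[1], out=weights)
    return {'syn_id': np.asarray(syn_id_list, dtype=np.uint32),
            weights_name: weights}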
def vertex_distribution(connectivity_path, coords_path, distances_namespace,
                        destination, sources, bin_size=20.0, cache_size=100,
                        comm=None):
    """
    Obtain spatial histograms of source vertices connecting to a given
    destination population.

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace:
    :param destination:
    :param sources:
    :param bin_size:
    :param cache_size:
    :param comm:
    """
    if comm is None:
        comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    # comm0 contains only rank 0; used for serial reads of cell attributes
    color = 0
    if rank == 0:
        color = 1
    comm0 = comm.Split(color, 0)

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    destination_soma_distances = {}
    if rank == 0:
        logger.info(f'Reading {destination} distances...')
        distances_iter = read_cell_attributes(
            coords_path, destination, comm=comm0,
            mask=set(['U Distance', 'V Distance']),
            namespace=distances_namespace)
        destination_soma_distances = {
            k: (float(v['U Distance'][0]), float(v['V Distance'][0]))
            for (k, v) in distances_iter
        }
        gc.collect()

    comm.barrier()
    destination_soma_distances = comm.bcast(destination_soma_distances, root=0)

    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in viewitems(destination_soma_distances):
        destination_soma_distance_U[k] = v[0]
        destination_soma_distance_V[k] = v[1]

    del destination_soma_distances

    if sources == ():
        sources = []
        for (src, dst) in read_projection_names(connectivity_path):
            if dst == destination:
                sources.append(src)

    source_soma_distances = {}
    if rank == 0:
        for s in sources:
            logger.info(f'Reading {s} distances...')
            distances_iter = read_cell_attributes(
                coords_path, s, comm=comm0,
                mask=set(['U Distance', 'V Distance']),
                namespace=distances_namespace)
            source_soma_distances[s] = {
                k: (float(v['U Distance'][0]), float(v['V Distance'][0]))
                for (k, v) in distances_iter
            }
            gc.collect()

    comm.barrier()
    comm0.Free()
    source_soma_distances = comm.bcast(source_soma_distances, root=0)

    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        for k, v in viewitems(source_soma_distances[s]):
            this_source_soma_distance_U[k] = v[0]
            this_source_soma_distance_V[k] = v[1]
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V

    del source_soma_distances

    if rank == 0:
        logger.info('Reading connections %s -> %s...' % (str(sources), destination))

    dist_bins = defaultdict(dict)
    dist_u_bins = defaultdict(dict)
    dist_v_bins = defaultdict(dict)

    gg = [NeuroH5ProjectionGen(connectivity_path, source, destination,
                               cache_size=cache_size, comm=comm)
          for source in sources]

    for prj_gen_tuple in zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if rank == 0 and destination_gid is not None:
            logger.info('%d' % destination_gid)
        if not all([prj_gen_elt[0] == destination_gid for prj_gen_elt in prj_gen_tuple]):
            raise RuntimeError('destination %s: destination gid %i not matched across multiple '
                               'projection generators: %s' %
                               (destination, destination_gid,
                                [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            for (source, (this_destination_gid, rest)) in zip_longest(sources, prj_gen_tuple):
                this_source_soma_distance_U = source_soma_distance_U[source]
                this_source_soma_distance_V = source_soma_distance_V[source]
                this_dist_bins = dist_bins[source]
                this_dist_u_bins = dist_u_bins[source]
                this_dist_v_bins = dist_v_bins[source]
                (source_indexes, attr_dict) = rest
                dst_U = destination_soma_distance_U[destination_gid]
                dst_V = destination_soma_distance_V[destination_gid]
                for source_gid in source_indexes:
                    dist_u = dst_U - this_source_soma_distance_U[source_gid]
                    dist_v = dst_V - this_source_soma_distance_V[source_gid]
                    dist = abs(dist_u) + abs(dist_v)

                    update_bins(this_dist_bins, bin_size, dist)
                    update_bins(this_dist_u_bins, bin_size, dist_u)
                    update_bins(this_dist_v_bins, bin_size, dist_v)

    add_bins_op = MPI.Op.Create(add_bins, commute=True)
    for source in sources:
        dist_bins[source] = comm.reduce(dist_bins[source], op=add_bins_op)
        dist_u_bins[source] = comm.reduce(dist_u_bins[source], op=add_bins_op)
        dist_v_bins[source] = comm.reduce(dist_v_bins[source], op=add_bins_op)

    dist_hist_dict = defaultdict(dict)
    dist_u_hist_dict = defaultdict(dict)
    dist_v_hist_dict = defaultdict(dict)

    if rank == 0:
        for source in sources:
            dist_hist_dict[destination][source] = finalize_bins(dist_bins[source], bin_size)
            dist_u_hist_dict[destination][source] = finalize_bins(dist_u_bins[source], bin_size)
            dist_v_hist_dict[destination][source] = finalize_bins(dist_v_bins[source], bin_size)

    return {'Total distance': dist_hist_dict,
            'U distance': dist_u_hist_dict,
            'V distance': dist_v_hist_dict}
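
# Example usage of vertex_distribution (file and population names are
# hypothetical, and finalize_bins is assumed to return a (counts, edges) pair):
#
#   histograms = vertex_distribution('DG_connectivity.h5', 'DG_coords.h5',
#                                    'Arc Distances', 'GC', ('MPP', 'LPP'))
#   if MPI.COMM_WORLD.Get_rank() == 0:
#       counts, edges = histograms['U distance']['GC']['MPP']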
def main(config, coordinates, field_width, gid, input_features_path, input_features_namespaces,
         initial_weights_path, output_features_namespace, output_features_path,
         output_weights_path, reference_weights_path, h5types_path, synapse_name,
         initial_weights_namespace, output_weights_namespace, reference_weights_namespace,
         connections_path, destination, sources, non_structured_sources,
         non_structured_weights_namespace, non_structured_weights_path, arena_id,
         field_width_scale, max_delta_weight, max_opt_iter, max_weight_decay_fraction,
         optimize_method, optimize_tol, optimize_grad, peak_rate, reference_weights_are_delta,
         arena_margin, target_amplitude, io_size, chunk_size, value_chunk_size, cache_size,
         write_size, verbose, dry_run, plot, show_fig, save_fig):
    """
    :param config: str (path to .yaml file)
    :param input_features_path: str (path to .h5 file)
    :param initial_weights_path: str (path to .h5 file)
    :param initial_weights_namespace: str
    :param output_weights_namespace: str
    :param output_weights_path: str (path to .h5 file)
    :param connections_path: str (path to .h5 file)
    :param destination: str
    :param sources: list of str
    :param arena_id: str
    :param io_size: int
    :param chunk_size: int
    :param value_chunk_size: int
    :param cache_size: int
    :param write_size: int
    :param verbose: bool
    :param dry_run: bool
    :return:
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)

    comm = MPI.COMM_WORLD
    rank = comm.rank
    nranks = comm.size

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%s: %i ranks have been allocated' % (__file__, comm.size))

    env = Env(comm=comm, config_file=config, io_size=io_size)

    if plot and (not save_fig) and (not show_fig):
        show_fig = True

    if (not dry_run) and (rank == 0):
        if not os.path.isfile(output_weights_path):
            if initial_weights_path is not None:
                input_file = h5py.File(initial_weights_path, 'r')
            elif h5types_path is not None:
                input_file = h5py.File(h5types_path, 'r')
            else:
                raise RuntimeError('h5types input path must be specified when weights path '
                                   'is not specified.')
            output_file = h5py.File(output_weights_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    env.comm.barrier()

    LTD_output_weights_namespace = 'LTD %s %s' % (output_weights_namespace, arena_id)
    LTP_output_weights_namespace = 'LTP %s %s' % (output_weights_namespace, arena_id)
    this_input_features_namespaces = ['%s %s' % (input_features_namespace, arena_id)
                                      for input_features_namespace in input_features_namespaces]

    selectivity_type_index = {i: n for n, i in viewitems(env.selectivity_types)}
    target_selectivity_type_name = 'place'
    target_selectivity_type = env.selectivity_types[target_selectivity_type_name]
    features_attrs = defaultdict(dict)
    source_features_attr_names = ['Selectivity Type', 'Num Fields', 'Field Width', 'Peak Rate',
                                  'Module ID', 'Grid Spacing', 'Grid Orientation',
                                  'Field Width Concentration Factor', 'X Offset', 'Y Offset']
    target_features_attr_names = ['Selectivity Type', 'Num Fields', 'Field Width', 'Peak Rate',
                                  'X Offset', 'Y Offset']

    local_random = np.random.RandomState()
    seed_offset = int(env.model_config['Random Seeds']['GC Structured Weights'])
    spatial_resolution = env.stimulus_config['Spatial Resolution']  # cm

    arena = env.stimulus_config['Arena'][arena_id]
    default_run_vel = arena.properties['default run velocity']  # cm/s

    gid_count = 0
    start_time = time.time()

    target_gid_set = None
    if len(gid) > 0:
        target_gid_set = set(gid)

    all_sources = sources + non_structured_sources
    connection_gen_list = [NeuroH5ProjectionGen(connections_path, source, destination,
                                                namespaces=['Synapses'], comm=comm)
                           for source in all_sources]

    output_features_dict = {}
    LTP_output_weights_dict = {}
    LTD_output_weights_dict = {}
    for iter_count, attr_gen_package in enumerate(zip_longest(*connection_gen_list)):

        local_time = time.time()
        this_gid = attr_gen_package[0][0]
        if not all([attr_gen_items[0] == this_gid for attr_gen_items in attr_gen_package]):
            raise Exception('Rank: %i; destination: %s; this_gid not matched across multiple '
                            'attribute generators: %s' %
                            (rank, destination,
                             [attr_gen_items[0] for attr_gen_items in attr_gen_package]))

        if (target_gid_set is not None) and (this_gid not in target_gid_set):
            continue

        if this_gid is None:
            selection = []
            logger.info('Rank: %i received None' % rank)
        else:
            selection = [this_gid]
            local_random.seed(int(this_gid + seed_offset))

        has_structured_weights = False

        dst_input_features_attr_dict = {}
        for input_features_namespace in this_input_features_namespaces:
            input_features_iter = read_cell_attribute_selection(
                input_features_path, destination, namespace=input_features_namespace,
                mask=set(target_features_attr_names), comm=env.comm, selection=selection)
            count = 0
            for gid, attr_dict in input_features_iter:
                dst_input_features_attr_dict[gid] = attr_dict
                count += 1
            if rank == 0:
                logger.info('Read %s feature data for %i cells in population %s' %
                            (input_features_namespace, count, destination))

        arena_margin_size = 0.
        arena_margin = max(arena_margin, 0.)

        target_selectivity_features_dict = {}
        target_selectivity_config_dict = {}
        target_field_width_dict = {}

        for gid in selection:
            target_selectivity_features_dict[gid] = dst_input_features_attr_dict.get(gid, {})
            target_selectivity_features_dict[gid]['Selectivity Type'] = \
                np.asarray([target_selectivity_type], dtype=np.uint8)

            num_fields = target_selectivity_features_dict[gid]['Num Fields'][0]

            if coordinates[0] is not None:
                num_fields = 1
                target_selectivity_features_dict[gid]['X Offset'] = \
                    np.asarray([coordinates[0]], dtype=np.float32)
                target_selectivity_features_dict[gid]['Y Offset'] = \
                    np.asarray([coordinates[1]], dtype=np.float32)
                target_selectivity_features_dict[gid]['Num Fields'] = \
                    np.asarray([num_fields], dtype=np.uint8)

            if field_width is not None:
                target_selectivity_features_dict[gid]['Field Width'] = \
                    np.asarray([field_width] * num_fields, dtype=np.float32)
            else:
                this_field_width = target_selectivity_features_dict[gid]['Field Width']
                target_selectivity_features_dict[gid]['Field Width'] = \
                    this_field_width[:num_fields]

            if peak_rate is not None:
                target_selectivity_features_dict[gid]['Peak Rate'] = \
                    np.asarray([peak_rate] * num_fields, dtype=np.float32)

            input_cell_config = stimulus.get_input_cell_config(
                target_selectivity_type, selectivity_type_index,
                selectivity_attr_dict=target_selectivity_features_dict[gid])
            if input_cell_config.num_fields > 0:
                arena_margin_size = max(arena_margin_size,
                                        np.max(input_cell_config.field_width) * arena_margin)
                target_field_width_dict[gid] = input_cell_config.field_width
                target_selectivity_config_dict[gid] = input_cell_config
                has_structured_weights = True

        arena_x, arena_y = stimulus.get_2D_arena_spatial_mesh(arena, spatial_resolution,
                                                              margin=arena_margin_size)

        for gid, input_cell_config in viewitems(target_selectivity_config_dict):
            target_map = np.asarray(input_cell_config.get_rate_map(arena_x, arena_y,
                                                                   scale=field_width_scale),
                                    dtype=np.float32)
            target_selectivity_features_dict[gid]['Arena Rate Map'] = target_map

        if not has_structured_weights:
            selection = []

        initial_weights_by_syn_id_dict = defaultdict(lambda: dict())
        initial_weights_by_source_gid_dict = defaultdict(lambda: dict())

        if initial_weights_path is not None:
            initial_weights_iter = read_cell_attribute_selection(
                initial_weights_path, destination, namespace=initial_weights_namespace,
                selection=selection)
            initial_weights_gid_count = 0
            initial_weights_syn_count = 0
            for this_gid, syn_weight_attr_dict in initial_weights_iter:
                syn_ids = syn_weight_attr_dict['syn_id']
                weights = syn_weight_attr_dict[synapse_name]
                for (syn_id, weight) in zip(syn_ids, weights):
                    initial_weights_by_syn_id_dict[this_gid][int(syn_id)] = float(weight)
                initial_weights_gid_count += 1
                initial_weights_syn_count += len(syn_ids)
            logger.info('destination: %s; read initial synaptic weights for %i gids and %i syns' %
                        (destination, initial_weights_gid_count, initial_weights_syn_count))

        if len(non_structured_sources) > 0:
            non_structured_weights_by_syn_id_dict = defaultdict(lambda: dict())
            non_structured_weights_by_source_gid_dict = defaultdict(lambda: dict())
        else:
            non_structured_weights_by_syn_id_dict = None
            non_structured_weights_by_source_gid_dict = None

        if non_structured_weights_path is not None:
            non_structured_weights_iter = read_cell_attribute_selection(
                non_structured_weights_path, destination,
                namespace=non_structured_weights_namespace, selection=selection)
            non_structured_weights_gid_count = 0
            non_structured_weights_syn_count = 0
            for this_gid, syn_weight_attr_dict in non_structured_weights_iter:
                syn_ids = syn_weight_attr_dict['syn_id']
                weights = syn_weight_attr_dict[synapse_name]
                for (syn_id, weight) in zip(syn_ids, weights):
                    non_structured_weights_by_syn_id_dict[this_gid][int(syn_id)] = float(weight)
                non_structured_weights_gid_count += 1
                non_structured_weights_syn_count += len(syn_ids)
            logger.info('destination: %s; read non-structured synaptic weights for %i gids and '
                        '%i syns' % (destination, non_structured_weights_gid_count,
                                     non_structured_weights_syn_count))

        reference_weights_by_syn_id_dict = None
        reference_weights_by_source_gid_dict = defaultdict(lambda: dict())
        if reference_weights_path is not None:
            reference_weights_by_syn_id_dict = defaultdict(lambda: dict())
            reference_weights_iter = read_cell_attribute_selection(
                reference_weights_path, destination, namespace=reference_weights_namespace,
                selection=selection)
            reference_weights_gid_count = 0
            for this_gid, syn_weight_attr_dict in reference_weights_iter:
                syn_ids = syn_weight_attr_dict['syn_id']
                weights = syn_weight_attr_dict[synapse_name]
                for (syn_id, weight) in zip(syn_ids, weights):
                    reference_weights_by_syn_id_dict[this_gid][int(syn_id)] = float(weight)
                reference_weights_gid_count += 1
            logger.info('destination: %s; read reference synaptic weights for %i gids' %
                        (destination, reference_weights_gid_count))

        syn_count_by_source_gid_dict = defaultdict(int)
        source_gid_set_dict = defaultdict(set)
        syn_ids_by_source_gid_dict = defaultdict(list)
        structured_syn_id_count = 0

        if has_structured_weights:
            for source, (destination_gid, (source_gid_array, conn_attr_dict)) in \
                    zip_longest(all_sources, attr_gen_package):
                syn_ids = conn_attr_dict['Synapses']['syn_id']
                count = 0
                this_initial_weights_by_syn_id_dict = None
                this_initial_weights_by_source_gid_dict = None
                this_reference_weights_by_syn_id_dict = None
                this_reference_weights_by_source_gid_dict = None
                this_non_structured_weights_by_syn_id_dict = None
                this_non_structured_weights_by_source_gid_dict = None
                if destination_gid is not None:
                    this_initial_weights_by_syn_id_dict = \
                        initial_weights_by_syn_id_dict[destination_gid]
                    this_initial_weights_by_source_gid_dict = \
                        initial_weights_by_source_gid_dict[destination_gid]
                    if reference_weights_by_syn_id_dict is not None:
                        this_reference_weights_by_syn_id_dict = \
                            reference_weights_by_syn_id_dict[destination_gid]
                        this_reference_weights_by_source_gid_dict = \
                            reference_weights_by_source_gid_dict[destination_gid]
                    if non_structured_weights_by_syn_id_dict is not None:
                        this_non_structured_weights_by_syn_id_dict = \
                            non_structured_weights_by_syn_id_dict[destination_gid]
                        this_non_structured_weights_by_source_gid_dict = \
                            non_structured_weights_by_source_gid_dict[destination_gid]

                for i in range(len(source_gid_array)):
                    this_source_gid = source_gid_array[i]
                    this_syn_id = syn_ids[i]
                    if this_syn_id in this_initial_weights_by_syn_id_dict:
                        this_syn_wgt = this_initial_weights_by_syn_id_dict[this_syn_id]
                        if this_source_gid not in this_initial_weights_by_source_gid_dict:
                            this_initial_weights_by_source_gid_dict[this_source_gid] = this_syn_wgt
                        if this_reference_weights_by_syn_id_dict is not None:
                            this_reference_weights_by_source_gid_dict[this_source_gid] = \
                                this_reference_weights_by_syn_id_dict[this_syn_id]
                    elif (this_non_structured_weights_by_syn_id_dict is not None) and \
                            (this_syn_id in this_non_structured_weights_by_syn_id_dict):
                        this_syn_wgt = this_non_structured_weights_by_syn_id_dict[this_syn_id]
                        if this_source_gid not in this_non_structured_weights_by_source_gid_dict:
                            this_non_structured_weights_by_source_gid_dict[this_source_gid] = \
                                this_syn_wgt
                    source_gid_set_dict[source].add(this_source_gid)
                    syn_ids_by_source_gid_dict[this_source_gid].append(this_syn_id)
                    syn_count_by_source_gid_dict[this_source_gid] += 1
                    count += 1

                if source not in non_structured_sources:
                    structured_syn_id_count += len(syn_ids)
                logger.info('Rank %i; destination: %s; gid %i; %d edges from source population %s' %
                            (rank, destination, this_gid, count, source))

        input_rate_maps_by_source_gid_dict = {}
        if len(non_structured_sources) > 0:
            non_structured_input_rate_maps_by_source_gid_dict = {}
        else:
            non_structured_input_rate_maps_by_source_gid_dict = None

        for source in all_sources:
            if has_structured_weights:
                source_gids = list(source_gid_set_dict[source])
            else:
                source_gids = []
            if rank == 0:
                logger.info('Reading feature data for %i cells in population %s...' %
                            (len(source_gids), source))
            for input_features_namespace in this_input_features_namespaces:
                input_features_iter = read_cell_attribute_selection(
                    input_features_path, source, namespace=input_features_namespace,
                    mask=set(source_features_attr_names), comm=env.comm, selection=source_gids)
                count = 0
                for gid, attr_dict in input_features_iter:
                    this_selectivity_type = attr_dict['Selectivity Type'][0]
                    this_selectivity_type_name = selectivity_type_index[this_selectivity_type]
                    input_cell_config = stimulus.get_input_cell_config(
                        this_selectivity_type, selectivity_type_index,
                        selectivity_attr_dict=attr_dict)
                    this_arena_rate_map = np.asarray(
                        input_cell_config.get_rate_map(arena_x, arena_y), dtype=np.float32)
                    if source in non_structured_sources:
                        non_structured_input_rate_maps_by_source_gid_dict[gid] = this_arena_rate_map
                    else:
                        input_rate_maps_by_source_gid_dict[gid] = this_arena_rate_map
                    count += 1
                if rank == 0:
                    logger.info('Read %s feature data for %i cells in population %s' %
                                (input_features_namespace, count, source))

        if has_structured_weights:

            # is_interactive and context are module-level, set when this script
            # is run inside an interactive session
            if is_interactive:
                context.update(locals())

            save_fig_path = None
            if save_fig is not None:
                save_fig_path = '%s/Structured Weights %s %d.png' % \
                                (save_fig, destination, this_gid)

            normalized_LTP_delta_weights_dict, LTD_delta_weights_dict, arena_LS_map = \
                synapses.generate_structured_weights(
                    target_map=target_selectivity_features_dict[this_gid]['Arena Rate Map'],
                    initial_weight_dict=this_initial_weights_by_source_gid_dict,
                    reference_weight_dict=this_reference_weights_by_source_gid_dict,
                    reference_weights_are_delta=reference_weights_are_delta,
                    reference_weights_namespace=reference_weights_namespace,
                    input_rate_map_dict=input_rate_maps_by_source_gid_dict,
                    non_structured_input_rate_map_dict=non_structured_input_rate_maps_by_source_gid_dict,
                    non_structured_weights_dict=this_non_structured_weights_by_source_gid_dict,
                    syn_count_dict=syn_count_by_source_gid_dict,
                    max_delta_weight=max_delta_weight, max_opt_iter=max_opt_iter,
                    max_weight_decay_fraction=max_weight_decay_fraction,
                    target_amplitude=target_amplitude, arena_x=arena_x, arena_y=arena_y,
                    optimize_method=optimize_method, optimize_tol=optimize_tol,
                    optimize_grad=optimize_grad, verbose=verbose, plot=plot,
                    show_fig=show_fig, save_fig=save_fig_path,
                    fig_kwargs={'gid': this_gid,
                                'field_width': target_field_width_dict[this_gid]})
            gc.collect()

            this_selectivity_dict = target_selectivity_features_dict[this_gid]
            output_features_dict[this_gid] = \
                {fld: this_selectivity_dict[fld]
                 for fld in ['Selectivity Type', 'Num Fields', 'Field Width',
                             'Peak Rate', 'X Offset', 'Y Offset']}
            output_features_dict[this_gid]['Arena State Map'] = \
                np.asarray(arena_LS_map.ravel(), dtype=np.float32)

            output_syn_ids = np.empty(structured_syn_id_count, dtype='uint32')
            LTD_output_weights = np.empty(structured_syn_id_count, dtype='float32')
            LTP_output_weights = np.empty(structured_syn_id_count, dtype='float32')
            i = 0
            for source_gid in normalized_LTP_delta_weights_dict:
                for syn_id in syn_ids_by_source_gid_dict[source_gid]:
                    output_syn_ids[i] = syn_id
                    LTP_output_weights[i] = normalized_LTP_delta_weights_dict[source_gid]
                    LTD_output_weights[i] = LTD_delta_weights_dict[source_gid]
                    i += 1
            LTP_output_weights_dict[this_gid] = {'syn_id': output_syn_ids,
                                                 synapse_name: LTP_output_weights}
            LTD_output_weights_dict[this_gid] = {'syn_id': output_syn_ids,
                                                 synapse_name: LTD_output_weights}
            logger.info('Rank %i; destination: %s; gid %i; generated structured weights for '
                        '%i inputs in %.2f s' % (rank, destination, this_gid,
                                                 len(output_syn_ids), time.time() - local_time))
            gid_count += 1

        if iter_count % write_size == 0:
            if not dry_run:
                append_cell_attributes(output_weights_path, destination, LTD_output_weights_dict,
                                       namespace=LTD_output_weights_namespace, comm=env.comm,
                                       io_size=env.io_size, chunk_size=chunk_size,
                                       value_chunk_size=value_chunk_size)
                append_cell_attributes(output_weights_path, destination, LTP_output_weights_dict,
                                       namespace=LTP_output_weights_namespace, comm=env.comm,
                                       io_size=env.io_size, chunk_size=chunk_size,
                                       value_chunk_size=value_chunk_size)
                count = comm.reduce(len(LTP_output_weights_dict), op=MPI.SUM, root=0)
                if rank == 0:
                    logger.info('Destination: %s; appended weights for %i cells' %
                                (destination, count))
                if output_features_path is not None:
                    if output_features_namespace is None:
                        output_features_namespace = \
                            '%s Selectivity' % target_selectivity_type_name.title()
                    this_output_features_namespace = \
                        '%s %s' % (output_features_namespace, arena_id)
                    append_cell_attributes(output_features_path, destination,
                                           output_features_dict,
                                           namespace=this_output_features_namespace)
                    count = comm.reduce(len(output_features_dict), op=MPI.SUM, root=0)
                    if rank == 0:
                        logger.info('Destination: %s; appended selectivity features for %i cells' %
                                    (destination, count))
            LTP_output_weights_dict.clear()
            LTD_output_weights_dict.clear()
            output_features_dict.clear()
            gc.collect()
            env.comm.barrier()

    if not dry_run:
        append_cell_attributes(output_weights_path, destination, LTD_output_weights_dict,
                               namespace=LTD_output_weights_namespace, comm=env.comm,
                               io_size=env.io_size, chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
        append_cell_attributes(output_weights_path, destination, LTP_output_weights_dict,
                               namespace=LTP_output_weights_namespace, comm=env.comm,
                               io_size=env.io_size, chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
        count = comm.reduce(len(LTP_output_weights_dict), op=MPI.SUM, root=0)
        if rank == 0:
            logger.info('Destination: %s; appended weights for %i cells' % (destination, count))
        if output_features_path is not None:
            if output_features_namespace is None:
                output_features_namespace = \
                    '%s Selectivity' % target_selectivity_type_name.title()
            this_output_features_namespace = '%s %s' % (output_features_namespace, arena_id)
            append_cell_attributes(output_features_path, destination, output_features_dict,
                                   namespace=this_output_features_namespace)
            count = comm.reduce(len(output_features_dict), op=MPI.SUM, root=0)
            if rank == 0:
                logger.info('Destination: %s; appended selectivity features for %i cells' %
                            (destination, count))
    env.comm.barrier()

    global_count = comm.gather(gid_count, root=0)
    if rank == 0:
        logger.info('destination: %s; %i ranks assigned structured weights to %i cells in %.2f s' %
                    (destination, comm.size, np.sum(global_count), time.time() - start_time))
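
# The loop above writes separate LTP and LTD delta-weight namespaces. How a
# downstream consumer combines them with the initial weights is not shown in
# this script; the rule below is an assumption for illustration only.

def combine_structured_weights_sketch(initial_weight, ltp_delta, ltd_delta):
    """Hypothetical combination of potentiation and depression deltas with an
    initial synaptic weight, clipped at zero."""
    return max(initial_weight + ltp_delta - ltd_delta, 0.0)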
def spatial_bin_graph(connectivity_path, coords_path, distances_namespace,
                      destination, sources, extents, bin_size=20.0,
                      cache_size=100, comm=None):
    """
    Obtain reduced graphs of the specified projections by binning nodes
    according to their spatial position.

    :param connectivity_path:
    :param coords_path:
    :param distances_namespace:
    :param destination:
    :param sources:
    :param extents:
    :param bin_size:
    :param cache_size:
    :param comm:
    """
    import networkx as nx

    if comm is None:
        comm = MPI.COMM_WORLD

    rank = comm.Get_rank()

    (population_ranges, _) = read_population_ranges(coords_path)

    destination_start = population_ranges[destination][0]
    destination_count = population_ranges[destination][1]

    if rank == 0:
        logger.info('reading %s distances...' % destination)

    destination_soma_distances = bcast_cell_attributes(
        coords_path, destination, namespace=distances_namespace, comm=comm, root=0)

    ((x_min, x_max), (y_min, y_max)) = extents
    u_bins = np.arange(x_min, x_max, bin_size)
    v_bins = np.arange(y_min, y_max, bin_size)

    dest_u_bins = {}
    dest_v_bins = {}
    destination_soma_distance_U = {}
    destination_soma_distance_V = {}
    for k, v in destination_soma_distances:
        dist_u = v['U Distance'][0]
        dist_v = v['V Distance'][0]
        dest_u_bins[k] = np.searchsorted(u_bins, dist_u, side='left')
        dest_v_bins[k] = np.searchsorted(v_bins, dist_v, side='left')
        destination_soma_distance_U[k] = dist_u
        destination_soma_distance_V[k] = dist_v

    del destination_soma_distances

    if (sources == ()) or (sources == []) or (sources is None):
        sources = []
        for (src, dst) in read_projection_names(connectivity_path):
            if dst == destination:
                sources.append(src)

    source_soma_distances = {}
    for s in sources:
        if rank == 0:
            logger.info('reading %s distances...' % s)
        source_soma_distances[s] = bcast_cell_attributes(
            coords_path, s, namespace=distances_namespace, comm=comm, root=0)

    source_u_bins = {}
    source_v_bins = {}
    source_soma_distance_U = {}
    source_soma_distance_V = {}
    for s in sources:
        this_source_soma_distance_U = {}
        this_source_soma_distance_V = {}
        this_source_u_bins = {}
        this_source_v_bins = {}
        for k, v in source_soma_distances[s]:
            dist_u = v['U Distance'][0]
            dist_v = v['V Distance'][0]
            this_source_u_bins[k] = np.searchsorted(u_bins, dist_u, side='left')
            this_source_v_bins[k] = np.searchsorted(v_bins, dist_v, side='left')
            this_source_soma_distance_U[k] = dist_u
            this_source_soma_distance_V[k] = dist_v
        source_soma_distance_U[s] = this_source_soma_distance_U
        source_soma_distance_V[s] = this_source_soma_distance_V
        source_u_bins[s] = this_source_u_bins
        source_v_bins[s] = this_source_v_bins

    del source_soma_distances

    if rank == 0:
        logger.info('reading connections %s -> %s...' % (str(sources), destination))
    gg = [NeuroH5ProjectionGen(connectivity_path, source, destination,
                               cache_size=cache_size, comm=comm)
          for source in sources]

    local_u_bin_graph = defaultdict(dict)
    local_v_bin_graph = defaultdict(dict)

    for prj_gen_tuple in zip_longest(*gg):
        destination_gid = prj_gen_tuple[0][0]
        if not all([prj_gen_elt[0] == destination_gid for prj_gen_elt in prj_gen_tuple]):
            raise RuntimeError('destination %s: destination_gid %i not matched across multiple '
                               'projection generators: %s' %
                               (destination, destination_gid,
                                [prj_gen_elt[0] for prj_gen_elt in prj_gen_tuple]))

        if destination_gid is not None:
            dest_u_bin = dest_u_bins[destination_gid]
            dest_v_bin = dest_v_bins[destination_gid]
            for (source, (this_destination_gid, rest)) in zip_longest(sources, prj_gen_tuple):
                this_source_u_bins = source_u_bins[source]
                this_source_v_bins = source_v_bins[source]
                (source_indexes, attr_dict) = rest
                source_u_bin_dict = defaultdict(int)
                source_v_bin_dict = defaultdict(int)
                for source_gid in source_indexes:
                    source_u_bin = this_source_u_bins[source_gid]
                    source_v_bin = this_source_v_bins[source_gid]
                    source_u_bin_dict[source_u_bin] += 1
                    source_v_bin_dict[source_v_bin] += 1
                local_u_bin_graph[dest_u_bin][source] = source_u_bin_dict
                local_v_bin_graph[dest_v_bin][source] = source_v_bin_dict

    local_u_bin_graphs = comm.gather(dict(local_u_bin_graph), root=0)
    local_v_bin_graphs = comm.gather(dict(local_v_bin_graph), root=0)

    u_bin_graph = None
    v_bin_graph = None
    nu = None
    nv = None

    if rank == 0:

        u_bin_edges = {destination: dict(ChainMap(*local_u_bin_graphs))}
        v_bin_edges = {destination: dict(ChainMap(*local_v_bin_graphs))}

        nu = len(u_bins)
        u_bin_graph = nx.Graph()
        for pop in [destination] + list(sources):
            for i in range(nu):
                u_bin_graph.add_node((pop, i))
        for i, ss in viewitems(u_bin_edges[destination]):
            for source, ids in viewitems(ss):
                u_bin_graph.add_weighted_edges_from(
                    [((source, j), (destination, i), count)
                     for j, count in viewitems(ids)])

        nv = len(v_bins)
        v_bin_graph = nx.Graph()
        for pop in [destination] + list(sources):
            for i in range(nv):
                v_bin_graph.add_node((pop, i))
        for i, ss in viewitems(v_bin_edges[destination]):
            for source, ids in viewitems(ss):
                v_bin_graph.add_weighted_edges_from(
                    [((source, j), (destination, i), count)
                     for j, count in viewitems(ids)])

    label = '%s to %s' % (str(sources), destination)

    return {'label': label,
            'bin size': bin_size,
            'destination': destination,
            'sources': sources,
            'U graph': u_bin_graph,
            'V graph': v_bin_graph}
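
# Example usage of spatial_bin_graph (paths, populations and extents are
# hypothetical):
#
#   graph_dict = spatial_bin_graph('DG_connectivity.h5', 'DG_coords.h5',
#                                  'Arc Distances', 'GC', ('MPP', 'LPP'),
#                                  extents=((0., 4000.), (0., 1250.)))
#   if MPI.COMM_WORLD.Get_rank() == 0:
#       u_graph = graph_dict['U graph']
#       # weighted degree of U bin 10 of the destination population:
#       print(dict(u_graph.degree([('GC', 10)], weight='weight')))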