def main(config, coordinates, gid, field_width, peak_rate, input_features_path,
         input_features_namespaces, output_features_namespace,
         output_weights_path, output_features_path, initial_weights_path,
         reference_weights_path, h5types_path, synapse_name,
         initial_weights_namespace, reference_weights_namespace,
         output_weights_namespace, reference_weights_are_delta,
         connections_path, optimize_method, destination, sources, arena_id,
         max_delta_weight, field_width_scale, max_iter, verbose, dry_run,
         plot):
    """
    Generate structured synaptic weights for one destination cell: builds a
    target place-field rate map from the given coordinates/field widths, reads
    initial (and optional reference) synaptic weights and the connectivity for
    the target gid, computes per-source rate maps, runs
    synapses.generate_structured_weights, and appends the resulting weights
    (and optionally selectivity features) to the output files.

    :param config: str (path to .yaml file)
    :param coordinates: tuple of float
    :param gid: int
    :param field_width: float
    :param peak_rate: float
    :param input_features_path: str (path to .h5 file)
    :param input_features_namespaces: str
    :param output_features_namespace: str
    :param output_weights_path: str (path to .h5 file)
    :param output_features_path: str (path to .h5 file)
    :param initial_weights_path: str (path to .h5 file)
    :param reference_weights_path: str (path to .h5 file)
    :param h5types_path: str (path to .h5 file)
    :param synapse_name: str
    :param initial_weights_namespace: str
    :param reference_weights_namespace: str
    :param output_weights_namespace: str
    :param reference_weights_are_delta: bool
    :param connections_path: str (path to .h5 file)
    :param optimize_method: str
    :param destination: str (population name)
    :param sources: list of str (population name)
    :param arena_id: str
    :param max_delta_weight: float
    :param field_width_scale: float
    :param max_iter: int
    :param verbose: bool
    :param dry_run: bool
    :param plot: bool
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)

    env = Env(config_file=config)

    # Unless this is a dry run, make sure the output weights file exists and
    # carries an /H5Types group (copied from the initial-weights file or from
    # a dedicated h5types file).
    if not dry_run:
        if output_weights_path is None:
            raise RuntimeError(
                'Missing required argument: output_weights_path.')
        if not os.path.isfile(output_weights_path):
            if initial_weights_path is not None and os.path.isfile(
                    initial_weights_path):
                input_file_path = initial_weights_path
            elif h5types_path is not None and os.path.isfile(h5types_path):
                input_file_path = h5types_path
            else:
                raise RuntimeError(
                    'Missing required source for h5types: either an initial_weights_path or an '
                    'h5types_path must be provided.')
            with h5py.File(output_weights_path, 'a') as output_file:
                with h5py.File(input_file_path, 'r') as input_file:
                    input_file.copy('/H5Types', output_file)

    # Input feature namespaces are parameterized by arena id.
    this_input_features_namespaces = [
        '%s %s' % (input_features_namespace, arena_id)
        for input_features_namespace in input_features_namespaces
    ]

    features_attr_names = ['Arena Rate Map']

    spatial_resolution = env.stimulus_config['Spatial Resolution']  # cm
    arena = env.stimulus_config['Arena'][arena_id]
    default_run_vel = arena.properties['default run velocity']  # cm/s

    arena_x, arena_y = stimulus.get_2D_arena_spatial_mesh(
        arena, spatial_resolution)
    dim_x = len(arena_x)
    dim_y = len(arena_y)

    if gid is None:
        target_gids = []
    else:
        target_gids = [gid]

    # NOTE(review): `target_gid` (singular) is used throughout the remainder
    # of this function but is never assigned here; presumably the original
    # code iterated `for target_gid in target_gids:` or aliased
    # `target_gid = gid` — verify against the source repository.

    dst_input_features = defaultdict(dict)

    # Construct the target place-field configuration: one field per
    # coordinate pair, all with the same width and peak rate.
    num_fields = len(coordinates)
    this_field_width = np.array([field_width] * num_fields, dtype=np.float32)
    this_scaled_field_width = np.array(
        [field_width * field_width_scale] * num_fields, dtype=np.float32)
    this_peak_rate = np.array([peak_rate] * num_fields, dtype=np.float32)
    this_x0 = np.array([x for x, y in coordinates], dtype=np.float32)
    this_y0 = np.array([y for x, y in coordinates], dtype=np.float32)
    # Unscaled rate map is saved as the cell's feature attribute; the scaled
    # one (field widths multiplied by field_width_scale) is the optimization
    # target.
    this_rate_map = np.asarray(get_rate_map(this_x0, this_y0,
                                            this_field_width, this_peak_rate,
                                            arena_x, arena_y),
                               dtype=np.float32)
    target_map = np.asarray(get_rate_map(this_x0, this_y0,
                                         this_scaled_field_width,
                                         this_peak_rate, arena_x, arena_y),
                            dtype=np.float32)

    selectivity_type = env.selectivity_types['place']
    dst_input_features[destination][target_gid] = {
        'Selectivity Type': np.array([selectivity_type], dtype=np.uint8),
        'Num Fields': np.array([num_fields], dtype=np.uint8),
        'Field Width': this_field_width,
        'Peak Rate': this_peak_rate,
        'X Offset': this_x0,
        'Y Offset': this_y0,
        'Arena Rate Map': this_rate_map.ravel()
    }

    # Read initial synaptic weights for the target gid, keyed by syn_id.
    initial_weights_by_syn_id_dict = dict()
    selection = [target_gid]
    if initial_weights_path is not None:
        initial_weights_iter = \
            read_cell_attribute_selection(initial_weights_path, destination,
                                          namespace=initial_weights_namespace,
                                          selection=selection)
        syn_weight_attr_dict = dict(initial_weights_iter)
        syn_ids = syn_weight_attr_dict[target_gid]['syn_id']
        weights = syn_weight_attr_dict[target_gid][synapse_name]
        for (syn_id, weight) in zip(syn_ids, weights):
            initial_weights_by_syn_id_dict[int(syn_id)] = float(weight)
        logger.info(
            'destination: %s; gid %i; read initial synaptic weights for %i synapses'
            % (destination, target_gid, len(initial_weights_by_syn_id_dict)))

    # Optionally read reference weights (used for comparison/regularization
    # by the downstream optimizer), also keyed by syn_id.
    reference_weights_by_syn_id_dict = None
    if reference_weights_path is not None:
        reference_weights_by_syn_id_dict = dict()
        reference_weights_iter = \
            read_cell_attribute_selection(reference_weights_path, destination,
                                          namespace=reference_weights_namespace,
                                          selection=selection)
        syn_weight_attr_dict = dict(reference_weights_iter)
        syn_ids = syn_weight_attr_dict[target_gid]['syn_id']
        weights = syn_weight_attr_dict[target_gid][synapse_name]
        for (syn_id, weight) in zip(syn_ids, weights):
            reference_weights_by_syn_id_dict[int(syn_id)] = float(weight)
        logger.info(
            'destination: %s; gid %i; read reference synaptic weights for %i synapses'
            % (destination, target_gid,
               len(reference_weights_by_syn_id_dict)))

    source_gid_set_dict = defaultdict(set)
    syn_ids_by_source_gid_dict = defaultdict(list)
    initial_weights_by_source_gid_dict = dict()
    if reference_weights_by_syn_id_dict is None:
        reference_weights_by_source_gid_dict = None
    else:
        reference_weights_by_source_gid_dict = dict()

    # Read the connectivity (edges + syn_ids) for the target gid and re-key
    # the weight dictionaries by presynaptic source gid.
    (graph, edge_attr_info) = read_graph_selection(file_name=connections_path,
                                                   selection=[target_gid],
                                                   namespaces=['Synapses'])
    syn_id_attr_index = None
    for source, edge_iter in viewitems(graph[destination]):
        if source not in sources:
            continue
        this_edge_attr_info = edge_attr_info[destination][source]
        if 'Synapses' in this_edge_attr_info and \
           'syn_id' in this_edge_attr_info['Synapses']:
            syn_id_attr_index = this_edge_attr_info['Synapses']['syn_id']
        for (destination_gid, edges) in edge_iter:
            assert destination_gid == target_gid
            source_gids, edge_attrs = edges
            syn_ids = edge_attrs['Synapses'][syn_id_attr_index]
            count = 0
            for i in range(len(source_gids)):
                this_source_gid = int(source_gids[i])
                source_gid_set_dict[source].add(this_source_gid)
                this_syn_id = int(syn_ids[i])
                if this_syn_id not in initial_weights_by_syn_id_dict:
                    # Synapse had no stored initial weight: fall back to the
                    # default mechanism weight from the connection config.
                    this_weight = \
                        env.connection_config[destination][source].mechanisms['default'][synapse_name]['weight']
                    initial_weights_by_syn_id_dict[this_syn_id] = this_weight
                syn_ids_by_source_gid_dict[this_source_gid].append(this_syn_id)
                if this_source_gid not in initial_weights_by_source_gid_dict:
                    # First synapse seen from this source gid determines its
                    # per-source weight entry.
                    initial_weights_by_source_gid_dict[this_source_gid] = \
                        initial_weights_by_syn_id_dict[this_syn_id]
                    # NOTE(review): nesting of the reference-weight
                    # assignment under the first-synapse check is inferred
                    # from statement order; verify against the source repo.
                    if reference_weights_by_source_gid_dict is not None:
                        reference_weights_by_source_gid_dict[this_source_gid] = \
                            reference_weights_by_syn_id_dict[this_syn_id]
                count += 1
            logger.info(
                'destination: %s; gid %i; set initial synaptic weights for %d inputs from source population '
                '%s' % (destination, destination_gid, count, source))

    # Number of synapses contributed by each source gid.
    syn_count_by_source_gid_dict = dict()
    for source_gid in syn_ids_by_source_gid_dict:
        syn_count_by_source_gid_dict[source_gid] = len(
            syn_ids_by_source_gid_dict[source_gid])

    # Read the arena rate map of every connected source cell.
    input_rate_maps_by_source_gid_dict = dict()
    for source in sources:
        source_gids = list(source_gid_set_dict[source])
        for input_features_namespace in this_input_features_namespaces:
            input_features_iter = read_cell_attribute_selection(
                input_features_path, source,
                namespace=input_features_namespace,
                mask=set(features_attr_names), selection=source_gids)
            count = 0
            for gid, attr_dict in input_features_iter:
                input_rate_maps_by_source_gid_dict[gid] = attr_dict[
                    'Arena Rate Map'].reshape((dim_x, dim_y))
                count += 1
            logger.info('Read %s feature data for %i cells in population %s' %
                        (input_features_namespace, count, source))

    # `is_interactive`/`context` are presumably module-level (e.g. an
    # IPython/click interactive session helper) — not defined in this view.
    if is_interactive:
        context.update(locals())

    # Run the structured-weights optimization.
    # NOTE(review): `max_iter` is accepted by this function but not passed to
    # generate_structured_weights in the visible code — confirm intended.
    normalized_delta_weights_dict, arena_LS_map = \
        synapses.generate_structured_weights(target_map=target_map,
                                             initial_weight_dict=initial_weights_by_source_gid_dict,
                                             input_rate_map_dict=input_rate_maps_by_source_gid_dict,
                                             syn_count_dict=syn_count_by_source_gid_dict,
                                             max_delta_weight=max_delta_weight,
                                             arena_x=arena_x, arena_y=arena_y,
                                             reference_weight_dict=reference_weights_by_source_gid_dict,
                                             reference_weights_are_delta=reference_weights_are_delta,
                                             reference_weights_namespace=reference_weights_namespace,
                                             optimize_method=optimize_method,
                                             verbose=verbose, plot=plot)

    # Flatten the per-source weights back to per-synapse arrays: every
    # synapse from a given source gid receives that source's weight.
    output_syn_ids = np.empty(len(initial_weights_by_syn_id_dict),
                              dtype='uint32')
    output_weights = np.empty(len(initial_weights_by_syn_id_dict),
                              dtype='float32')
    i = 0
    for source_gid, this_weight in viewitems(normalized_delta_weights_dict):
        for syn_id in syn_ids_by_source_gid_dict[source_gid]:
            output_syn_ids[i] = syn_id
            output_weights[i] = this_weight
            i += 1
    output_weights_dict = {
        target_gid: {
            'syn_id': output_syn_ids,
            synapse_name: output_weights
        }
    }

    logger.info('destination: %s; gid %i; generated %s for %i synapses' %
                (destination, target_gid, output_weights_namespace,
                 len(output_weights)))

    # Persist weights (and optionally the selectivity features including the
    # optimizer's arena state map) unless this is a dry run.
    if not dry_run:
        this_output_weights_namespace = '%s %s' % (output_weights_namespace,
                                                   arena_id)
        logger.info('Destination: %s; appending %s ...' %
                    (destination, this_output_weights_namespace))
        append_cell_attributes(output_weights_path, destination,
                               output_weights_dict,
                               namespace=this_output_weights_namespace)
        logger.info('Destination: %s; appended %s' %
                    (destination, this_output_weights_namespace))
        output_weights_dict.clear()
        if output_features_path is not None:
            this_output_features_namespace = '%s %s' % (
                output_features_namespace, arena_id)
            cell_attr_dict = dst_input_features[destination]
            cell_attr_dict[target_gid]['Arena State Map'] = np.asarray(
                arena_LS_map.ravel(), dtype=np.float32)
            logger.info('Destination: %s; appending %s ...'
                        % (destination, this_output_features_namespace))
            append_cell_attributes(output_features_path, destination,
                                   cell_attr_dict,
                                   namespace=this_output_features_namespace)

    if is_interactive:
        context.update(locals())
def main(config, coordinates, field_width, gid, input_features_path,
         input_features_namespaces, initial_weights_path,
         output_features_namespace, output_features_path, output_weights_path,
         reference_weights_path, h5types_path, synapse_name,
         initial_weights_namespace, output_weights_namespace,
         reference_weights_namespace, connections_path, destination, sources,
         non_structured_sources, non_structured_weights_namespace,
         non_structured_weights_path, arena_id, field_width_scale,
         max_delta_weight, max_opt_iter, max_weight_decay_fraction,
         optimize_method, optimize_tol, optimize_grad, peak_rate,
         reference_weights_are_delta, arena_margin, target_amplitude, io_size,
         chunk_size, value_chunk_size, cache_size, write_size, verbose,
         dry_run, plot, show_fig, save_fig):
    """
    MPI-parallel generation of structured synaptic weights (LTP/LTD deltas)
    for all cells of a destination population: iterates gid-by-gid over the
    projections into `destination`, builds a target place-field rate map per
    cell, reads initial / reference / non-structured weights, computes source
    rate maps, runs synapses.generate_structured_weights, and appends
    'LTP <ns> <arena>' and 'LTD <ns> <arena>' weight namespaces (and
    optionally selectivity features) to the output files in batches of
    `write_size` iterations.

    :param config: str (path to .yaml file)
    :param input_features_path: str (path to .h5 file)
    :param initial_weights_path: str (path to .h5 file)
    :param initial_weights_namespace: str
    :param output_weights_namespace: str
    :param connections_path: str (path to .h5 file)
    :param destination: str
    :param sources: list of str
    :param non_structured_sources: list of str
    :param io_size: int (number of I/O ranks; -1 means all ranks)
    :param chunk_size: int
    :param value_chunk_size: int
    :param cache_size: int
    :param write_size: int (append to output files every write_size iterations)
    :param verbose: bool
    :param dry_run: bool
    :return: None
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)

    comm = MPI.COMM_WORLD
    rank = comm.rank
    nranks = comm.size

    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info('%s: %i ranks have been allocated' %
                    (__file__, comm.size))

    env = Env(comm=comm, config_file=config, io_size=io_size)

    if plot and (not save_fig) and (not show_fig):
        show_fig = True

    # Rank 0 creates the output weights file (with /H5Types copied from the
    # initial-weights file or a dedicated h5types file) if it does not exist.
    if (not dry_run) and (rank == 0):
        if not os.path.isfile(output_weights_path):
            if initial_weights_path is not None:
                input_file = h5py.File(initial_weights_path, 'r')
            elif h5types_path is not None:
                input_file = h5py.File(h5types_path, 'r')
            else:
                raise RuntimeError(
                    'h5types input path must be specified when weights path is not specified.'
                )
            output_file = h5py.File(output_weights_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    env.comm.barrier()

    LTD_output_weights_namespace = 'LTD %s %s' % (output_weights_namespace,
                                                  arena_id)
    LTP_output_weights_namespace = 'LTP %s %s' % (output_weights_namespace,
                                                  arena_id)
    this_input_features_namespaces = [
        '%s %s' % (input_features_namespace, arena_id)
        for input_features_namespace in input_features_namespaces
    ]

    selectivity_type_index = {
        i: n
        for n, i in viewitems(env.selectivity_types)
    }
    target_selectivity_type_name = 'place'
    target_selectivity_type = env.selectivity_types[
        target_selectivity_type_name]
    features_attrs = defaultdict(dict)
    source_features_attr_names = [
        'Selectivity Type', 'Num Fields', 'Field Width', 'Peak Rate',
        'Module ID', 'Grid Spacing', 'Grid Orientation',
        'Field Width Concentration Factor', 'X Offset', 'Y Offset'
    ]
    target_features_attr_names = [
        'Selectivity Type', 'Num Fields', 'Field Width', 'Peak Rate',
        'X Offset', 'Y Offset'
    ]

    local_random = np.random.RandomState()
    seed_offset = int(
        env.model_config['Random Seeds']['GC Structured Weights'])

    spatial_resolution = env.stimulus_config['Spatial Resolution']  # cm
    arena = env.stimulus_config['Arena'][arena_id]
    default_run_vel = arena.properties['default run velocity']  # cm/s

    gid_count = 0
    start_time = time.time()

    # Optional restriction to an explicit set of target gids.
    # NOTE(review): when target_gid_set excludes a gid, the `continue` below
    # skips the periodic collective writes on that iteration; if the gid
    # selection differs across ranks this can deadlock — verify usage.
    target_gid_set = None
    if len(gid) > 0:
        target_gid_set = set(gid)

    all_sources = sources + non_structured_sources
    connection_gen_list = [
        NeuroH5ProjectionGen(connections_path, source, destination,
                             namespaces=['Synapses'], comm=comm) \
        for source in all_sources
    ]

    output_features_dict = {}
    LTP_output_weights_dict = {}
    LTD_output_weights_dict = {}
    for iter_count, attr_gen_package in enumerate(
            zip_longest(*connection_gen_list)):
        local_time = time.time()
        this_gid = attr_gen_package[0][0]
        # All projection generators advance in lockstep; every element of the
        # package must refer to the same destination gid.
        if not all([
                attr_gen_items[0] == this_gid
                for attr_gen_items in attr_gen_package
        ]):
            raise Exception(
                'Rank: %i; destination: %s; this_gid not matched across multiple attribute '
                'generators: %s' %
                (rank, destination,
                 [attr_gen_items[0] for attr_gen_items in attr_gen_package]))
        if (target_gid_set is not None) and (this_gid not in target_gid_set):
            continue
        if this_gid is None:
            # zip_longest padding: this rank has no cell on this iteration
            # but still participates in the collective reads/writes below.
            selection = []
            logger.info('Rank: %i received None' % rank)
        else:
            selection = [this_gid]
            local_random.seed(int(this_gid + seed_offset))

        has_structured_weights = False

        # Read this cell's own selectivity features (if any).
        dst_input_features_attr_dict = {}
        for input_features_namespace in this_input_features_namespaces:
            input_features_iter = read_cell_attribute_selection(
                input_features_path, destination,
                namespace=input_features_namespace,
                mask=set(target_features_attr_names), comm=env.comm,
                selection=selection)
            count = 0
            for gid, attr_dict in input_features_iter:
                dst_input_features_attr_dict[gid] = attr_dict
                count += 1
            if rank == 0:
                logger.info(
                    'Read %s feature data for %i cells in population %s' %
                    (input_features_namespace, count, destination))

        arena_margin_size = 0.
        arena_margin = max(arena_margin, 0.)

        # Build the target selectivity configuration for this cell, applying
        # any command-line overrides (coordinates, field_width, peak_rate).
        target_selectivity_features_dict = {}
        target_selectivity_config_dict = {}
        target_field_width_dict = {}
        for gid in selection:
            target_selectivity_features_dict[
                gid] = dst_input_features_attr_dict.get(gid, {})
            target_selectivity_features_dict[gid][
                'Selectivity Type'] = np.asarray([target_selectivity_type],
                                                 dtype=np.uint8)
            num_fields = target_selectivity_features_dict[gid]['Num Fields'][0]

            if coordinates[0] is not None:
                # Explicit coordinates force a single field at that location.
                num_fields = 1
                target_selectivity_features_dict[gid]['X Offset'] = np.asarray(
                    [coordinates[0]], dtype=np.float32)
                target_selectivity_features_dict[gid]['Y Offset'] = np.asarray(
                    [coordinates[1]], dtype=np.float32)
                target_selectivity_features_dict[gid][
                    'Num Fields'] = np.asarray([num_fields], dtype=np.uint8)

            if field_width is not None:
                target_selectivity_features_dict[gid][
                    'Field Width'] = np.asarray([field_width] * num_fields,
                                                dtype=np.float32)
            else:
                this_field_width = target_selectivity_features_dict[gid][
                    'Field Width']
                target_selectivity_features_dict[gid][
                    'Field Width'] = this_field_width[:num_fields]

            if peak_rate is not None:
                target_selectivity_features_dict[gid][
                    'Peak Rate'] = np.asarray([peak_rate] * num_fields,
                                              dtype=np.float32)

            input_cell_config = stimulus.get_input_cell_config(
                target_selectivity_type, selectivity_type_index,
                selectivity_attr_dict=target_selectivity_features_dict[gid])
            if input_cell_config.num_fields > 0:
                # Grow the arena margin to fit the widest field of this cell.
                arena_margin_size = max(
                    arena_margin_size,
                    np.max(input_cell_config.field_width) * arena_margin)
                target_field_width_dict[gid] = input_cell_config.field_width
                target_selectivity_config_dict[gid] = input_cell_config
                has_structured_weights = True

        arena_x, arena_y = stimulus.get_2D_arena_spatial_mesh(
            arena, spatial_resolution, margin=arena_margin_size)

        for gid, input_cell_config in viewitems(
                target_selectivity_config_dict):
            target_map = np.asarray(input_cell_config.get_rate_map(
                arena_x, arena_y, scale=field_width_scale),
                                    dtype=np.float32)
            target_selectivity_features_dict[gid][
                'Arena Rate Map'] = target_map

        # Cells without a structured-weight target contribute nothing to the
        # collective weight reads below.
        if not has_structured_weights:
            selection = []

        # Initial weights keyed by gid -> syn_id.
        initial_weights_by_syn_id_dict = defaultdict(lambda: dict())
        initial_weights_by_source_gid_dict = defaultdict(lambda: dict())
        if initial_weights_path is not None:
            initial_weights_iter = \
                read_cell_attribute_selection(initial_weights_path, destination,
                                              namespace=initial_weights_namespace,
                                              selection=selection)
            initial_weights_gid_count = 0
            initial_weights_syn_count = 0
            for this_gid, syn_weight_attr_dict in initial_weights_iter:
                syn_ids = syn_weight_attr_dict['syn_id']
                weights = syn_weight_attr_dict[synapse_name]
                for (syn_id, weight) in zip(syn_ids, weights):
                    initial_weights_by_syn_id_dict[this_gid][int(
                        syn_id)] = float(weight)
                initial_weights_gid_count += 1
                initial_weights_syn_count += len(syn_ids)
            logger.info(
                'destination: %s; read initial synaptic weights for %i gids and %i syns'
                % (destination, initial_weights_gid_count,
                   initial_weights_syn_count))

        # Non-structured weights (sources excluded from the optimization but
        # whose rate maps still shape the target). Always construct both
        # dictionaries: previously the per-source dict was left undefined
        # (NameError) and the per-syn dict was None (TypeError on subscript)
        # whenever non_structured_sources was empty.
        non_structured_weights_by_syn_id_dict = defaultdict(lambda: dict())
        non_structured_weights_by_source_gid_dict = defaultdict(
            lambda: dict())
        if non_structured_weights_path is not None:
            # Fixed: read from non_structured_weights_path; previously this
            # mistakenly read from initial_weights_path.
            non_structured_weights_iter = \
                read_cell_attribute_selection(non_structured_weights_path, destination,
                                              namespace=non_structured_weights_namespace,
                                              selection=selection)
            non_structured_weights_gid_count = 0
            non_structured_weights_syn_count = 0
            for this_gid, syn_weight_attr_dict in non_structured_weights_iter:
                syn_ids = syn_weight_attr_dict['syn_id']
                weights = syn_weight_attr_dict[synapse_name]
                for (syn_id, weight) in zip(syn_ids, weights):
                    non_structured_weights_by_syn_id_dict[this_gid][int(
                        syn_id)] = float(weight)
                non_structured_weights_gid_count += 1
                non_structured_weights_syn_count += len(syn_ids)
            logger.info(
                'destination: %s; read non-structured synaptic weights for %i gids and %i syns'
                % (
                    destination,
                    non_structured_weights_gid_count,
                    non_structured_weights_syn_count,
                ))

        # Optional reference weights.
        reference_weights_by_syn_id_dict = None
        reference_weights_by_source_gid_dict = defaultdict(lambda: dict())
        if reference_weights_path is not None:
            reference_weights_by_syn_id_dict = defaultdict(lambda: dict())
            reference_weights_iter = \
                read_cell_attribute_selection(reference_weights_path, destination,
                                              namespace=reference_weights_namespace,
                                              selection=selection)
            reference_weights_gid_count = 0
            for this_gid, syn_weight_attr_dict in reference_weights_iter:
                syn_ids = syn_weight_attr_dict['syn_id']
                weights = syn_weight_attr_dict[synapse_name]
                for (syn_id, weight) in zip(syn_ids, weights):
                    reference_weights_by_syn_id_dict[this_gid][int(
                        syn_id)] = float(weight)
                # Fixed: previously never incremented, so the log below
                # always reported 0 gids.
                reference_weights_gid_count += 1
            logger.info(
                'destination: %s; read reference synaptic weights for %i gids'
                % (destination, reference_weights_gid_count))

        # Re-key weights by presynaptic source gid and collect per-source
        # syn_id lists and counts from the connectivity of this cell.
        syn_count_by_source_gid_dict = defaultdict(int)
        source_gid_set_dict = defaultdict(set)
        syn_ids_by_source_gid_dict = defaultdict(list)
        structured_syn_id_count = 0

        if has_structured_weights:
            for source, (destination_gid, (source_gid_array,
                                           conn_attr_dict)) in zip_longest(
                                               all_sources, attr_gen_package):
                syn_ids = conn_attr_dict['Synapses']['syn_id']
                count = 0
                this_initial_weights_by_syn_id_dict = None
                this_initial_weights_by_source_gid_dict = None
                this_reference_weights_by_syn_id_dict = None
                this_reference_weights_by_source_gid_dict = None
                this_non_structured_weights_by_syn_id_dict = None
                this_non_structured_weights_by_source_gid_dict = None
                if destination_gid is not None:
                    this_initial_weights_by_syn_id_dict = initial_weights_by_syn_id_dict[
                        destination_gid]
                    this_initial_weights_by_source_gid_dict = initial_weights_by_source_gid_dict[
                        destination_gid]
                    if reference_weights_by_syn_id_dict is not None:
                        this_reference_weights_by_syn_id_dict = reference_weights_by_syn_id_dict[
                            destination_gid]
                        this_reference_weights_by_source_gid_dict = reference_weights_by_source_gid_dict[
                            destination_gid]
                    this_non_structured_weights_by_syn_id_dict = non_structured_weights_by_syn_id_dict[
                        destination_gid]
                    this_non_structured_weights_by_source_gid_dict = non_structured_weights_by_source_gid_dict[
                        destination_gid]

                for i in range(len(source_gid_array)):
                    this_source_gid = source_gid_array[i]
                    this_syn_id = syn_ids[i]
                    if this_syn_id in this_initial_weights_by_syn_id_dict:
                        this_syn_wgt = this_initial_weights_by_syn_id_dict[
                            this_syn_id]
                        # First synapse from a source gid defines its
                        # per-source weight entry.
                        if this_source_gid not in this_initial_weights_by_source_gid_dict:
                            this_initial_weights_by_source_gid_dict[
                                this_source_gid] = this_syn_wgt
                        if this_reference_weights_by_syn_id_dict is not None:
                            this_reference_weights_by_source_gid_dict[this_source_gid] = \
                                this_reference_weights_by_syn_id_dict[this_syn_id]
                    elif this_syn_id in this_non_structured_weights_by_syn_id_dict:
                        this_syn_wgt = this_non_structured_weights_by_syn_id_dict[
                            this_syn_id]
                        if this_source_gid not in this_non_structured_weights_by_source_gid_dict:
                            this_non_structured_weights_by_source_gid_dict[
                                this_source_gid] = this_syn_wgt
                    source_gid_set_dict[source].add(this_source_gid)
                    syn_ids_by_source_gid_dict[this_source_gid].append(
                        this_syn_id)
                    syn_count_by_source_gid_dict[this_source_gid] += 1
                    count += 1
                # Only structured sources contribute to the output arrays.
                if source not in non_structured_sources:
                    structured_syn_id_count += len(syn_ids)
                logger.info(
                    'Rank %i; destination: %s; gid %i; %d edges from source population %s'
                    % (rank, destination, this_gid, count, source))

        # Read arena rate maps of all connected source cells (collective:
        # ranks without structured weights pass an empty selection).
        input_rate_maps_by_source_gid_dict = {}
        if len(non_structured_sources) > 0:
            non_structured_input_rate_maps_by_source_gid_dict = {}
        else:
            non_structured_input_rate_maps_by_source_gid_dict = None
        for source in all_sources:
            if has_structured_weights:
                source_gids = list(source_gid_set_dict[source])
            else:
                source_gids = []
            if rank == 0:
                logger.info(
                    'Reading %s feature data for %i cells in population %s...'
                    % (input_features_namespace, len(source_gids), source))
            for input_features_namespace in this_input_features_namespaces:
                input_features_iter = read_cell_attribute_selection(
                    input_features_path, source,
                    namespace=input_features_namespace,
                    mask=set(source_features_attr_names), comm=env.comm,
                    selection=source_gids)
                count = 0
                for gid, attr_dict in input_features_iter:
                    this_selectivity_type = attr_dict['Selectivity Type'][0]
                    this_selectivity_type_name = selectivity_type_index[
                        this_selectivity_type]
                    input_cell_config = stimulus.get_input_cell_config(
                        this_selectivity_type, selectivity_type_index,
                        selectivity_attr_dict=attr_dict)
                    this_arena_rate_map = np.asarray(
                        input_cell_config.get_rate_map(arena_x, arena_y),
                        dtype=np.float32)
                    if source in non_structured_sources:
                        non_structured_input_rate_maps_by_source_gid_dict[
                            gid] = this_arena_rate_map
                    else:
                        input_rate_maps_by_source_gid_dict[
                            gid] = this_arena_rate_map
                    count += 1
                if rank == 0:
                    logger.info(
                        'Read %s feature data for %i cells in population %s' %
                        (input_features_namespace, count, source))

        if has_structured_weights:
            # `is_interactive`/`context` are presumably module-level helpers
            # for interactive sessions — not defined in this view.
            if is_interactive:
                context.update(locals())

            save_fig_path = None
            if save_fig is not None:
                save_fig_path = '%s/Structured Weights %s %d.png' % (
                    save_fig, destination, this_gid)

            # Run the structured-weights optimization for this cell.
            normalized_LTP_delta_weights_dict, LTD_delta_weights_dict, arena_LS_map = \
                synapses.generate_structured_weights(target_map=target_selectivity_features_dict[this_gid]['Arena Rate Map'],
                                                     initial_weight_dict=this_initial_weights_by_source_gid_dict,
                                                     reference_weight_dict=this_reference_weights_by_source_gid_dict,
                                                     reference_weights_are_delta=reference_weights_are_delta,
                                                     reference_weights_namespace=reference_weights_namespace,
                                                     input_rate_map_dict=input_rate_maps_by_source_gid_dict,
                                                     non_structured_input_rate_map_dict=non_structured_input_rate_maps_by_source_gid_dict,
                                                     non_structured_weights_dict=this_non_structured_weights_by_source_gid_dict,
                                                     syn_count_dict=syn_count_by_source_gid_dict,
                                                     max_delta_weight=max_delta_weight,
                                                     max_opt_iter=max_opt_iter,
                                                     max_weight_decay_fraction=max_weight_decay_fraction,
                                                     target_amplitude=target_amplitude,
                                                     arena_x=arena_x,
                                                     arena_y=arena_y,
                                                     optimize_method=optimize_method,
                                                     optimize_tol=optimize_tol,
                                                     optimize_grad=optimize_grad,
                                                     verbose=verbose,
                                                     plot=plot,
                                                     show_fig=show_fig,
                                                     save_fig=save_fig_path,
                                                     fig_kwargs={'gid': this_gid,
                                                                 'field_width': target_field_width_dict[this_gid]})
            gc.collect()

            # Record output selectivity features (including the optimizer's
            # arena state map).
            this_selectivity_dict = target_selectivity_features_dict[this_gid]
            output_features_dict[this_gid] = {
                fld: this_selectivity_dict[fld]
                for fld in [
                    'Selectivity Type', 'Num Fields', 'Field Width',
                    'Peak Rate', 'X Offset', 'Y Offset'
                ]
            }
            output_features_dict[this_gid]['Arena State Map'] = np.asarray(
                arena_LS_map.ravel(), dtype=np.float32)

            # Flatten per-source deltas back to per-synapse arrays.
            output_syn_ids = np.empty(structured_syn_id_count, dtype='uint32')
            LTD_output_weights = np.empty(structured_syn_id_count,
                                          dtype='float32')
            LTP_output_weights = np.empty(structured_syn_id_count,
                                          dtype='float32')
            i = 0
            for source_gid in normalized_LTP_delta_weights_dict:
                for syn_id in syn_ids_by_source_gid_dict[source_gid]:
                    output_syn_ids[i] = syn_id
                    LTP_output_weights[i] = normalized_LTP_delta_weights_dict[
                        source_gid]
                    LTD_output_weights[i] = LTD_delta_weights_dict[source_gid]
                    i += 1
            LTP_output_weights_dict[this_gid] = {
                'syn_id': output_syn_ids,
                synapse_name: LTP_output_weights
            }
            LTD_output_weights_dict[this_gid] = {
                'syn_id': output_syn_ids,
                synapse_name: LTD_output_weights
            }
            logger.info(
                'Rank %i; destination: %s; gid %i; generated structured weights for %i inputs in %.2f '
                's' % (rank, destination, this_gid, len(output_syn_ids),
                       time.time() - local_time))
            gid_count += 1

        # Periodic collective write of accumulated results (also fires on
        # iteration 0 since 0 % write_size == 0).
        if iter_count % write_size == 0:
            if not dry_run:
                append_cell_attributes(
                    output_weights_path, destination,
                    LTD_output_weights_dict,
                    namespace=LTD_output_weights_namespace, comm=env.comm,
                    io_size=env.io_size, chunk_size=chunk_size,
                    value_chunk_size=value_chunk_size)
                append_cell_attributes(
                    output_weights_path, destination,
                    LTP_output_weights_dict,
                    namespace=LTP_output_weights_namespace, comm=env.comm,
                    io_size=env.io_size, chunk_size=chunk_size,
                    value_chunk_size=value_chunk_size)
                count = comm.reduce(len(LTP_output_weights_dict),
                                    op=MPI.SUM, root=0)
                if rank == 0:
                    logger.info(
                        'Destination: %s; appended weights for %i cells' %
                        (destination, count))
                if output_features_path is not None:
                    if output_features_namespace is None:
                        output_features_namespace = '%s Selectivity' % target_selectivity_type_name.title(
                        )
                    this_output_features_namespace = '%s %s' % (
                        output_features_namespace, arena_id)
                    # NOTE(review): raw dump of the features dict — looks
                    # like leftover debug output.
                    logger.info(str(output_features_dict))
                    append_cell_attributes(
                        output_features_path, destination,
                        output_features_dict,
                        namespace=this_output_features_namespace)
                    count = comm.reduce(len(output_features_dict),
                                        op=MPI.SUM, root=0)
                    if rank == 0:
                        logger.info(
                            'Destination: %s; appended selectivity features for %i cells'
                            % (destination, count))
            LTP_output_weights_dict.clear()
            LTD_output_weights_dict.clear()
            output_features_dict.clear()
            gc.collect()

    # Final collective write of any remaining results.
    env.comm.barrier()
    if not dry_run:
        append_cell_attributes(output_weights_path, destination,
                               LTD_output_weights_dict,
                               namespace=LTD_output_weights_namespace,
                               comm=env.comm, io_size=env.io_size,
                               chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
        append_cell_attributes(output_weights_path, destination,
                               LTP_output_weights_dict,
                               namespace=LTP_output_weights_namespace,
                               comm=env.comm, io_size=env.io_size,
                               chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
        count = comm.reduce(len(LTP_output_weights_dict), op=MPI.SUM, root=0)
        if rank == 0:
            logger.info('Destination: %s; appended weights for %i cells' %
                        (destination, count))
        if output_features_path is not None:
            if output_features_namespace is None:
                output_features_namespace = 'Selectivity Features'
            this_output_features_namespace = '%s %s' % (
                output_features_namespace, arena_id)
            append_cell_attributes(output_features_path, destination,
                                   output_features_dict,
                                   namespace=this_output_features_namespace)
            count = comm.reduce(len(output_features_dict), op=MPI.SUM, root=0)
            if rank == 0:
                logger.info(
                    'Destination: %s; appended selectivity features for %i cells'
                    % (destination, count))

    env.comm.barrier()
    global_count = comm.gather(gid_count, root=0)
    if rank == 0:
        logger.info(
            'destination: %s; %i ranks assigned structured weights to %i cells in %.2f s'
            % (destination, comm.size, np.sum(global_count),
               time.time() - start_time))
def main(config, coordinates, field_width, gid, input_features_path,
         input_features_namespaces, initial_weights_path,
         output_features_namespace, output_features_path, output_weights_path,
         reference_weights_path, h5types_path, synapse_name,
         initial_weights_namespace, output_weights_namespace,
         reference_weights_namespace, connections_path, destination, sources,
         non_structured_sources, non_structured_weights_namespace,
         non_structured_weights_path, arena_id, field_width_scale,
         max_opt_iter, max_weight_decay_fraction, optimize_tol, peak_rate,
         reference_weights_are_delta, arena_margin, target_amplitude, io_size,
         chunk_size, value_chunk_size, cache_size, write_size, verbose,
         dry_run, plot, show_fig, save_fig, debug):
    """
    MPI-parallel generation of structured synaptic weights.

    Each rank is assigned a subset of destination gids; for every gid the
    routine reads input feature data and connectivity, optimizes structured
    (LTP/LTD delta) weights against a target place-field rate map, and
    appends the resulting weights (and optionally the selectivity features)
    to HDF5 output files via collective writes.

    :param config: str (path to .yaml model configuration file)
    :param coordinates: list of (x, y) place-field centers; when non-empty,
        overrides per-gid feature data
    :param field_width: float (place-field width)
    :param gid: collection of int; explicit target gids (empty -> all gids
        present in the connectivity file)
    :param input_features_path: str (path to .h5 file with input features)
    :param input_features_namespaces: list of str (feature namespaces,
        combined with arena_id at read time)
    :param initial_weights_path: str (path to .h5 file) or None
    :param output_features_namespace: str or None (None -> derived default)
    :param output_features_path: str (path to .h5 file) or None
    :param output_weights_path: str (path to .h5 file)
    :param reference_weights_path: str (path to .h5 file) or None
    :param h5types_path: str (path to .h5 file providing /H5Types)
    :param synapse_name: str (name of the weight attribute, e.g. 'AMPA')
    :param initial_weights_namespace: str
    :param output_weights_namespace: str (prefixed with 'LTP '/'LTD ')
    :param reference_weights_namespace: str
    :param connections_path: str (path to .h5 connectivity file)
    :param destination: str (destination population name)
    :param sources: list of str (structured source population names)
    :param non_structured_sources: list of str
    :param non_structured_weights_namespace: str
    :param non_structured_weights_path: str or None
    :param arena_id: str
    :param field_width_scale: float
    :param max_opt_iter: int (optimizer iteration cap)
    :param max_weight_decay_fraction: float
    :param optimize_tol: float
    :param peak_rate: float
    :param reference_weights_are_delta: bool
        NOTE(review): currently unused here — the reference-weight kwargs to
        generate_structured_weights are commented out below.
    :param arena_margin: float (margin added around the arena mesh)
    :param target_amplitude: float
    :param io_size: int (number of I/O ranks; -1 -> comm.size)
    :param chunk_size: int (HDF5 chunking for appends)
    :param value_chunk_size: int
    :param cache_size: int  # NOTE(review): appears unused in this function
    :param write_size: int (append every write_size iterations; 0 disables
        intermediate writes)
    :param verbose: bool
    :param dry_run: bool (skip all file writes)
    :param plot: bool
    :param show_fig: bool
    :param save_fig: str (directory for figures) or None
    :param debug: bool (stop after 10 iterations)
    """
    utils.config_logging(verbose)
    script_name = __file__
    logger = utils.get_script_logger(script_name)
    comm = MPI.COMM_WORLD
    rank = comm.rank
    nranks = comm.size
    if io_size == -1:
        io_size = comm.size
    if rank == 0:
        logger.info(f'{comm.size} ranks have been allocated')
    env = Env(comm=comm, config_file=config, io_size=io_size)
    env.comm.barrier()
    # Plotting with neither a save path nor an explicit show flag implies
    # interactive display.
    if plot and (not save_fig) and (not show_fig):
        show_fig = True
    # Rank 0 seeds the output file with /H5Types (copied from the initial
    # weights file or a dedicated h5types file) before any collective append.
    if (not dry_run) and (rank == 0):
        if not os.path.isfile(output_weights_path):
            if initial_weights_path is not None:
                input_file = h5py.File(initial_weights_path, 'r')
            elif h5types_path is not None:
                input_file = h5py.File(h5types_path, 'r')
            else:
                raise RuntimeError(
                    'h5types input path must be specified when weights path is not specified.'
                )
            output_file = h5py.File(output_weights_path, 'w')
            input_file.copy('/H5Types', output_file)
            input_file.close()
            output_file.close()
    env.comm.barrier()
    LTD_output_weights_namespace = f'LTD {output_weights_namespace} {arena_id}'
    LTP_output_weights_namespace = f'LTP {output_weights_namespace} {arena_id}'
    this_input_features_namespaces = [
        f'{input_features_namespace} {arena_id}'
        for input_features_namespace in input_features_namespaces
    ]
    # Map selectivity-type index -> name (inverse of env.selectivity_types).
    selectivity_type_index = {
        i: n for n, i in viewitems(env.selectivity_types)
    }
    target_selectivity_type_name = 'place'
    target_selectivity_type = env.selectivity_types[
        target_selectivity_type_name]
    features_attrs = defaultdict(dict)  # NOTE(review): appears unused below
    source_features_attr_names = [
        'Selectivity Type', 'Num Fields', 'Field Width', 'Peak Rate',
        'Module ID', 'Grid Spacing', 'Grid Orientation',
        'Field Width Concentration Factor', 'X Offset', 'Y Offset'
    ]
    target_features_attr_names = [
        'Selectivity Type', 'Num Fields', 'Field Width', 'Peak Rate',
        'X Offset', 'Y Offset'
    ]
    # NOTE(review): seed_offset and default_run_vel are computed but not
    # referenced later in this function — confirm whether still needed.
    seed_offset = int(
        env.model_config['Random Seeds']['GC Structured Weights'])
    spatial_resolution = env.stimulus_config['Spatial Resolution']  # cm
    arena = env.stimulus_config['Arena'][arena_id]
    default_run_vel = arena.properties['default run velocity']  # cm/s
    gid_count = 0
    start_time = time.time()
    # Target gid set: explicit gids from the command line, or (below) all
    # destination gids found in the connectivity file.
    target_gid_set = None
    if len(gid) > 0:
        target_gid_set = set(gid)
    projections = [(source, destination) for source in sources]
    graph_info = read_graph_info(connections_path,
                                 namespaces=['Connections', 'Synapses'],
                                 read_node_index=True)
    for projection in projections:
        if projection not in graph_info:
            raise RuntimeError(
                f'Projection {projection[0]} -> {projection[1]} is not present in connections file.'
            )
        if target_gid_set is None:
            target_gid_set = set(graph_info[projection][1])
    all_sources = sources + non_structured_sources
    # Read per-source input feature attributes, scattered across ranks.
    src_input_features_attr_dict = {source: {} for source in all_sources}
    for source in sorted(all_sources):
        this_src_input_features_attr_dict = {}
        for this_input_features_namespace in this_input_features_namespaces:
            logger.info(
                f'Rank {rank}: Reading {this_input_features_namespace} feature data for cells in population {source}'
            )
            input_features_dict = scatter_read_cell_attributes(
                input_features_path, source,
                namespaces=[this_input_features_namespace],
                mask=set(source_features_attr_names),
                comm=env.comm, io_size=env.io_size)
            # NOTE(review): loop variable shadows the `gid` parameter.
            for gid, attr_dict in input_features_dict[
                    this_input_features_namespace]:
                this_src_input_features_attr_dict[gid] = attr_dict
        src_input_features_attr_dict[
            source] = this_src_input_features_attr_dict
        source_gid_count = env.comm.reduce(
            len(this_src_input_features_attr_dict), op=MPI.SUM, root=0)
        if rank == 0:
            logger.info(
                f'Rank {rank}: Read feature data for {source_gid_count} cells in population {source}'
            )
    # Round-robin assignment of target gids to ranks.
    dst_gids = []
    if target_gid_set is not None:
        for i, gid in enumerate(target_gid_set):
            if i % nranks == rank:
                dst_gids.append(gid)
    # Read destination feature attributes for this rank's gids, keeping only
    # gids with at least one field (or all gids when coordinates are given).
    dst_input_features_attr_dict = {}
    for this_input_features_namespace in this_input_features_namespaces:
        feature_count = 0
        gid_count = 0
        logger.info(
            f'Rank {rank}: reading {this_input_features_namespace} feature data for {len(dst_gids)} cells in population {destination}'
        )
        input_features_iter = scatter_read_cell_attribute_selection(
            input_features_path, destination,
            namespace=this_input_features_namespace,
            mask=set(target_features_attr_names),
            selection=dst_gids, io_size=env.io_size, comm=env.comm)
        for gid, attr_dict in input_features_iter:
            gid_count += 1
            if (len(coordinates) > 0) or (attr_dict['Num Fields'][0] > 0):
                dst_input_features_attr_dict[gid] = attr_dict
                feature_count += 1
        # NOTE(review): message reads "{gid_count} / {feature_count}" —
        # confirm the intended order (read vs. kept) is not swapped.
        logger.info(
            f'Rank {rank}: read {this_input_features_namespace} feature data for '
            f'{gid_count} / {feature_count} cells in population {destination}')
        feature_count = env.comm.reduce(feature_count, op=MPI.SUM, root=0)
        env.comm.barrier()
        if rank == 0:
            logger.info(
                f'Read {this_input_features_namespace} feature data for {feature_count} cells in population {destination}'
            )
    # Rebalance: gather all gids that actually have features, then
    # redistribute them round-robin and exchange the attribute data so each
    # rank holds its newly assigned gids.
    feature_dst_gids = list(dst_input_features_attr_dict.keys())
    all_feature_gids_per_rank = comm.allgather(feature_dst_gids)
    all_feature_gids = sorted(
        [item for sublist in all_feature_gids_per_rank for item in sublist])
    request_dst_gids = []
    for i, gid in enumerate(all_feature_gids):
        if i % nranks == rank:
            request_dst_gids.append(gid)
    dst_input_features_attr_dict = exchange_input_features(
        env.comm, request_dst_gids, dst_input_features_attr_dict)
    dst_gids = list(dst_input_features_attr_dict.keys())
    if rank == 0:
        # NOTE(review): logs the full feature dict — potentially very large.
        logger.info(
            f"Rank {rank} feature dict is {dst_input_features_attr_dict}")
    dst_count = env.comm.reduce(len(dst_gids), op=MPI.SUM, root=0)
    logger.info(f"Rank {rank} has {len(dst_gids)} feature gids")
    if rank == 0:
        logger.info(f'Total {dst_count} feature gids')
    # All ranks must run the same number of loop iterations because the loop
    # body contains collective operations; ranks with fewer gids iterate
    # with an empty selection.
    max_dst_count = env.comm.allreduce(len(dst_gids), op=MPI.MAX)
    env.comm.barrier()
    max_iter_count = max_dst_count
    output_features_dict = {}
    LTP_output_weights_dict = {}
    LTD_output_weights_dict = {}
    # NOTE(review): non_structured_output_weights_dict is never populated.
    non_structured_output_weights_dict = {}
    for iter_count in range(max_iter_count):
        gc.collect()
        local_time = time.time()
        selection = []
        if len(dst_gids) > 0:
            dst_gid = dst_gids.pop()
            selection.append(dst_gid)
            logger.info(f'Rank {rank} received gid {dst_gid}')
        env.comm.barrier()
        arena_margin_size = 0.
        arena_margin = max(arena_margin, 0.)
        target_selectivity_features_dict = {}
        target_selectivity_config_dict = {}
        target_field_width_dict = {}
        # Build the target selectivity configuration for this iteration's
        # gid(s); arena_margin_size accumulates the required mesh margin.
        for destination_gid in selection:
            arena_margin_size = init_selectivity_config(
                destination_gid, spatial_resolution, arena, arena_margin,
                arena_margin_size, coordinates, field_width,
                field_width_scale, peak_rate, target_selectivity_type,
                selectivity_type_index, dst_input_features_attr_dict,
                target_selectivity_features_dict,
                target_selectivity_config_dict, target_field_width_dict,
                logger=logger)
        arena_x, arena_y = stimulus.get_2D_arena_spatial_mesh(
            arena, spatial_resolution, margin=arena_margin_size)
        # Restrict selection to gids for which a config was produced.
        selection = list(target_selectivity_features_dict.keys())
        initial_weights_by_source_gid_dict = defaultdict(lambda: dict())
        # read_weights is collective: all ranks call it even with an empty
        # selection.
        initial_weights_by_syn_id_dict = \
            read_weights(initial_weights_path, initial_weights_namespace,
                         synapse_name, destination, selection, env.comm,
                         env.io_size, defaultdict(lambda: dict()),
                         logger=logger if rank == 0 else None)
        non_structured_weights_by_source_gid_dict = defaultdict(lambda: dict())
        non_structured_weights_by_syn_id_dict = None
        if len(non_structured_sources) > 0:
            non_structured_weights_by_syn_id_dict = \
                read_weights(non_structured_weights_path,
                             non_structured_weights_namespace, synapse_name,
                             destination, selection, env.comm, env.io_size,
                             defaultdict(lambda: dict()),
                             logger=logger if rank == 0 else None)
        reference_weights_by_syn_id_dict = None
        reference_weights_by_source_gid_dict = defaultdict(lambda: dict())
        if reference_weights_path is not None:
            reference_weights_by_syn_id_dict = \
                read_weights(reference_weights_path,
                             reference_weights_namespace, synapse_name,
                             destination, selection, env.comm, env.io_size,
                             defaultdict(lambda: dict()),
                             logger=logger if rank == 0 else None)
        source_gid_set_dict = defaultdict(set)
        syn_count_by_source_gid_dict = defaultdict(lambda: defaultdict(int))
        syn_ids_by_source_gid_dict = defaultdict(lambda: defaultdict(list))
        structured_syn_id_count = defaultdict(int)
        non_structured_syn_id_count = defaultdict(int)
        projections = [(source, destination) for source in all_sources]
        edge_iter_dict, edge_attr_info = scatter_read_graph_selection(
            connections_path, selection=selection, namespaces=['Synapses'],
            projections=projections, comm=env.comm, io_size=env.io_size)
        # Populate the per-source synapse bookkeeping dicts from the edges.
        syn_counts_by_source = init_syn_weight_dicts(
            destination, non_structured_sources, edge_iter_dict,
            edge_attr_info, initial_weights_by_syn_id_dict,
            initial_weights_by_source_gid_dict,
            non_structured_weights_by_syn_id_dict,
            non_structured_weights_by_source_gid_dict,
            reference_weights_by_syn_id_dict,
            reference_weights_by_source_gid_dict, source_gid_set_dict,
            syn_count_by_source_gid_dict, syn_ids_by_source_gid_dict,
            structured_syn_id_count, non_structured_syn_id_count)
        for source in syn_counts_by_source:
            for this_gid in syn_counts_by_source[source]:
                count = syn_counts_by_source[source][this_gid]
                logger.info(
                    f'Rank {rank}: destination: {destination}; gid {this_gid}; '
                    f'{count} edges from source population {source}')
        input_rate_maps_by_source_gid_dict = {}
        if len(non_structured_sources) > 0:
            non_structured_input_rate_maps_by_source_gid_dict = {}
        else:
            non_structured_input_rate_maps_by_source_gid_dict = None
        # Fetch feature data for the presynaptic gids of this selection and
        # evaluate each source cell's rate map on the arena mesh.
        for source in all_sources:
            source_gids = list(source_gid_set_dict[source])
            if rank == 0:
                logger.info(
                    f'Rank {rank}: getting feature data for {len(source_gids)} cells in population {source}'
                )
            this_src_input_features = exchange_input_features(
                env.comm, source_gids, src_input_features_attr_dict[source])
            count = 0
            for this_gid in source_gids:
                attr_dict = this_src_input_features[this_gid]
                this_selectivity_type = attr_dict['Selectivity Type'][0]
                this_selectivity_type_name = selectivity_type_index[
                    this_selectivity_type]
                input_cell_config = stimulus.get_input_cell_config(
                    this_selectivity_type, selectivity_type_index,
                    selectivity_attr_dict=attr_dict)
                this_arena_rate_map = np.asarray(
                    input_cell_config.get_rate_map(arena_x, arena_y),
                    dtype=np.float32)
                if source in non_structured_sources:
                    non_structured_input_rate_maps_by_source_gid_dict[
                        this_gid] = this_arena_rate_map
                else:
                    input_rate_maps_by_source_gid_dict[
                        this_gid] = this_arena_rate_map
                count += 1
        # Optimize structured weights for each selected gid.
        for destination_gid in selection:
            # NOTE(review): is_interactive / context are presumably
            # module-level globals (e.g. from a click/ipython harness) —
            # not defined in this function.
            if is_interactive:
                context.update(locals())
            save_fig_path = None
            if save_fig is not None:
                save_fig_path = f'{save_fig}/Structured Weights {destination} {destination_gid}.png'
            reference_weight_dict = None
            if reference_weights_path is not None:
                reference_weight_dict = reference_weights_by_source_gid_dict[
                    destination_gid]
            LTP_delta_weights_dict, LTD_delta_weights_dict, arena_structured_map = \
                synapses.generate_structured_weights(destination_gid,
                                                     target_map=target_selectivity_features_dict[destination_gid]['Arena Rate Map'],
                                                     initial_weight_dict=initial_weights_by_source_gid_dict[destination_gid],
                                                     #reference_weight_dict=reference_weight_dict,
                                                     #reference_weights_are_delta=reference_weights_are_delta,
                                                     #reference_weights_namespace=reference_weights_namespace,
                                                     input_rate_map_dict=input_rate_maps_by_source_gid_dict,
                                                     non_structured_input_rate_map_dict=non_structured_input_rate_maps_by_source_gid_dict,
                                                     non_structured_weights_dict=non_structured_weights_by_source_gid_dict[destination_gid],
                                                     syn_count_dict=syn_count_by_source_gid_dict[destination_gid],
                                                     max_opt_iter=max_opt_iter,
                                                     max_weight_decay_fraction=max_weight_decay_fraction,
                                                     target_amplitude=target_amplitude,
                                                     arena_x=arena_x, arena_y=arena_y,
                                                     optimize_tol=optimize_tol,
                                                     verbose=verbose if rank == 0 else False,
                                                     plot=plot, show_fig=show_fig,
                                                     save_fig=save_fig_path,
                                                     fig_kwargs={'gid': destination_gid,
                                                                 'field_width': target_field_width_dict[destination_gid]})
            input_rate_maps_by_source_gid_dict.clear()
            target_map_flat = target_selectivity_features_dict[
                destination_gid]['Arena Rate Map'].flat
            arena_map_residual_mae = np.mean(
                np.abs(arena_structured_map - target_map_flat))
            output_features_dict[destination_gid] = \
                { fld: target_selectivity_features_dict[destination_gid][fld]
                  for fld in ['Selectivity Type', 'Num Fields',
                              'Field Width', 'Peak Rate',
                              'X Offset', 'Y Offset',]}
            output_features_dict[destination_gid][
                'Rate Map Residual Mean Error'] = np.asarray(
                    [arena_map_residual_mae], dtype=np.float32)
            # Flatten per-source delta weights into parallel syn_id/weight
            # arrays for appending.
            this_structured_syn_id_count = structured_syn_id_count[
                destination_gid]
            output_syn_ids = np.empty(this_structured_syn_id_count,
                                      dtype='uint32')
            LTD_output_weights = np.empty(this_structured_syn_id_count,
                                          dtype='float32')
            LTP_output_weights = np.empty(this_structured_syn_id_count,
                                          dtype='float32')
            i = 0
            for source_gid in LTP_delta_weights_dict:
                for syn_id in syn_ids_by_source_gid_dict[destination_gid][
                        source_gid]:
                    output_syn_ids[i] = syn_id
                    LTP_output_weights[i] = LTP_delta_weights_dict[source_gid]
                    LTD_output_weights[i] = LTD_delta_weights_dict[source_gid]
                    i += 1
            LTP_output_weights_dict[destination_gid] = {
                'syn_id': output_syn_ids, synapse_name: LTP_output_weights
            }
            LTD_output_weights_dict[destination_gid] = {
                'syn_id': output_syn_ids, synapse_name: LTD_output_weights
            }
            # NOTE(review): this_non_structured_syn_id_count and the i = 0
            # reset below are dead — likely leftovers of removed code.
            this_non_structured_syn_id_count = non_structured_syn_id_count[
                destination_gid]
            i = 0
            logger.info(
                f'Rank {rank}; destination: {destination}; gid {destination_gid}; '
                f'generated structured weights for {len(output_syn_ids)} inputs in {time.time() - local_time:.2f} s; '
                f'residual error is {arena_map_residual_mae:.2f}')
            gid_count += 1
        gc.collect()
        env.comm.barrier()
        # Periodic collective flush of accumulated weights/features.
        if (write_size > 0) and (iter_count % write_size == 0):
            if not dry_run:
                append_cell_attributes(output_weights_path, destination,
                                       LTD_output_weights_dict,
                                       namespace=LTD_output_weights_namespace,
                                       comm=env.comm, io_size=env.io_size,
                                       chunk_size=chunk_size,
                                       value_chunk_size=value_chunk_size)
                append_cell_attributes(output_weights_path, destination,
                                       LTP_output_weights_dict,
                                       namespace=LTP_output_weights_namespace,
                                       comm=env.comm, io_size=env.io_size,
                                       chunk_size=chunk_size,
                                       value_chunk_size=value_chunk_size)
                count = env.comm.reduce(len(LTP_output_weights_dict),
                                        op=MPI.SUM, root=0)
                env.comm.barrier()
                if rank == 0:
                    logger.info(
                        f'Destination: {destination}; appended weights for {count} cells'
                    )
                if output_features_path is not None:
                    if output_features_namespace is None:
                        output_features_namespace = f'{target_selectivity_type_name.title()} Selectivity'
                    this_output_features_namespace = f'{output_features_namespace} {arena_id}'
                    append_cell_attributes(
                        output_features_path, destination,
                        output_features_dict,
                        namespace=this_output_features_namespace)
                    count = env.comm.reduce(len(output_features_dict),
                                            op=MPI.SUM, root=0)
                    env.comm.barrier()
                    if rank == 0:
                        logger.info(
                            f'Destination: {destination}; appended selectivity features for {count} cells'
                        )
            LTP_output_weights_dict.clear()
            LTD_output_weights_dict.clear()
            output_features_dict.clear()
            gc.collect()
            env.comm.barrier()
        # NOTE(review): debug break is rank-local but all ranks share
        # iter_count, so the break is collective-safe.
        if (iter_count >= 10) and debug:
            break
    env.comm.barrier()
    # Final flush of any weights/features accumulated since the last
    # periodic write.
    if not dry_run:
        append_cell_attributes(output_weights_path, destination,
                               LTD_output_weights_dict,
                               namespace=LTD_output_weights_namespace,
                               comm=env.comm, io_size=env.io_size,
                               chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
        append_cell_attributes(output_weights_path, destination,
                               LTP_output_weights_dict,
                               namespace=LTP_output_weights_namespace,
                               comm=env.comm, io_size=env.io_size,
                               chunk_size=chunk_size,
                               value_chunk_size=value_chunk_size)
        # NOTE(review): uses `comm` here but `env.comm` elsewhere — confirm
        # the two communicators are identical.
        count = comm.reduce(len(LTP_output_weights_dict), op=MPI.SUM, root=0)
        env.comm.barrier()
        if rank == 0:
            logger.info(
                f'Destination: {destination}; appended weights for {count} cells'
            )
        if output_features_path is not None:
            # NOTE(review): fallback namespace 'Selectivity Features' differs
            # from the periodic-write fallback
            # ('{Place} Selectivity') — confirm which is intended.
            if output_features_namespace is None:
                output_features_namespace = 'Selectivity Features'
            this_output_features_namespace = f'{output_features_namespace} {arena_id}'
            append_cell_attributes(output_features_path, destination,
                                   output_features_dict,
                                   namespace=this_output_features_namespace)
            count = env.comm.reduce(len(output_features_dict), op=MPI.SUM,
                                    root=0)
            env.comm.barrier()
            if rank == 0:
                logger.info(
                    f'Destination: {destination}; appended selectivity features for {count} cells'
                )
    env.comm.barrier()
    global_count = env.comm.gather(gid_count, root=0)
    env.comm.barrier()
    if rank == 0:
        total_count = np.sum(global_count)
        total_time = time.time() - start_time
        logger.info(
            f'Destination: {destination}; '
            f'{env.comm.size} ranks assigned structured weights to {total_count} cells in {total_time:.2f} s'
        )
def main(config, coordinates, gid, field_width, peak_rate,
         input_features_path, input_features_namespaces, output_weights_path,
         output_features_path, weights_path, h5types_path, synapse_name,
         initial_weights_namespace, structured_weights_namespace,
         connections_path, destination, sources, arena_id, baseline_weight,
         field_width_scale, max_iter, verbose, dry_run, interactive):
    """
    Serial generation of structured synaptic weights for a single gid.

    Builds a target place-field rate map for `gid` from the supplied
    coordinates, reads the cell's synaptic connectivity and (optionally)
    initial weights, reads input feature data for all presynaptic cells,
    runs the structured-weights generation, and appends the resulting
    weights (and optionally the target features) to HDF5 output files.

    :param config: str (path to .yaml model configuration file)
    :param coordinates: list of (x, y) place-field centers
    :param gid: int (the single destination gid to process)
    :param field_width: float
    :param peak_rate: float
    :param input_features_path: str (path to .h5 file)
    :param input_features_namespaces: list of str
    :param output_weights_path: str (path to .h5 file) or None
        (None -> falls back to weights_path)
    :param output_features_path: str (path to .h5 file) or None
    :param weights_path: str (path to .h5 file with initial weights) or None
    :param h5types_path: str (path to .h5 file providing /H5Types)
    :param synapse_name: str (name of the weight attribute)
    :param initial_weights_namespace: str
    :param structured_weights_namespace: str
    :param connections_path: str (path to .h5 connectivity file)
    :param destination: str (destination population name)
    :param sources: list of str (source population names)
    :param arena_id: str
    :param baseline_weight: float
    :param field_width_scale: float
    :param max_iter: int  # NOTE(review): appears unused in this function
    :param verbose: bool
    :param dry_run: bool (skip all file writes)
    :param interactive: bool (passed through to the weights generator)
    """
    utils.config_logging(verbose)
    logger = utils.get_script_logger(__file__)
    env = Env(config_file=config)
    # Default the output path to the initial-weights file (append in place).
    if output_weights_path is None:
        if weights_path is None:
            raise RuntimeError('Output weights path must be specified when weights path is not specified.')
        output_weights_path = weights_path
    # Seed a fresh output file with /H5Types before appending.
    if (not dry_run):
        if not os.path.isfile(output_weights_path):
            if weights_path is not None:
                input_file = h5py.File(weights_path,'r')
            elif h5types_path is not None:
                input_file = h5py.File(h5types_path,'r')
            else:
                raise RuntimeError('h5types input path must be specified when weights path is not specified.')
            output_file = h5py.File(output_weights_path,'w')
            input_file.copy('/H5Types',output_file)
            input_file.close()
            output_file.close()
    this_input_features_namespaces = ['%s %s' % (input_features_namespace, arena_id)
                                      for input_features_namespace in input_features_namespaces]
    # Read this gid's initial synaptic weights, if a weights file was given.
    initial_weights_dict = None
    if weights_path is not None:
        logger.info('Reading initial weights data from %s...' % weights_path)
        cell_attributes_dict = read_cell_attribute_selection(weights_path, destination,
                                                             namespaces=[initial_weights_namespace],
                                                             selection=[gid])
        if initial_weights_namespace in cell_attributes_dict:
            initial_weights_iter = cell_attributes_dict[initial_weights_namespace]
            # NOTE(review): the comprehension variable shadows the `gid`
            # parameter; the selection is a single gid so the value is
            # unchanged afterwards, but this is fragile.
            initial_weights_dict = { gid: attr_dict for gid, attr_dict in initial_weights_iter }
        else:
            raise RuntimeError('Initial weights namespace %s was not found in file %s' %
                               (initial_weights_namespace, weights_path))
        logger.info('Rank %i; destination: %s; read synaptic weights for %i cells' %
                    (env.comm.rank, destination, len(initial_weights_dict)))
    features_attr_names = ['Num Fields', 'Field Width', 'Peak Rate',
                           'X Offset', 'Y Offset', 'Arena Rate Map']
    # Deterministic RNG seeded per-gid from the model's configured seed.
    local_random = np.random.RandomState()
    seed_offset = int(env.model_config['Random Seeds']['GC Structured Weights'])
    local_random.seed(int(gid + seed_offset))
    spatial_resolution = env.stimulus_config['Spatial Resolution']  # cm
    arena = env.stimulus_config['Arena'][arena_id]
    default_run_vel = arena.properties['default run velocity']  # cm/s
    # Arena mesh coordinates (reused later as the spatial_mesh argument).
    x, y = stimulus.get_2D_arena_spatial_mesh(arena, spatial_resolution)
    # 2D Gaussian plasticity kernel, vectorized over the mesh coordinates
    # (parameters x_loc, y_loc, sx, sy are excluded from vectorization).
    plasticity_kernel = lambda x, y, x_loc, y_loc, sx, sy: gauss2d(x-x_loc, y-y_loc, sx=sx, sy=sy)
    plasticity_kernel = np.vectorize(plasticity_kernel, excluded=[2,3,4,5])
    # Build the target (destination) selectivity features from the given
    # place-field coordinates.
    dst_input_features = defaultdict(dict)
    num_fields = len(coordinates)
    this_field_width = np.array([field_width]*num_fields, dtype=np.float32)
    this_peak_rate = np.array([peak_rate]*num_fields, dtype=np.float32)
    this_x0 = np.array([x for x, y in coordinates], dtype=np.float32)
    this_y0 = np.array([y for x, y in coordinates], dtype=np.float32)
    this_rate_map = np.asarray(get_rate_map(this_x0, this_y0,
                                            this_field_width, this_peak_rate,
                                            x, y),
                               dtype=np.float32)
    selectivity_type = env.selectivity_types['place']
    dst_input_features[destination][gid] = {
        'Selectivity Type': np.array([selectivity_type], dtype=np.uint8),
        'Num Fields': np.array([num_fields], dtype=np.uint8),
        'Field Width': this_field_width,
        'Peak Rate': this_peak_rate,
        'X Offset': this_x0,
        'Y Offset': this_y0,
        'Arena Rate Map': this_rate_map.ravel() }
    selection=[gid]
    structured_weights_dict = {}
    source_syn_dict = defaultdict(lambda: defaultdict(list))
    # Map syn_id -> initial weight (0.0 for synapses without a stored weight).
    syn_weight_dict = {}
    if weights_path is not None:
        initial_weights_iter = read_cell_attribute_selection(weights_path, destination,
                                                             namespace=initial_weights_namespace,
                                                             selection=selection)
        syn_weight_attr_dict = dict(initial_weights_iter)
        syn_ids = syn_weight_attr_dict[gid]['syn_id']
        weights = syn_weight_attr_dict[gid][synapse_name]
        for (syn_id, weight) in zip(syn_ids, weights):
            syn_weight_dict[int(syn_id)] = float(weight)
    logger.info('destination: %s; gid %i; received synaptic weights for %i synapses' %
                (destination, gid, len(syn_weight_dict)))
    # Read this gid's presynaptic connectivity and group (syn_id, weight)
    # pairs by source population and source gid.
    (graph, edge_attr_info) = read_graph_selection(file_name=connections_path,
                                                   selection=[gid],
                                                   namespaces=['Synapses'])
    syn_id_attr_index = None
    for source, edge_iter in viewitems(graph[destination]):
        this_edge_attr_info = edge_attr_info[destination][source]
        if 'Synapses' in this_edge_attr_info and \
           'syn_id' in this_edge_attr_info['Synapses']:
            syn_id_attr_index = this_edge_attr_info['Synapses']['syn_id']
        for (destination_gid, edges) in edge_iter:
            assert destination_gid == gid
            source_gids, edge_attrs = edges
            syn_ids = edge_attrs['Synapses'][syn_id_attr_index]
            this_source_syn_dict = source_syn_dict[source]
            count = 0
            for i in range(len(source_gids)):
                this_source_gid = source_gids[i]
                this_syn_id = syn_ids[i]
                this_syn_wgt = syn_weight_dict.get(this_syn_id, 0.0)
                this_source_syn_dict[this_source_gid].append((this_syn_id, this_syn_wgt))
                count += 1
            logger.info('destination: %s; gid %i; %d synaptic weights from source population %s' %
                        (destination, gid, count, source))
    # Read input feature attributes for all presynaptic gids per source.
    src_input_features = defaultdict(dict)
    for source in sources:
        source_gids = list(source_syn_dict[source].keys())
        for input_features_namespace in this_input_features_namespaces:
            input_features_iter = read_cell_attribute_selection(input_features_path, source,
                                                                namespace=input_features_namespace,
                                                                mask=set(features_attr_names),
                                                                selection=source_gids)
            this_src_input_features = src_input_features[source]
            count = 0
            # NOTE(review): loop variable shadows the `gid` parameter; after
            # this loop `gid` holds the last source gid read — confirm no
            # later use relies on the original parameter value.
            for gid, attr_dict in input_features_iter:
                this_src_input_features[gid] = attr_dict
                count += 1
            logger.info('Read %s feature data for %i cells in population %s' %
                        (input_features_namespace, count, source))
    # NOTE(review): destination_gid here is the leaked loop variable from the
    # edge-reading loop above (equal to the target gid by the assert); a
    # NameError would occur if the graph selection were empty.
    this_syn_weights = \
        synapses.generate_structured_weights(destination_gid,
                                             destination,
                                             synapse_name, sources,
                                             dst_input_features, src_input_features,
                                             source_syn_dict,
                                             spatial_mesh=(x,y),
                                             plasticity_kernel=plasticity_kernel,
                                             field_width_scale=field_width_scale,
                                             baseline_weight=baseline_weight,
                                             local_random=local_random,
                                             interactive=interactive)
    assert this_syn_weights is not None
    structured_weights_dict[destination_gid] = this_syn_weights
    logger.info('destination: %s; gid %i; generated structured weights for %i inputs' %
                (destination, destination_gid, len(this_syn_weights['syn_id'])))
    gc.collect()
    if not dry_run:
        logger.info('Destination: %s; appending structured weights...' % (destination))
        this_structured_weights_namespace = '%s %s' % (structured_weights_namespace, arena_id)
        append_cell_attributes(output_weights_path, destination, structured_weights_dict,
                               namespace=this_structured_weights_namespace)
        logger.info('Destination: %s; appended structured weights' % (destination))
        structured_weights_dict.clear()
        if output_features_path is not None:
            output_features_namespace = 'Place Selectivity %s' % arena_id
            cell_attr_dict = dst_input_features[destination]
            logger.info('Destination: %s; appending features...' % (destination))
            append_cell_attributes(output_features_path, destination, cell_attr_dict,
                                   namespace=output_features_namespace)
        gc.collect()
    # Release the larger per-gid data structures.
    del(syn_weight_dict)
    del(src_input_features)
    del(dst_input_features)