def compare_foodLR_xpos(
    rootdir: Path = 'data/run/',
    bodydat: Path = 'body.dat',
    # params : Path = 'params.json',
    idx: Optional[int] = None,
    to_compare: Tuple[str, str] = ('food_left', 'food_right'),
    show: bool = True,
):
    # we will put the data in here
    dct_weight_to_xpos_foodL: Dict[float, float] = dict()
    dct_weight_to_xpos_foodR: Dict[float, float] = dict()

    # get all the directories, loop over them
    lst_wgt_dirs: List[str] = os.listdir(rootdir)
    # filter out only the directories
    lst_wgt_dirs = list(filter(
        lambda p: os.path.isdir(joinPath(rootdir, p)),
        lst_wgt_dirs,
    ))

    count: int = 1
    count_max: int = len(lst_wgt_dirs)

    for wgt_dir in lst_wgt_dirs:
        # figure out the weight from the directory name ('<name>_<weight>')
        wgt: float = float(wgt_dir.split('_')[-1])
        print(f' >> loading data for weight = {wgt} \t ({count} / {count_max})')

        # get data for both sides
        data_L: NDArray[(Any, Any), CoordsRotArr] = read_body_data(joinPath(rootdir, wgt_dir, to_compare[0], bodydat))
        data_R: NDArray[(Any, Any), CoordsRotArr] = read_body_data(joinPath(rootdir, wgt_dir, to_compare[1], bodydat))

        # get the index -- this only happens once, if at all
        if idx is None:
            idx = data_L.shape[0] - 1

        # store the x-position of the first body segment at timestep `idx`
        dct_weight_to_xpos_foodL[wgt] = data_L[idx][0]['x']
        dct_weight_to_xpos_foodR[wgt] = data_R[idx][0]['x']

        count += 1

    # plot
    plt.plot(*split_dict_arrs(dct_weight_to_xpos_foodL), 'o', label='food left')
    plt.plot(*split_dict_arrs(dct_weight_to_xpos_foodR), 'o', label='food right')
    plt.xlabel('connection strength')
    plt.ylabel('x-axis position at end of run')
    plt.title(rootdir)
    plt.legend()

    if show:
        plt.show()
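# a minimal usage sketch for `compare_foodLR_xpos` (the directory layout is an
# assumption: `rootdir` should contain one subdirectory per swept weight, named
# '<something>_<weight>', each holding 'food_left/' and 'food_right/'
# subdirectories with a 'body.dat' -- i.e. the output of `sweep_conn_weight`):
#
#   compare_foodLR_xpos(
#       rootdir = 'data/run/',
#       bodydat = 'body.dat',
#   )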
def plot_act(
    rootdir: str = 'data/run/act.dat',
    names: Union[str, List[str], None] = None,
    strict_fname: bool = True,
    show: bool = True,
):
    """plot activations of worm neurons

    ### Parameters:
     - `rootdir : str`
       file to look for, expects space-separated columns with a header row
       (defaults to `'data/run/act.dat'`)
     - `names : Union[str,List[str],None]`
       comma-separated string (or regular list) of neuron names.
       will attempt to match against the file's columns using regex
       (defaults to `None`)
     - `strict_fname : bool`
       set this to false if you want to use a filename other than `'act.dat'`
       (defaults to `True`)
    """
    # fix rootdir if only a directory is given
    if rootdir.endswith('/') or (strict_fname and not rootdir.endswith('act.dat')):
        rootdir = joinPath(rootdir, 'act.dat')
    print(rootdir)

    # split names
    if isinstance(names, str):
        names = names.split(',')
    print(f'> raw names: {names}')

    # read data
    data_raw = pd.read_csv(rootdir, sep=' ').to_records(index=False)
    fields_raw: List[str] = list(data_raw.dtype.fields.keys())
    # data_raw = np.genfromtxt(rootdir, delimiter = ' ', dtype = np.float).T
    # print(data_raw.shape, fields_raw)

    names_new: List[str] = pattern_match_names(names, fields_raw)

    # don't plot the time column against itself
    if 't' in names_new:
        names_new.remove('t')

    # plot
    T: NDArray = data_raw['t']
    V: Dict[str, NDArray] = {x: data_raw[x] for x in names_new}

    for v_name, v_arr in V.items():
        # print(v_name, v_arr.shape, v_arr.dtype)
        plt.plot(T, v_arr, label=v_name)

    plt.title(rootdir)
    plt.xlabel('time')
    plt.ylabel('neuron output')
    plt.legend()

    if show:
        plt.show()
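# a minimal usage sketch for `plot_act` (the neuron names are illustrative --
# they are regex-matched against the column headers of the act.dat file):
#
#   plot_act(rootdir = 'data/run/', names = 'AWA,RIM')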
def run_genetic_algorithm_loadJSON(cfgfile: Path):
    raise NotImplementedError(
        "this function won't work yet due to some parameters being callables")

    # NOTE: everything below is unreachable until the above is resolved
    # get the specified json file
    with open(cfgfile, 'r') as f_json:
        config: Dict[str, Any] = json.load(f_json)

    # copy the read-in contents to the run's folder
    if "rootdir" not in config:
        raise KeyError("missing 'rootdir' key!")
    mkdir(config["rootdir"])
    with open(joinPath(config["rootdir"], 'run_config.json'), 'w') as f_out:
        json.dump(config, f_out)

    # run the main function, passing params
    run_genetic_algorithm(**config)
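# a sketch of the kind of config file `run_genetic_algorithm_loadJSON` expects,
# once implemented (every key other than 'rootdir' is hypothetical and would
# need to match a keyword argument of `run_genetic_algorithm`):
#
#   {
#       "rootdir" : "data/geno_run/",
#       ...
#   }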
def plot_net(
    params: str,
    nrvsys: Union[str, List[str]] = ["Head", "VentralCord"],
    strict_fname: bool = False,
    show_weights: bool = True,
    spring_layout: bool = False,
):
    # fix params path if only a directory is given
    if params.endswith('/') or (strict_fname and not params.endswith('params.json')):
        params = joinPath(params, 'params.json')

    # figure out which nervous systems to plot
    if isinstance(nrvsys, str):
        nrvsys = nrvsys.split(',')

    # load network
    with open(params, 'r') as fin:
        data = json.load(fin)

    # create network
    G = nx.DiGraph()
    edges_byType: Dict[str, list] = {
        'ele': list(),
        'chem': list(),
    }
    weights_byType: Dict[str, dict] = {
        'ele': dict(),
        'chem': dict(),
    }

    for ns in nrvsys:
        for nrn in data[ns]["neurons"]:
            G.add_node(nrn)

        if ("n_units" in data[ns]) and (int(data[ns]["n_units"]) > 1):
            # multi-unit nervous systems are not supported yet
            raise NotImplementedError()
            # NOTE: the block below is unreachable until the above is implemented
            n_units = int(data[ns]["n_units"])
            for u in range(n_units):
                for conn in data[ns]["connections"]:
                    G.add_edge(conn["from"], conn["to"])
        else:
            for conn in data[ns]["connections"]:
                G.add_edge(conn["from"], conn["to"])
                edges_byType[conn["type"]].append((conn["from"], conn["to"]))
                weights_byType[conn["type"]][(conn["from"], conn["to"])] = conn["weight"]

    print(G.nodes())
    print(G.edges())

    if spring_layout:
        pos: Dict[str, Tuple[float, float]] = nx.spring_layout(G)
    else:
        pos = DEFAULT_POS

    nx.draw_networkx_nodes(G, pos, node_size=1500, node_color='#E3FFB2')
    nx.draw_networkx_labels(G, pos)

    # draw chem (directed)
    nx.draw_networkx_edges(
        G, pos,
        edgelist=edges_byType['chem'],
        edge_color='r',
        arrows=True,
        arrowsize=30,
        connectionstyle='arc3,rad=0.1',
        min_target_margin=20,
    )
    # draw ele (undirected)
    nx.draw_networkx_edges(G, pos, edgelist=edges_byType['ele'], edge_color='b', arrows=False)

    # draw weights
    if show_weights:
        nx.draw_networkx_edge_labels(
            G, pos,
            edge_labels=weights_byType['chem'],
        )
        nx.draw_networkx_edge_labels(
            G, pos,
            edge_labels=weights_byType['ele'],
        )

    plt.title(params)
    plt.show()
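# a minimal usage sketch for `plot_net` (the path matches the defaults used by
# the sweep functions below; nervous system names must be keys in the params json):
#
#   plot_net(params = 'input/params.json', nrvsys = 'Head,VentralCord')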
def multi_food_run(
    rootdir: Path = 'data/run/',
    foodPos: Union[None, str, Tuple[float, float]] = (-0.003, 0.005),
    angle: Optional[float] = 1.57,
    **kwargs,
):
    """runs multiple trials of the simulation with food on left, right, and absent

    runs each of the following:
    ```python
    dct_runs : Dict[str,str] = {
        'food_none/' : 'DISABLE',
        'food_left/' : f'{-food_x},{food_y}',
        'food_right/' : f'{food_x},{food_y}',
    }
    ```
    with `food_x`, `food_y` extracted from the `foodPos` parameter,
    or from the `params` json file if `foodPos is None`

    ### Parameters:
     - `rootdir : Path`
       output path, will create folders for each food position inside this directory
       (defaults to `'data/run/'`)
     - `foodPos : Union[None, str, Tuple[float,float]]`
       food position, as an `'x,y'` string or an `(x, y)` tuple.
       if `None`, read from the `params` json file
       (defaults to `(-0.003, 0.005)`)
     - `angle : Optional[float]`
       angle parameter, passed through to `genCmd_singlerun`
       (defaults to `1.57`)

    ### Raises:
     - `TypeError` : if `foodPos` can't be read
     - `KeyError` : shouldn't ever be raised -- state *should* be inaccessible
    """
    # get food position
    if foodPos is None:
        # from params json
        with open(kwargs['params'], 'r') as fin_json:
            params_json: dict = json.load(fin_json)
            food_x = params_json["ChemoReceptors"]["foodPos"]["x"]
            food_y = params_json["ChemoReceptors"]["foodPos"]["y"]
    else:
        # or from CLI (takes priority, if given)
        if isinstance(foodPos, str):
            food_x, food_y = foodPos.split(',')
        elif isinstance(foodPos, tuple):
            food_x, food_y = foodPos
        else:
            raise TypeError(f"couldn't read foodPos, expected str or tuple: {foodPos}")
        food_x = float(food_x)
        food_y = float(food_y)

    # take absolute value so left/right positions mirror each other
    food_x = abs(food_x)

    # make sure we don't pass the food pos further down
    if 'foodPos' in kwargs:
        raise KeyError('"foodPos" still specified? this should be inaccessible')

    # create output dir
    mkdir(rootdir)

    # save state
    dump_state(locals(), rootdir)

    # set up the different runs
    dct_runs: Dict[str, str] = {
        'food_none/': 'DISABLE',
        'food_left/': f'{-food_x},{food_y}',
        'food_right/': f'{food_x},{food_y}',
    }

    # dictionary of running processes
    dct_procs: dict = dict()

    # start each process
    # (loop variable renamed from `foodPos` to avoid shadowing the parameter)
    for name, foodPos_cmd in dct_runs.items():
        # make the output dir
        out_path: str = joinPath(rootdir, name)
        mkdir(out_path)

        # set up the command by passing kwargs down
        cmd: List[str] = genCmd_singlerun(
            output=out_path,
            foodPos=foodPos_cmd,
            angle=angle,
            **kwargs,
        ).split(' ')
        print(cmd)

        # run the process, write stderr and stdout to the log file
        with open(joinPath(out_path, 'log.txt'), 'w') as f_log:
            p = subprocess.Popen(
                cmd,
                stderr=subprocess.STDOUT,
                stdout=f_log,
            )

        # store process in dict for later
        dct_procs[name] = p

    # wait for all of them to finish
    for name, p in dct_procs.items():
        p.wait()

        if p.returncode:
            print(f' >> ERROR: process terminated with exit code {p.returncode}, check log.txt for:\n\t{str(p.args)}')
        else:
            print(f' >> process complete: {name}')
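# a minimal usage sketch for `multi_food_run`; `params` is forwarded via kwargs
# to `genCmd_singlerun`, and is only read directly here when `foodPos is None`:
#
#   multi_food_run(
#       rootdir = 'data/run/',
#       foodPos = '-0.003,0.005',
#       params = 'input/params.json',
#   )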
def sweep_param(
    rootdir: Path = 'data/run/',
    param_key: Union[tuple, str] = 'ChemoReceptors.alpha',
    param_range: Union[dict, tuple, str] = '0.0,1.0,lin,3',
    params: Path = 'input/params.json',
    multi_food: bool = False,
    ASK_CONTINUE: bool = True,
    **kwargs,
):
    # create output dir
    mkdir(rootdir)

    # save state
    dump_state(locals(), rootdir)

    # open base json
    with open(params, 'r') as fin_json:
        params_data: dict = json.load(fin_json)

    # convert input string-lists
    # (useful as shorthand when using python-fire CLI)
    # split up the path to the parameter by dots
    param_key_tup: Tuple[str, ...] = (
        tuple(param_key.split('.'))
        if isinstance(param_key, str)
        else tuple(param_key)
    )
    param_key_sdot: str = '.'.join(param_key_tup)

    # convert into a dict
    param_range_dict: Dict[str, Any] = strList_to_dict(
        in_data=param_range,
        keys_list=['min', 'max', 'scale', 'npts'],
        type_map={'min': float, 'max': float, 'npts': int},
    )

    print(f'>> parameter to modify: {param_key_sdot}')
    print(f'>> range of values: {param_range_dict}')

    # locate the parameter in the nested dict:
    # returns the innermost containing dict and the final key
    param_fin_dict: dict = params_data
    param_fin_key: str = ''
    try:
        param_fin_dict, param_fin_key = keylist_access_nested_dict(params_data, param_key_tup)
    except KeyError as ex:
        print(
            f'\n{param_key_sdot} was not a valid parameter for the params file read from {params}. '
            'Be sure that the parameter you want to modify exists in the json file.\n'
        )
        raise ex

    # figure out the range of values to try
    param_vals: NDArray = SPACE_GENERATOR_MAPPING[param_range_dict['scale']](
        param_range_dict['min'],
        param_range_dict['max'],
        param_range_dict['npts'],
    )

    count: int = 1
    count_max: int = len(param_vals)

    print(f'> will modify parameter: {param_key_sdot}\n\t>> {param_fin_dict}\t-->\t{param_fin_key}')
    print(f'> will try {len(param_vals)} values:\n\t>> {param_vals}')
    if ASK_CONTINUE:
        input('press enter to continue...')

    # run for each value of the parameter
    for pv in param_vals:
        print(f'> running for param val {pv} \t ({count} / {count_max})')

        # make dir (assumes `rootdir` ends with '/')
        outpath: str = f"{rootdir}{param_key_sdot}_{pv:.5}/"
        outpath_params: str = joinPath(outpath, 'in-params.json')
        mkdir(outpath)

        # set value
        param_fin_dict[param_fin_key] = pv

        # save modified params
        with open(outpath_params, 'w') as fout:
            json.dump(params_data, fout, indent='\t')

        # run
        if multi_food:
            Launchers.multi_food_run(rootdir=outpath, params=outpath_params, **kwargs)
        else:
            # split into an argument list, as in `multi_food_run`
            cmd: List[str] = genCmd_singlerun(
                output=outpath,
                params=outpath_params,
                **kwargs,
            ).split(' ')
            print(cmd)

            # run the process, write stderr and stdout to the log file
            # note: these processes are not waited on, so runs proceed in parallel
            with open(joinPath(outpath, 'log.txt'), 'w') as f_log:
                p = subprocess.Popen(
                    cmd,
                    stderr=subprocess.STDOUT,
                    stdout=f_log,
                )

        count += 1
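# a minimal usage sketch for `sweep_param` -- sweeps 'ChemoReceptors.alpha'
# over 3 linearly spaced values in [0.0, 1.0] (the defaults above), running
# the left/right/no-food trio of trials for each value:
#
#   sweep_param(
#       rootdir = 'data/run/',
#       param_key = 'ChemoReceptors.alpha',
#       param_range = '0.0,1.0,lin,3',
#       params = 'input/params.json',
#       multi_food = True,
#   )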
def sweep_conn_weight(
    rootdir: Path = 'data/run/',
    conn_key: Union[dict, tuple, str] = 'Head,AWA,RIM,chem',
    conn_range: Union[dict, tuple, str] = '0.0,1.0,lin,3',
    params: Path = 'input/params.json',
    special_scaling_map: Optional[Dict[str, float]] = None,
    ASK_CONTINUE: bool = True,
    **kwargs,
):
    # create output dir
    mkdir(rootdir)

    # save state
    dump_state(locals(), rootdir)

    # open base json
    with open(params, 'r') as fin_json:
        params_data: dict = json.load(fin_json)

    # convert input string-lists to dictionaries
    # (useful as shorthand when using python-fire CLI)
    conn_key = strList_to_dict(
        in_data=conn_key,
        keys_list=['NS', 'from', 'to', 'type'],
    )
    conn_range = strList_to_dict(
        in_data=conn_range,
        keys_list=['min', 'max', 'scale', 'npts'],
        type_map={'min': float, 'max': float, 'npts': int},
    )

    print(f'>> connection to modify: {conn_key}')
    print(f'>> range of values: {conn_range}')

    # find the appropriate connection(s) to modify
    conn_idxs: List[Optional[int]] = find_conn_idx_regex(
        params_data=params_data,
        conn_key=conn_key,
        # special_scaling_map = special_scaling_map,
    )

    if None in conn_idxs:
        # REVIEW: this is probably not good behavior
        # if the connection doesn't exist, add it
        params_data[conn_key['NS']]['connections'].append({
            'from': conn_key['from'],
            'to': conn_key['to'],
            'type': conn_key['type'],
            'weight': float('nan'),
        })

        # if the connection still doesn't exist, something has gone wrong
        conn_idxs = [find_conn_idx(
            params_data[conn_key['NS']]['connections'],
            conn_key,
        )]

    if (None in conn_idxs) or (len(conn_idxs) == 0):
        raise KeyError(f"couldn't find connection index -- this state should be inaccessible. list: {conn_idxs}")

    # figure out the range of values to try
    weight_vals: NDArray = SPACE_GENERATOR_MAPPING[conn_range['scale']](
        conn_range['min'],
        conn_range['max'],
        conn_range['npts'],
    )

    count: int = 1
    count_max: int = len(weight_vals)

    print('> will modify connections:')
    for cidx in conn_idxs:
        print('\t>> ' + str(params_data[conn_key['NS']]['connections'][cidx]))
    print('> will try weights:')
    print(f'\t>> {weight_vals}')
    if ASK_CONTINUE:
        input('press enter to continue...')

    # set up for scaling the weight
    wgt_scale: float = 1.0
    if special_scaling_map is None:
        special_scaling_map = dict()

    # run for each value of connection strength
    for wgt in weight_vals:
        print(f'> running for weight {wgt} \t ({count} / {count_max})')

        # make dir (assumes `rootdir` ends with '/')
        outpath: str = f"{rootdir}{conn_key['from']}-{conn_key['to'].replace('*','x')}_{wgt:.5}/"
        outpath_params: str = joinPath(outpath, 'in-params.json')
        mkdir(outpath)

        # set weights
        for cidx in conn_idxs:
            # scale the weight if the target neuron name is in the map
            cidx_nrn_to: str = params_data[conn_key['NS']]['connections'][cidx]['to']
            if cidx_nrn_to in special_scaling_map:
                wgt_scale = special_scaling_map[cidx_nrn_to]
            else:
                wgt_scale = 1.0

            # set the new weight
            params_data[conn_key['NS']]['connections'][cidx]['weight'] = wgt * wgt_scale

        # save modified params
        with open(outpath_params, 'w') as fout:
            json.dump(params_data, fout, indent='\t')

        # run
        Launchers.multi_food_run(rootdir=outpath, params=outpath_params, **kwargs)

        count += 1
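# a minimal usage sketch for `sweep_conn_weight` -- sweeps the Head AWA->RIM
# chemical synapse weight over 3 linearly spaced values (the defaults above);
# the scaling map here is hypothetical, its keys are 'to'-side neuron names
# of matched connections:
#
#   sweep_conn_weight(
#       rootdir = 'data/run/',
#       conn_key = 'Head,AWA,RIM,chem',
#       conn_range = '0.0,1.0,lin,3',
#       special_scaling_map = {'RIM' : 0.5},
#   )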