def run(in_parms):
    """
    Runs AMBER with in_parms.

    Copies the topology and coordinate inputs to names derived from
    parms['output_name'], writes a sander input file, runs sander
    (recording the exact command in an executable .sh file), times the
    run, and scans the sander log for errors.  On success the parms
    are written to a .config file so an identical run is skipped later.
    """
    parms = copy.deepcopy(in_parms)
    name = parms['output_name']

    # A matching .config file marks an already-completed run.
    config = name + ".config"
    if util.is_same_dict_in_file(in_parms, config):
        print("simulation already run.")
        return

    input_top = parms['topology']
    new_top = name + '.top'
    shutil.copy(input_top, new_top)

    input_crd = parms['input_crds']
    if input_crd.endswith('.crd'):
        new_crd = name + '.in.crd'
    else:
        new_crd = name + '.in.rst'
    shutil.copy(input_crd, new_crd)

    # Minimization writes a .crd restart; dynamics writes a .rst.
    if 'n_step_minimization' in parms:
        rst = name + ".crd"
    else:
        rst = name + ".rst"

    trj = name + ".trj"
    vel = name + ".vel"
    ene = name + ".ene"
    inf = name + ".inf"
    sander_out = name + ".sander.out"
    sander_in = name + ".sander.in"

    # with-statement guarantees the file is flushed and closed before
    # sander reads it (the original leaked the handle).
    with open(sander_in, "w") as f:
        f.write(make_sander_input_file(parms))

    cmd = "sander -O -i %s -o %s -p %s -c %s -r %s -x %s -v %s -e %s -inf %s" \
        % (sander_in, sander_out, new_top, new_crd, rst, trj, vel, ene, inf)
    if parms['constraint_pdb']:
        cmd += " -ref %s" % parms['constraint_pdb'].replace('.pdb', '.crd')

    # Save an executable record of the exact command that was run.
    sh_script = name + '.sh'
    with open(sh_script, "w") as f:
        f.write(cmd)
    user_rwx = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
    os.chmod(sh_script, user_rwx)

    stopwatch = util.Timer()
    os.system(cmd)
    stopwatch.stop()
    with open(name + '.time', 'w') as f:
        f.write(stopwatch.str())

    # os.system gives no useful status here, so scan the sander log:
    # a FATAL line or a near-empty log means the run failed.
    with open(sander_out, "r") as f:
        error_log = f.readlines()
    is_error = any('FATAL' in line for line in error_log)
    if len(error_log) <= 1:
        is_error = True
    if not is_error:
        util.write_dict(in_parms, config)
def edit_prefs_main(prefs_path):
    '''
    Displays the main preference editing menu to the user.
    '''
    # Menu entries; their indices map onto the branches below.
    main_edit_opts = [
        'General preferences',
        'Manual preferences',
        'Auto preferences',
        'Save preferences',
        'Return to: Main Menu'
    ]
    done = False
    while not done:
        # Show the menu and get the user's selection index.
        choice = util.prompt_options('Edit Preferences - Main Menu',
                                     main_edit_opts)
        if choice == 0:
            # Edit general preferences.
            edit_prefs_general()
        elif choice == 1:
            # Manual stoichiometry preferences (not implemented yet).
            print('Manual stoichiometry preferences coming later...')
        elif choice == 2:
            # Auto stoichiometry preferences (not implemented yet).
            print('Auto stoichiometry preferences coming later...')
        elif choice == 3:
            # Persist the module-level prefs dict to disk on demand.
            util.write_dict(prefs, os.path.abspath(prefs_path))
            print('Preferences saved.\n')
        elif choice == 4:
            # Leave the menu loop and return to the main menu.
            done = True
            print('Rock on! Returning to main menu.\n')
def run_shutdown_tasks(prefs_path):
    # Performs cleanup work at program exit: autosaves the module-level
    # preferences dict when the user has enabled autosaving.
    # Get access to global variables.
    global prefs
    # Autosave preferences if possible.
    if prefs['autosave_prefs']:
        util.write_dict(prefs, os.path.abspath(prefs_path))
        print('Preferences autosaved.')
        # NOTE(review): source formatting was lost; this blank-line print
        # is placed inside the if with the autosave message — confirm it
        # was not meant to run unconditionally.
        print('')
def pulse(in_parms, n_step_per_pulse, reset_vel_func):
    """
    Runs a pulsed simulation: n_step_dynamics is split into chunks of
    n_step_per_pulse steps, and between chunks the restart velocities
    are modified via reset_vel_func(soup) — this is how external
    forces are applied.

    Each chunk runs in its own pulse%d subdirectory; the chunks are
    then merged into one trajectory and the subdirectories removed.
    A .config file written at the end marks a completed run.
    """
    config = in_parms['output_name'] + ".config"
    if util.is_same_dict_in_file(in_parms, config):
        print("simulation already run.")
        return

    name = in_parms['output_name']
    shutil.copy(in_parms['topology'], name + '.psf')
    if in_parms['constraint_pdb']:
        shutil.copy(in_parms['constraint_pdb'], name + '.constraint')

    # Split the step count into full pulses plus a short remainder
    # pulse when it doesn't divide evenly.
    n_pulse = in_parms['n_step_dynamics'] // n_step_per_pulse
    n_step_list = [n_step_per_pulse] * n_pulse
    n_excess_step = in_parms['n_step_dynamics'] % n_step_per_pulse
    if n_excess_step > 0:
        n_pulse += 1
        n_step_list.append(n_excess_step)

    # The first pulse restarts from the original inputs, one dir up.
    pulse_in_coor = util.insert_path(in_parms['input_crds'], '..')
    pulse_in_vel = util.insert_path(in_parms['input_vels'], '..')

    stopwatch = util.Timer()
    pulses = ["pulse%d" % i for i in range(n_pulse)]
    for pulse, n_step in zip(pulses, n_step_list):
        util.goto_dir(pulse)
        pulse_parms = copy.deepcopy(in_parms)
        pulse_parms['topology'] = util.insert_path(in_parms['topology'], '..')
        pulse_parms['n_step_dynamics'] = n_step
        if 'constraint_pdb' in pulse_parms and pulse_parms['constraint_pdb']:
            pulse_parms['constraint_pdb'] = \
                util.insert_path(in_parms['constraint_pdb'], '..')
        pulse_parms['input_crds'] = pulse_in_coor
        pulse_parms['input_vels'] = pulse_in_vel

        # Load the restart state and apply the velocity perturbation.
        soup = SoupFromRestartFiles(
            pulse_parms['topology'],
            pulse_parms['input_crds'],
            pulse_parms['input_vels'])
        reset_vel_func(soup)
        pulse_parms['input_vels'] = name + ".in.vel"
        write_soup_to_vel(soup, pulse_parms['input_vels'])

        run(pulse_parms)

        # The next pulse restarts from this pulse's outputs.
        pulse_in_coor = '../%s/%s.coor' % (pulse, name)
        pulse_in_vel = '../%s/%s.vel' % (pulse, name)
        os.chdir('..')
    stopwatch.stop()

    # with-statement closes the timing file (the original leaked it).
    with open(name + '.time', 'w') as f:
        f.write(stopwatch.str())

    merge_simulations(name, pulses)
    for pulse in pulses:
        os.system('rm -rf %s' % pulse)
    util.write_dict(in_parms, config)
def firing_rate(folder, cells, trials=None, out='firing_rate_data.txt',
                param_file='param_spike_detect'):
    '''
    data = firing_rate(folder, cells, trials = [], data_out = 'firing_rate_data.txt')
    Measure firing rate data and output in two forms, one with firing
    rate of the same current input averaged and the other with raw
    firing rate for each trial.
    Output a dictionary with each element holding the data for one
    cell; the key is the cell index.
    data = {cell_idx: [[[stims], [averaged firing rate]],
                       [[trial indices], [stims], [firing rate]]]}
    parameters:
        folder (String) - directory to the folder of the trace data files
        cells (array_like) - indices of cells to analyze
        trials (array_like) - trial numbers; if not provided, analyze
            all the trials found in the folder
        out (String) - output data file directory
        param_file (String) - spike detection parameter file
    return:
        data (dictionary) - firing rate data
    '''
    data = {}
    params = get_params(param_file)
    for cell in cells:
        print('Cell: ', cell)
        # Copy so the caller's list is never mutated (the original used
        # a mutable default argument, a classic Python pitfall).
        ctrials = list(trials) if trials else []
        _stims = []
        _rates = []
        if not len(ctrials):
            # No trials given: scan the folder for this cell's files.
            # Raw string avoids the invalid-escape warning for '\.'.
            pattern = r'Cell_{:04d}_0*([1-9][0-9]*)\.ibw'.format(int(cell))
            for data_file in os.listdir(folder):
                matched = re.match(pattern, data_file)
                if matched:
                    ctrials.append(int(matched.group(1)))
        for trial in ctrials:
            file_dir = folder + os.sep + util.gen_name(cell, trial)
            trace, sr, stim_i = util.load_wave(file_dir)
            _stims.append(stim_i[2])
            # Firing rate = number of detected spikes in the stim window.
            _rates.append(len(spike_detect(trace, sr, params, stim_i[1],
                                           stim_i[0] + stim_i[1])))
        raw = [ctrials, _stims, _rates]
        _stims = np.array(_stims)
        _rates = np.array(_rates)
        stims = np.unique(_stims)
        # Average the rates across trials sharing the same stimulus
        # current (indices computed once, not twice as before).
        ave = np.zeros(len(stims))
        for i, current in enumerate(stims):
            idx = np.nonzero(_stims == current)[0]
            ave[i] = _rates[idx].sum() / len(idx)
        ave_data = [stims.tolist(), ave.tolist()]
        data[cell] = [ave_data, raw]
    util.write_dict(out, data)
    return data
def run_simulation_with_parameters(parms): """ Carries out simulations based on parms """ # For housekeeping, the parms dictionary is written to a # .config file. As an Exception is thrown if simulation failed, # the existence of an equivalent .config file is an indicator # that the simulation has already successfully run. config = parms['output_basename'] + ".config" if util.is_same_dict_in_file(parms, config): print "Skipping: simulation already run." return md_module = get_md_module(parms['force_field']) md_module.run(parms) # No exceptions were thrown - write .config file. util.write_dict(config, parms)
def fromfile(self, file_path):
    """
    Tokenizes one text file into integer-encoded sentences and writes
    two outputs into self.output_folder: the encoded text (pickle) and
    the word->id dictionary.  Output names embed the parent folder
    name and the file basename so parallel workers never collide.
    """
    print('Processing file %s (%s)...' % (file_path, multi_processing.get_pid()))
    word2id = dict()  # key: word <-> value: index
    id2word = dict()
    encoded_text = []
    # with-statement closes the file promptly (original leaked the handle).
    with open(file_path, 'r') as f:
        str_file = f.read()
    for sent in str_file.split("."):
        encoded_sent = []
        for word in sent.split(' '):
            # Skip empty tokens from consecutive separators ('' is
            # already falsy, so a single truthiness check suffices).
            if not word:
                continue
            if word not in word2id:
                # Assign the next consecutive id to a new word
                # (avoid shadowing the builtin 'id').
                word_id = len(word2id)
                word2id[word] = word_id
                id2word[word_id] = word
            encoded_sent.append(word2id[word])
        encoded_text.append(encoded_sent)

    file_basename = multi_processing.get_file_name(file_path)
    # Parent folder names look like "AA", "AB", ...; handle both
    # Windows and POSIX separators (folder fetched once, not twice).
    folder = multi_processing.get_file_folder(file_path)
    if "\\" in folder:
        parent_folder_name = folder.split("\\")[-1]
    else:
        parent_folder_name = folder.split("/")[-1]

    # Write the encoded_text
    util.write_to_pickle(
        encoded_text,
        os.path.join(
            self.output_folder,
            "encoded_text_" + parent_folder_name + "_" + file_basename
            + ".pickle"))
    # Write the dictionary
    util.write_dict(
        os.path.join(
            self.output_folder,
            "dict_" + parent_folder_name + "_" + file_basename + ".dicloc"),
        word2id)
def run(in_parms):
    """
    Reads parms and creates the appropriate NAMD input files, runs
    namd2, times the run, and scans the log for errors.  On success
    the parms are written to a .config file so an identical run is
    skipped later.

    Raises:
        ValueError: if parms['force_field'] is not CHARMM or OPLS.
        RuntimeError: if the NAMD log contains an ERROR line.
    """
    name = in_parms['output_name']
    # Avoid shadowing the builtin 'input'.
    namd_in = name + ".in"
    namd_out = name + ".out"
    config = name + ".config"
    parms = copy.deepcopy(in_parms)
    if util.is_same_dict_in_file(parms, config):
        print("simulation already run.")
        return
    shutil.copy(parms['topology'], name + '.psf')
    if parms['constraint_pdb']:
        shutil.copy(parms['constraint_pdb'], name + '.constraint')
    if 'force_field' in parms:
        if parms['force_field'] == "CHARMM":
            parms['parameter'] = 'parms/charmm22.parameter'
            parms['psf_type'] = 'paraTypeCharmm on'
        elif parms['force_field'] == "OPLS":
            parms['parameter'] = 'parms/opls.parameter'
            parms['psf_type'] = 'paraTypeXplor on'
        else:
            # Raising a bare string is a TypeError in modern Python;
            # use a proper exception class.
            raise ValueError("Can't identify force-field")
    parms['module_dir'] = module_dir
    # with-statement flushes and closes the input file before namd2
    # reads it (the original leaked the handle).
    with open(namd_in, "w") as f:
        f.write(make_namd_input_file(parms))
    stopwatch = util.Timer()
    os.system("namd2 %s > %s" % (namd_in, namd_out))
    stopwatch.stop()
    with open(name + '.time', 'w') as f:
        f.write(stopwatch.str())
    # Scan the log: 'Info' lines may also contain the word ERROR, so
    # they are excluded.
    with open(namd_out, "r") as f:
        for line in f:
            if 'ERROR' in line and 'Info' not in line:
                raise RuntimeError(
                    "NAMD failure: '%s'" % line.replace("\n", ""))
    util.write_dict(in_parms, config)
def main():
    # Step 1: collect project links - freshly crawled when "list" is
    # given on the command line, otherwise loaded from the saved list.
    if len(sys.argv) > 1 and sys.argv[1] == "list":
        driver = webdriver.Firefox()
        projects = list_projects.get_project_links(driver)
        util.write_list(projects)
        driver.close()
    else:
        projects = util.read_list()

    # Step 2: scrape every project page into the records list.
    output = {"records": {"record": []}}
    records = output["records"]["record"]
    total = len(projects)
    for project_index, project_link in enumerate(projects):
        records.append(
            get_data.get_data_from_url(project_link, project_index + 1))
        print("Crawled:\t%d/%d" % (project_index + 1, total))
        # Throttle requests - have mercy on KickStarter :)
        time.sleep(MERCY_TIME)

    # Write the scraped records into the JSON output file.
    util.write_dict(output)
def merge_local_dict(self, process_num):
    """
    Merges every per-file vocabulary in self.output_folder (files
    named "dict_*", excluding 'dict_merged.txt') into a single
    word->id mapping, writes it to 'dict_merged.txt', and returns it.
    """
    out_dir = self.output_folder
    # Collect the shard files to merge.
    files = []
    for fname in os.listdir(out_dir):
        path = os.path.join(out_dir, fname)
        if (os.path.isfile(path) and fname.startswith("dict_")
                and fname != 'dict_merged.txt'):
            files.append(path)

    if len(files) == 1:
        # A single shard needs no merging.
        all_keys = self.read_first_column_file_to_build_set(files[0])
    else:
        # Cap workers at half the shard count so each gets >= 2 files.
        if len(files) // 2 < process_num:
            process_num = len(files) // 2
            print('process_num set to', process_num,
                  'for local dict merging')
        # Fan the shards out over a process pool.
        files_list = multi_processing.chunkify(lst=files, n=process_num)
        p = Pool(process_num)
        sub_merged_dicts = p.starmap(self.local_dicts_merger_worker,
                                     zip(files_list))
        p.close()
        p.join()
        print('All sub-processes done.')
        # Union the partial key sets from every worker.
        all_keys = set()
        for sub in sub_merged_dicts:
            all_keys |= sub

    # Assign consecutive ids to the union of all words.
    result = {key: idx for idx, key in enumerate(all_keys)}
    util.write_dict(os.path.join(out_dir, 'dict_merged.txt'), result)
    return result
def pulse(
        force_field, in_basename, basename, n_step, pulse_fn,
        n_step_per_pulse=100, restraint_pdb="", restraint_force=None):
    """
    Runs a pulse simulation that uses the restart-file modification
    strategy to manage a steered-molecular-dynamics simulation.

    The pulsed approach applies external forces in pulses, which is
    practically carried out by running short constant-energy
    simulations and directly modifying the restart velocities between
    each simulation.  Pulse simulations have certain advantages: for
    instance, the system can respond to the forces between pulses, and
    there is incredible flexibility in applying forces.  The
    disadvantage is the costly setup, which is hopefully mitigated by
    this library.

    Reference: Bosco K. Ho and David A. Agard (2010) "An improved
    strategy for generating forces in steered molecular dynamics: the
    mechanical unfolding of titin, e2lip3 and ubiquitin" PLoS ONE
    5(9):e13068.
    """
    # Grab the simulation parameters for a constant-energy simulation.
    # Constant energy is preferred as we want to ensure energy changes
    # come only from our velocity modification.
    top, crds, vels = get_restart_files(in_basename)
    # use dummy top and crds, which will be overriden
    overall_config_parms = fetch_simulation_parameters(
        force_field, top, crds, restraint_pdb, 'constant_energy',
        basename, restraint_force)
    overall_config_parms.update({
        'input_md_name': in_basename,
        'input_vels': vels,
        'n_step_dynamics': n_step,
        'n_step_per_snapshot': n_step_per_pulse // 2,
        'n_step_per_pulse': n_step_per_pulse
    })

    # Check if the simulation has already run, as the config file is
    # not written until the very end.
    config = basename + ".config"
    if util.is_same_dict_in_file(overall_config_parms, config):
        print("Skipping: pulsing simulation already run.")
        return

    # The overall_config_parms will be written out at the end; make a
    # copy for internal mutation during the pulse loop.
    pulse_parms = copy.deepcopy(overall_config_parms)

    # Calculate steps for each pulse, esp. the short final pulse.
    # Floor division is required: '/' on ints yields a float in
    # Python 3, which would break range() below (the sibling pulse()
    # implementation already uses '//').
    n_pulse = pulse_parms['n_step_dynamics'] // n_step_per_pulse
    n_step_list = [n_step_per_pulse for i in range(n_pulse)]
    n_excess_step = pulse_parms['n_step_dynamics'] % n_step_per_pulse
    if n_excess_step > 0:
        n_pulse += 1
        n_step_list.append(n_excess_step)

    # Prepare restart files for the first pulse (absolute paths so
    # they resolve from inside each pulse subdirectory).
    pulse_parms['topology'] = os.path.abspath(pulse_parms['topology'])
    in_basename = pulse_parms['input_md_name']
    pulse_parms['input_md_name'] = os.path.abspath(in_basename)

    # Now loop through the pulses.
    timer = util.Timer()
    save_dir = os.getcwd()
    pulses = ["pulse%d" % i for i in range(n_pulse)]
    for pulse, n_step in zip(pulses, n_step_list):
        print("Pulse: %s/%d" % (pulse, n_pulse))
        os.chdir(save_dir)
        util.goto_dir(pulse)
        pulse_parms['n_step_dynamics'] = n_step
        soup = soup_from_restart_files(pulse_parms['input_md_name'])
        # Apply forces by modifying the velocities directly.
        pulse_fn(soup)
        crds, vels = write_soup_to_crds_and_vels(
            force_field, soup, basename + '.pulse.in')
        pulse_parms['input_crds'] = crds
        pulse_parms['input_vels'] = vels
        run_simulation_with_parameters(pulse_parms)
        # Set up restart files based on the just-finished pulse.
        pulse_parms['input_md_name'] = os.path.abspath(basename)
    os.chdir(save_dir)

    merge_simulations(force_field, basename, pulses)
    # Cleanup pulse directories after merging.
    util.clean_fname(*pulses)

    # Everything worked, no exceptions thrown: record timing and mark
    # success by writing the config (with-statement closes the file).
    with open(basename + '.time', 'w') as f:
        f.write(timer.str() + '\n')
    util.write_dict(config, overall_config_parms)
"grompp": "", "editconf": "", "genion": "", "genbox": "", "vmd": "", "psfgen": "", "namd2": "", "flipdcd": "", "mod9v8": "" } home_dir = os.path.expanduser('~') binaries_fname = os.path.join(home_dir, '.pdbremix.config') if not os.path.isfile(binaries_fname): util.write_dict(binaries_fname, binaries) else: binaries = util.read_dict(binaries_fname) def binary(bin, arg_str='', out_name=None, in_fname=None): """ Runs an external binary, handles arguments, writes out equivalent .sh file, log file, and can pipe in in_fname. """ if bin in binaries and binaries[bin]: bin = binaries[bin] else: util.check_program(bin) if arg_str: util.run_with_output_file(