def main_init(args):
    """Initialize a FlowProject from a template.

    The available templates are defined in the template module.

    Parameters
    ----------
    args :
        Parsed CLI arguments; must provide ``alias`` and ``template``.

    Raises
    ------
    ValueError
        If ``args.alias`` is not a valid Python identifier.
    RuntimeError
        If the template initialization fails with an OSError.
    """
    if not args.alias.isidentifier():
        raise ValueError(
            "The alias '{}' is not a valid Python identifier and can therefore "
            "not be used as a FlowProject alias.".format(args.alias)
        )
    try:
        get_project()
    except LookupError:
        # No signac project in the current directory yet -- create one
        # named after the alias.
        init_project(name=args.alias)
        print(
            "Initialized signac project with name '{}' in "
            "current directory.".format(args.alias),
            file=sys.stderr,
        )
    try:
        return template.init(alias=args.alias, template=args.template)
    except OSError as error:
        # Chain the original OSError so the traceback keeps the root cause.
        raise RuntimeError(
            f"Error occurred while trying to initialize a flow project: {error}"
        ) from error
def main():
    """Join cross sections and neutron-capture rates from both projects.

    Returns a dataframe combining the talys and hfb_qrpa results, with the
    hfb_qrpa columns suffixed by "_talys".
    """
    here = pathlib.Path(__file__).absolute().parent
    projects_dir = here / ".." / ".." / "projects"

    talys = sg.get_project(root=projects_dir / "talys")
    hfb_qrpa = sg.get_project(root=projects_dir / "talys" / "hfb_qrpa")
    logger.info("talys project: %s" % talys.root_directory())
    logger.info("hfb_qrpa project: %s" % hfb_qrpa.root_directory())

    suffix = "_talys"

    # Cross sections from each project, joined side by side.
    xs_df = get_cross_section(proj=talys).join(
        get_cross_section(proj=hfb_qrpa), rsuffix=suffix
    )

    # Neutron-capture rates from each project, joined side by side.
    rate_df = get_neutron_capture_rate(proj=talys).join(
        get_neutron_capture_rate(proj=hfb_qrpa), rsuffix=suffix
    )

    # Align the cross sections with the rates on the remaining index levels.
    return xs_df.reset_index(level="neutron_energy").join(rate_df)
def _gromacs_str(op_name, gro_name, sys_name, job):
    """Return a grompp + mdrun command string for the given operation.

    Parameters
    ----------
    op_name : str
        Operation name; selects the .mdp file ('em' has no temperature suffix).
    gro_name : str
        Basename of the input .gro structure file.
    sys_name : str
        Basename of the .top topology file.
    job :
        signac job; ``job.sp.T`` selects the temperature-specific .mdp file.
    """
    if op_name == 'em':
        mdp = signac.get_project().fn('src/util/mdp_files/{}.mdp'.format(op_name))
    else:
        # Non-minimization operations use a temperature-specific mdp file.
        mdp = signac.get_project().fn(
            'src/util/mdp_files/{}-{}.mdp'.format(op_name, job.sp.T)
        )
    cmd = ('gmx grompp -f {mdp} -c {gro}.gro -p {sys}.top -o {op}.tpr --maxwarn 1 '
           '&& gmx mdrun -deffnm {op} -cpi {op}.cpt -ntmpi 1')
    return workspace_command(
        cmd.format(mdp=mdp, op=op_name, gro=gro_name, sys=sys_name)
    )
def main():
    """Extract R32 VLE results for one iteration into a CSV file.

    Usage: ``python extract_r32_vle.py [iteration number]``
    """
    if len(sys.argv) != 2:
        print("Usage: python extract_r32_vle.py [iteration number]")
        # sys.exit is preferred over the site-provided exit() builtin.
        sys.exit(1)
    iternum = sys.argv[1]

    R32 = R32Constants()

    run_path = "/scratch365/rdefever/hfcs-fffit/hfcs-fffit/runs/"
    itername = "r32-vle-iter" + str(iternum)
    project_path = run_path + itername
    csv_name = "csv/" + itername + "-results.csv"

    # Properties to extract from each job's results.
    property_names = [
        "liq_density",
        "vap_density",
        "Hvap",
        "Pvap",
        "liq_enthalpy",
        "vap_enthalpy",
    ]

    project = signac.get_project(project_path)
    save_signac_results(project, R32.param_names, property_names, csv_name)
def main(args):
    """Run the requested operation on each given job id (all jobs by default).

    Returns
    -------
    int
        0 on success.

    Raises
    ------
    KeyError
        If ``args.operation`` is not defined in the operations module.
    """
    # Get the project handle
    project = signac.get_project()

    # Default to all jobs if the jobid argument was omitted.
    if not args.jobid:
        args.jobid = [job.get_id() for job in project.find_jobs()]

    logger = logging.getLogger()
    formatter = logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')

    # Iterate through all ids and run the specified operation.
    for jobid in args.jobid:
        job = project.open_job(id=jobid)
        try:
            # The operation is assumed to be defined in the operations module.
            operation = getattr(operations, args.operation)
        except AttributeError:
            raise KeyError("Unknown operation '{}'.".format(args.operation))
        else:
            # Log output of this job to a per-job file.
            filehandler = logging.FileHandler(filename=job.fn('run.log'))
            filehandler.setFormatter(formatter)
            logger.addHandler(filehandler)
            try:
                operation(job)
            finally:
                # Always detach the handler, even if the operation fails.
                logger.removeHandler(filehandler)
    return 0
def fetch_geodata(continental_only=True):
    """Load US airport geodata (lon/lat indexed by IATA code).

    Downloads and caches the raw CSV in the signac project directory on
    first use.  If *continental_only* is True, Alaska and Hawaii airports
    are dropped.
    """
    import pandas as pd
    import signac
    project = signac.get_project()

    # Download the raw CSV once and cache it alongside the project.
    if not project.isfile('airport_geodata.csv'):
        import urllib.request
        url = 'https://datahub.io/core/airport-codes/r/airport-codes.csv'
        urllib.request.urlretrieve(url, project.fn('airport_geodata.csv'))

    print('Reading airport geodata...')
    geodata = (
        pd.read_csv(project.fn('airport_geodata.csv'))
        .dropna(subset=['iata_code'])
    )
    geodata = geodata[geodata['iso_country'] == 'US']
    geodata = geodata[['name', 'iata_code', 'coordinates']].set_index('iata_code')

    # Split the "lon, lat" string into two numeric columns.
    lonlat = geodata['coordinates'].str.split(', ', expand=True)
    geodata['lon'] = pd.to_numeric(lonlat[0])
    geodata['lat'] = pd.to_numeric(lonlat[1])
    geodata = geodata.drop(columns=['coordinates'])

    # Ignore airports with erroneous data near (0, 0)
    geodata = geodata[geodata.lon < -65]
    if continental_only:
        # Drop data for Alaska and Hawaii
        geodata = geodata[geodata.lon > -130]
    return geodata
def generated(job):
    """Check whether the morphology for *job* has been generated.

    For "child" jobs, the parent job's generated morphology and its stdout
    log are first copied into this job's workspace, provided the parent job
    has completed.

    Returns
    -------
    bool
        True when ``output.hoomdxml`` exists in the job workspace.

    Raises
    ------
    SystemError
        If the parent statepoint does not match exactly one job.
    """
    if job.sp.job_type == "child":
        # Get current project
        project = signac.get_project("./")
        # Find all jobs with the same statepoint as the parent
        parent_jobs = project.find_jobs(job.sp.parent_statepoint)
        if len(parent_jobs) == 1:
            # next(iter(...)) instead of the deprecated cursor .next() method.
            parent_job = next(iter(parent_jobs))
        else:
            raise SystemError(
                "Found {} parent jobs, instead of one. Check"
                " the workspace for inconsistencies.".format(len(parent_jobs))
            )
        if not RhacoProject.generated(parent_job):
            # Parent not finished yet; nothing to copy.
            return False
        # Copy the generated morphology
        shutil.copyfile(
            os.path.join(parent_job._wd, "output.hoomdxml"),
            os.path.join(job._wd, "output.hoomdxml"),
        )
        # Also copy the generate stdout
        shutil.copyfile(
            os.path.join(parent_job._wd, "generate_stdout.log"),
            os.path.join(job._wd, "generate_stdout.log"),
        )
    return job.isfile("output.hoomdxml")
def main():
    """Collect GCMC adsorption results into results.csv.

    For each job, reads the chemical potential from the statepoint and the
    average number of adsorbed molecules from the Cassandra property file,
    then writes mu, run index, and areal loading to results.csv.
    """
    # Accessible pore area -- presumably two pore walls; TODO confirm.
    pore_area = 2 * 29.472 * 29.777 * u.angstrom**2

    project = signac.get_project("../")

    mus = []
    nmols = []
    runs = []
    for job in project:
        runs.append(job.sp.run)
        mus.append(job.sp.mu * u.kJ/u.mol)
        thermo = ThermoProps(job.fn("gcmc.out.prp"))
        nmols.append(thermo.prop("Nmols_2", start=200000000).mean())

    # Elements of `mus` already carry kJ/mol units; multiplying the list by
    # u.kJ/u.mol again (as the original did) would square the units and
    # break the .to_value("kJ/mol") conversion below.
    mus = u.unyt_array(mus)
    nmols = np.asarray(nmols)
    runs = np.asarray(runs)

    df = pd.DataFrame(
        columns=["mu-cassandra_kJmol", "run", "nmols_per_nm^2"]
    )
    df["mu-cassandra_kJmol"] = mus.to_value("kJ/mol")
    df["run"] = runs
    df["nmols"] = nmols
    df["nmols_per_nm^2"] = nmols / pore_area.to_value(u.nm**2)
    df.to_csv("results.csv")
def init_project():
    """Initialize GCMC jobs: three replicate runs per chemical potential."""
    # Initialize project
    project = signac.get_project("./")

    # Define temperature
    temperature = 298.0 * u.K

    # Define chemical potentials
    mus = [
        -47.0 * u.kJ / u.mol,
        -46.0 * u.kJ / u.mol,
    ]

    # Run for 300 M steps
    nsteps_gcmc = 300000000

    # For reproducibility
    np.random.seed(594875)

    for mu in mus:
        for run in range(3):
            # Define the state point; units are stripped to plain floats
            # so the statepoint stays JSON-serializable.
            state_point = {
                "T": float(temperature.in_units(u.K).value),
                "mu": float(mu.in_units(u.kJ / u.mol).value),
                "nsteps_gcmc": nsteps_gcmc,
                "seed1": np.random.randint(10**8),
                "seed2": np.random.randint(10**8),
                "run": run,
            }
            project.open_job(state_point).init()
def build_quip_command_line(job, do_gradients=False):
    """Build the ``quip`` command line (as an argv list) for a SOAP descriptor run.

    Parameters
    ----------
    job :
        signac job whose statepoint (``job.sp``) holds the SOAP
        hyperparameters and whose document (``job.doc``) holds
        ``atoms_filename`` and ``global_species``.
    do_gradients : bool
        If True, also request descriptor gradients and timing output.

    Returns
    -------
    list of str
        ``[command, arg1, arg2, ...]`` suitable for subprocess invocation.
    """
    cmd = 'quip'
    args = []
    # May use a separate test atoms file later
    args.append('atoms_filename={:s}'.format(
        os.path.join(signac.get_project().root_directory(),
                     'xyz_files', job.doc.atoms_filename)))
    # NOTE: doubled braces {{ }} render as literal braces in .format();
    # the quadruple {{{{ }}}} around species_Z yields a literal "{{ ... }}"
    # in the final descriptor string.
    args.append(
        'descriptor_str={{soap atom_sigma={sp.atom_width:f} l_max={sp.l_max:d} '
        'n_max={sp.n_max:d} cutoff={sp.cutoff:f} '
        'cutoff_transition_width={sp.cutoff_transition_width:f} '
        'energy_scale={sp.energy_scale:f} add_species n_species={n_species:d} '
        'species_Z={{{{ {species_str:s} }}}} n_sparse={sp.n_sparse:d} '
        'sparse_method=cur_points covariance_type=dot_product '
        'soap_exponent={sp.soap_zeta:d} }}'.format(
            sp=job.sp,
            n_species=len(job.doc.global_species),
            species_str=(' '.join(str(num) for num in job.doc.global_species))))
    if do_gradients:
        args.append('do_grad_descriptor')
        args.append('timing')
    return [
        cmd,
    ] + args
def convert_and_ndx(job):
    """Return a shell command that extracts the 1 ps frame and rebuilds the index.

    Runs in the job workspace: trjconv writes minimize.gro from minimize.xtc,
    then make_ndx regenerates init2.ndx using scripted input from
    second-ndx.txt.
    """
    # Build the path with pathlib joins instead of string concatenation.
    second_ndx = (
        pathlib.Path(signac.get_project().root_directory())
        / "src" / "second-ndx.txt"
    )
    convert = "echo 0 | gmx_dp trjconv -s init.gro -f minimize.xtc -o minimize.gro -b 1.0 -e 1.0"
    ndx = "gmx_dp make_ndx -f minimize.gro -n init.ndx -o init2.ndx < {}".format(
        second_ndx)
    return "cd {}; {}; {}".format(job.workspace(), convert, ndx)
def fix_overlaps(job):
    """Return a shell command that runs the LAMMPS minimization to fix overlaps."""
    # Build the script directory path with pathlib joins, not string concatenation.
    cmds = (
        pathlib.Path(signac.get_project().root_directory())
        / "src" / "util" / "mdp_files"
    )
    return "cd {}; mpirun -np 4 lmp_mpi -in {}/in.minimize -log {}/minimize.log".format(
        job.workspace(), str(cmds.absolute()), job.workspace()
    )
def parse_args(parser: argparse.ArgumentParser):
    """Parse CLI arguments, optionally binding the run to a signac job.

    Also installs the callback deques on ``args.objects`` and invokes any
    argparse function hooks registered on the parser.
    """
    parser.add_argument("--use_signac", default=False, action="store_true")
    parser.add_argument("--signac_root", default=None, dest="_signac_root",
                        help="Root path of signac job for experiment.")
    parser.add_argument("--verbose", "-v", action="store_true")
    parser.add_argument("--help", "-h", action="help")
    parser.add_argument("--exp_tags", default=[], nargs="+", dest="_exp_tags")
    args = parser.parse_args()
    args.objects = dict(function_hooks=parser.function_hooks)

    if args.use_signac:
        # Signac functionality
        import signac
        project = signac.get_project(root=args._signac_root)
        args.objects["signac_project"] = project
        # Statepoint = every public, non-object argument value.
        statepoint = {
            key: val
            for key, val in vars(args).items()
            if not key.startswith("_") and key != "objects"
        }
        signac_job = project.open_job(statepoint).init()
        args.objects["signac_job"] = signac_job
        signac_job.doc["exp_tags"] = args._exp_tags

    # list of func(args, model, train_sequence, test_sequence)
    for callback_slot in ("pretrain_callbacks", "pre_epoch_callbacks",
                          "post_epoch_callbacks", "post_train_callbacks"):
        args.objects[callback_slot] = deque()

    while len(parser.function_hooks["argparse"]) > 0:
        hook = parser.function_hooks["argparse"].popleft()
        hook(args)
    return args
def mdrun_nvt(job):
    """Return a shell command that preps (grompp) and runs the NVT simulation."""
    # Build the mdp path with pathlib joins instead of string concatenation.
    nvt_mdp_path = (
        pathlib.Path(signac.get_project().root_directory())
        / "src" / "util" / "mdp_files" / "nvt.mdp"
    )
    grompp = "gmx_dp grompp -f {} -c em.gro -p init.top -n init2.ndx -o nvt.tpr -maxwarn 1".format(
        nvt_mdp_path)
    nvt = _mdrun_str("nvt")
    return "cd {}; {}; srun -n 1 {}".format(job.workspace(), grompp, nvt)
def main():
    """Concatenate RPA strength-function data for one isotopic chain.

    NOTE(review): ``proton_number`` is a free variable here -- it must be
    defined at module level; confirm against the rest of the file.
    """
    module_path = pathlib.Path(__file__).absolute().parent
    rpa_root = module_path / ".." / ".." / "projects" / "rpa"
    rpa = sg.get_project(root=rpa_root)
    logger.info("rpa project: %s" % rpa.root_directory())

    dataframes = []
    for job in rpa.find_jobs({"proton_number": proton_number}):
        # Select the finite- or zero-temperature output file.
        regime = "finite" if job.sp.temperature > 0 else "zero"
        fname = job.fn(code.out_file(regime, "isovector", "lorentzian"))
        small_df = read(fname)
        # Tag the per-job data with its (Z, N, T) identity.
        labeled = pd.concat(
            [small_df],
            keys=[(proton_number, job.sp.neutron_number, job.sp.temperature)],
            names=["proton_number", "neutron_number", "temperature"],
        ).reset_index()
        dataframes.append(labeled)

    big_df = (
        pd.concat(dataframes)
        .set_index(
            ["proton_number", "neutron_number", "temperature", "excitation_energy"]
        )
        .sort_index()
    )
    return big_df
def mdrun_em(job):
    """Return a shell command that preps (grompp) and runs energy minimization."""
    # Build the mdp path with pathlib joins instead of string concatenation.
    em_mdp_path = (
        pathlib.Path(signac.get_project().root_directory())
        / "src" / "util" / "mdp_files" / "em.mdp"
    )
    grompp = "gmx_dp grompp -f {} -c minimize.gro -p init.top -n init2.ndx -o em.tpr -maxwarn 1".format(
        em_mdp_path)
    em = _mdrun_str("em")
    return "cd {}; {}; srun -n 1 {}".format(job.workspace(), grompp, em)
def build_gap_fit_command_line(job):
    """Build the ``gap_fit`` command line (as an argv list) for fitting a SOAP GAP.

    Parameters
    ----------
    job :
        signac job whose statepoint (``job.sp``) holds the SOAP
        hyperparameters and regularisation values, and whose document
        (``job.doc``) holds ``atoms_filename``, ``global_species``, and the
        energy/force key names.

    Returns
    -------
    list of str
        ``[command, arg1, arg2, ...]`` suitable for subprocess invocation.
        The fitted potential is written to ``potential.xml`` in the job
        workspace.
    """
    cmd = 'gap_fit'
    args = []
    args.append('at_file={:s}'.format(
        os.path.join(signac.get_project().root_directory(),
                     'xyz_files', job.doc.atoms_filename)))
    # NOTE: doubled braces {{ }} render as literal braces in .format();
    # the quadruple {{{{ }}}} around species_Z yields a literal "{{ ... }}".
    args.append(
        'gap={{soap atom_sigma={sp.atom_width:f} l_max={sp.l_max:d} '
        'n_max={sp.n_max:d} cutoff={sp.cutoff:f} '
        'cutoff_transition_width={sp.cutoff_transition_width:f} '
        'energy_scale={sp.energy_scale:f} add_species n_species={n_species:d} '
        'species_Z={{{{ {species_str:s} }}}} n_sparse={sp.n_sparse:d} '
        'sparse_method=cur_points covariance_type=dot_product '
        'soap_exponent={sp.soap_zeta:d} }}'.format(
            sp=job.sp,
            n_species=len(job.doc.global_species),
            species_str=(' '.join(str(num) for num in job.doc.global_species))))
    # Energy and force regularisation from the statepoint; the trailing
    # "1.0 1.0" entries are fixed.
    args.append(
        'default_kernel_regularisation={{ {0.energy_reg:f} {0.force_reg:f}'
        ' 1.0 1.0 }}'.format(job.sp))
    args.append('energy_parameter_name={0.energy_key:s}'.format(job.doc))
    args.append('force_parameter_name={0.force_key:s}'.format(job.doc))
    # We're not fitting with virials, no matter what
    args.append('virial_parameter_name=none')
    args.append('e0_method=average')
    args.append('gp_file={:s}'.format(job.fn('potential.xml')))
    return [
        cmd,
    ] + args
def test_project_workspace(self):
    """Check that `signac project --workspace` prints the project's workspace path."""
    self.call('python -m signac init my_project'.split())
    # str(project) is the project name.
    self.assertEqual(str(signac.get_project()), 'my_project')
    # The printed workspace path must resolve to <tmpdir>/workspace.
    self.assertEqual(
        os.path.realpath(
            self.call('python -m signac project --workspace'.split()).strip()),
        os.path.realpath(os.path.join(self.tmpdir.name, 'workspace')))
def main():
    """Create TALYS jobs from RPA results.

    For each Z=50 RPA job (grouped by photon strength function), create two
    TALYS jobs (astro=y/n), copy over the strength-function file, and write
    the TALYS energy and input files.
    """
    talys_proj = signac.init_project("talys", workspace="workspace")
    logger.info("talys project: %s" % talys_proj.workspace())

    rpa_proj = signac.get_project(root="../rpa/")
    logger.info("rpa project: %s" % rpa_proj.workspace())

    for psf, jobs in rpa_proj.find_jobs({
            "proton_number": 50
    }).groupbydoc("photon_strength_function"):
        for rpa_job in jobs:
            # Lazy %-style logging args (the original used a spurious
            # f-string prefix combined with % formatting here).
            logger.info("Processing %s..", rpa_job.workspace())
            sp = rpa_proj.open_job(id=rpa_job.id).statepoint()
            for yn in "y", "n":
                # flag for calculation of astrophysics reaction rate
                sp.update(dict(astro=yn))
                talys_job = talys_proj.open_job(sp).init()
                util.copy_file(source=rpa_job.fn(psf),
                               destination=talys_job.fn(psf))
                talys_job.doc.setdefault("photon_strength_function", psf)
                talys_api.energy_file(talys_job)
                talys_api.input_file(talys_job)
def __init__(self, config=None, project=None, modules=None):
    """Set up the dashboard app around a signac project.

    Parameters
    ----------
    config : dict, optional
        App configuration; PAGINATION and PER_PAGE receive defaults here.
    project : signac.Project, optional
        Project to serve; defaults to the project found via
        ``signac.get_project()``.
    modules : list, optional
        Dashboard modules; each gets its assets and routes registered.
    """
    if config is None:
        config = {}
    config['PAGINATION'] = config.get('PAGINATION', True)
    config['PER_PAGE'] = config.get('PER_PAGE', 25)
    self.config = config
    # The Flask app must exist before the cache is bound to it.
    self.app = self.create_app(config)
    cache.init_app(self.app)
    if modules is None:
        modules = []
    if project is None:
        self.project = signac.get_project()
    else:
        self.project = project
    # Try to update the project cache. Requires signac 0.9.2 or later.
    try:
        self.project.update_cache()
    except Exception:
        # Best effort: older signac versions lack update_cache().
        pass
    self.assets = self.create_assets()
    self.register_routes(self)
    self.modules = modules
    for module in self.modules:
        module.register_assets(self)
        module.register_routes(self)
def _gromacs_str(mdp, op_name, gro_name):
    """Helper function, returns grompp command string for operation"""
    # Resolve the mdp file relative to the project root.
    mdp_path = signac.get_project().fn("files/{}".format(mdp))
    template = (
        "gmx grompp -f {mdp} -c {gro_name}.gro -p init.top -o {op_name}.tpr "
        "--maxwarn 1 && gmx mdrun -deffnm {op_name} -ntmpi 1"
    )
    return workspace_command(
        template.format(mdp=mdp_path, op_name=op_name, gro_name=gro_name)
    )
def mdrun_compress(job):
    """Return a shell command that preps (grompp) and runs the compression pull."""
    # Build the mdp path with pathlib joins instead of string concatenation.
    compress_mdp_path = (
        pathlib.Path(signac.get_project().root_directory())
        / "src" / "util" / "mdp_files" / "compress.mdp"
    )
    grompp = "gmx_dp grompp -f {} -c nvt.gro -p init.top -n init2.ndx -o compress.tpr -maxwarn 2".format(
        compress_mdp_path)
    compress = _mdrun_str("compress")
    return "cd {}; {}; srun -n 1 -v {} -px compress_pullx.xvg -pf compress_pullf.xvg".format(
        job.workspace(), grompp, compress)
def em_grompp(job):
    """Return a shell command that runs grompp for the energy-minimization step."""
    # Build the mdp path with pathlib joins instead of string concatenation.
    em_mdp_path = (
        pathlib.Path(signac.get_project().root_directory())
        / "src" / "util" / "mdp_files" / "em.mdp"
    )
    # Implicit string concatenation instead of the original backslash
    # continuation, which embedded uncontrolled whitespace in the command.
    msg = ("cd {}; mpirun -np 1 gmx grompp -f {} -c minimize.gro -p init.top "
           "-n init.ndx -o em.tpr -maxwarn 1").format(job.workspace(), em_mdp_path)
    return msg
def getSplitProject(featureJob: signac.Project.Job):
    """Return (creating it if needed) the splits subproject rooted at a feature job."""
    root = featureJob.workspace()
    try:
        return signac.get_project(root=root, search=False)
    except LookupError:
        # No project there yet -- create one with its workspace under "splits".
        return signac.init_project(
            "SyntheticExperimentSplits", root=root, workspace="splits"
        )
def getFeatureProject(graphJob: signac.Project.Job):
    """Return (creating it if needed) the features subproject rooted at a graph job."""
    root = graphJob.workspace()
    try:
        return signac.get_project(root=root, search=False)
    except LookupError:
        # No project there yet -- create one with its workspace under "features".
        return signac.init_project(
            "SyntheticExperimentFeatures", root=root, workspace="features"
        )
def main():
    """Generate one .docx report per environment with template test output.

    Reads the FlowProject definition source, groups jobs by environment,
    sorts them by the detected schema keys, and writes
    ``DOC_DIR/<env_name>.docx`` for each environment.
    """
    repo = git.Repo(search_parent_directories=True)
    commit = repo.head.commit
    with open('define_template_test_project.py', 'r') as file:
        PROJECT_DEFINITION = file.read()
    if not os.path.exists(DOC_DIR):
        os.makedirs(DOC_DIR)
    project = signac.get_project(ext.PROJECT_DIR)
    sort_keys = list(sorted(project.detect_schema()))
    sort_attrs = ['sp.' + '.'.join(key) for key in sort_keys]

    def sort_key(job):
        # Collect the job's values for each schema key; missing keys are
        # skipped and None sorts as 0 so comparisons stay well-defined.
        values = []
        for attr in sort_attrs:
            try:
                values.append(attrgetter(attr)(job))
            except AttributeError:
                pass
        return [0 if v is None else v for v in values]

    environments = project.detect_schema()['environment'][str]
    for env in sorted(environments):
        env_name = env.split('.')[-1]
        document = docx.Document()
        # Add code style
        style = document.styles.add_style('Code', WD_STYLE_TYPE.PARAGRAPH)
        style.font.name = 'Monaco'
        style.font.size = Pt(8)
        style = document.styles.add_style('CodeChar', WD_STYLE_TYPE.CHARACTER)
        style.font.name = 'Monaco'
        style.font.size = Pt(8)
        document.add_heading(env_name, level=0)
        p = document.add_paragraph("Output at commit ")
        p.add_run('{}'.format(commit), style='CodeChar')
        document.add_heading("FlowProject Definition", level=1)
        p = document.add_paragraph(PROJECT_DEFINITION, style='Code')
        document.add_page_break()
        # Jobs without a bundle parameter first, then bundled jobs.
        document.add_heading("Operations without bundling", level=1)
        query = {'environment': env, 'parameters.bundle': {'$exists': False}}
        for job in sorted(project.find_jobs(query), key=sort_key):
            process_job(document, job)
        document.add_heading("Operations with bundling", level=1)
        query = {'environment': env, 'parameters.bundle': {'$exists': True}}
        for job in sorted(project.find_jobs(query), key=sort_key):
            process_job(document, job)
        fn = os.path.join(DOC_DIR, "{env}.docx".format(env=env_name))
        document.save(fn)
        print("Generated document '{}'.".format(fn))
def access_proj_job(rootProjJob: signac.Project, *pathsegs):
    """Resolve a sequence of path segments to a nested signac project or job.

    Each segment is either a job id (opened on the current project) or "/",
    which descends into the subproject rooted at the next segment, resolved
    inside the current object's workspace via its ``with`` context.

    NOTE(review): entering ``with rootProjJob:`` when the segment is "/"
    assumes the current object supports the context-manager protocol (a Job
    chdirs into its workspace) -- confirm callers never pass "/" while
    holding a plain Project.
    """
    if len(pathsegs) == 0:
        return rootProjJob
    iter_pathseg = iter(pathsegs)
    pathseg = next(iter_pathseg)
    if pathseg == "/":
        with rootProjJob:
            try:
                rootProjJob = signac.get_project(root=next(iter_pathseg),
                                                 search=False)
            except StopIteration:
                # "/" was the last segment: return the project found in the
                # current workspace.
                return signac.get_project(root=".", search=False)
    else:
        # isinstance instead of the original type(...) == ... comparison.
        if isinstance(rootProjJob, signac.Project.Job):
            rootProjJob = signac.get_project(root=rootProjJob.workspace(),
                                             search=False)
        rootProjJob = rootProjJob.open_job(id=pathseg)
    return access_proj_job(rootProjJob, *list(iter_pathseg))
def _grompp_str(op_name, gro_name, checkpoint_file=None):
    """Helper function, returns grompp command string for operation."""
    mdp_file = signac.get_project().fn('mdp_files/{op}.mdp'.format(op=op_name))
    # Only pass -t when a checkpoint file was supplied.
    checkpoint = '' if checkpoint_file is None else ('-t ' + checkpoint_file)
    cmd = '{gmx} grompp -f {mdp_file} -c {gro_file} {checkpoint} -o {op}.tpr -p'.format(
        gmx=gmx_exec,
        mdp_file=mdp_file,
        gro_file=gro_name,
        checkpoint=checkpoint,
        op=op_name,
    )
    return workspace_command(cmd)
def shear_25nN_grompp(job):
    """Return a shell command that runs grompp for the 25 nN shear step."""
    # Build the mdp path with pathlib joins instead of string concatenation.
    shear_25nN_mdp_path = (
        pathlib.Path(signac.get_project().root_directory())
        / "src" / "util" / "mdp_files" / "shear_25nN.mdp"
    )
    msg = "cd {}; mpirun -np 1 gmx_sp grompp -f {} -c {} -p {} -n {} -o {} -maxwarn 1".format(
        job.workspace(),
        shear_25nN_mdp_path,
        "compress.gro",
        "init.top",
        "init2.ndx",
        "shear_25nN.tpr",
    )
    return msg
def mdrun_shear_25nN(job):
    """Return a shell command that runs the 25 nN shear (grompp first if no .tpr)."""
    if not job.isfile('shear_25nN.tpr'):
        # Build the mdp path with pathlib joins instead of string concatenation.
        shear_25nN_mdp_path = (
            pathlib.Path(signac.get_project().root_directory())
            / "src" / "util" / "mdp_files" / "shear_25nN.mdp"
        )
        grompp = "gmx_dp grompp -f {} -c compress.gro -p init.top -n init2.ndx -o shear_25nN.tpr -maxwarn 1; ".format(
            shear_25nN_mdp_path)
    else:
        # .tpr already exists; skip the grompp step.
        grompp = ""
    shear = _mdrun_str("shear_25nN")
    return "cd {}; {}srun -n 1 -v {} -px shear_25nN_pullx.xvg -pf shear_25nN_pullf.xvg".format(
        job.workspace(), grompp, shear)