outfile_name = 'energies_output.out' if not job.isfile(outfile_name): return False else: return has_libatoms_exit_message(job.fn(outfile_name)) def has_force_output(job): outfile_name = 'forces_output.out' if not job.isfile(outfile_name): return False else: return has_libatoms_exit_message(job.fn(outfile_name)) timings = FlowProject.make_group(name='timings') @timings @FlowProject.operation @FlowProject.pre(gap_fit_success) @FlowProject.post(has_energy_output) @directives(omp_num_threads=1) def time_quip_energies(job): old_dir = os.getcwd() quip_cmdline = build_quip_command_line(job) print(quip_cmdline) try: os.chdir(job.workspace()) with open('energies_output.out', 'w') as outfile: subprocess.run(quip_cmdline, stdout=outfile)
import sklearn
from sklearn.neural_network import MLPRegressor
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error, mean_absolute_error

# Quantities the networks are trained to predict.
TARGETS = ['COF', 'intercept']
# Statepoint keys that identify one monolayer system.
IDENTIFIERS = ['terminal_group_1', 'terminal_group_2', 'terminal_group_3',
               'backbone', 'frac-1', 'frac-2']

# get project root directory
proj = signac.get_project()
root_dir = proj.root_directory()

# define group that combines the different operations
neural_network = FlowProject.make_group(name='neural_network')

# External hard drive location where the trained-model pickles live
# (the fitted scalers stay in the normal job workspace).
EXTERNAL_WORKSPACE = '/mnt/d/neural-networks-with-signac/workspace/'


@FlowProject.label
def models_trained(job):
    """Post-condition for train_evaluate.

    Checks that the pickle files for the models and the scalers exist:
    for every target in TARGETS there must be a ``<target>_trained.pickle``
    under the external hard drive workspace for this job, and a
    ``<target>_scaler.pickle`` in the job workspace itself.

    Returns True only when all of these files are present.
    """
    for target in TARGETS:
        # Models are stored on the external drive, keyed by job id.
        model_path = os.path.join(EXTERNAL_WORKSPACE, job.id,
                                  '{}_trained.pickle'.format(target))
        if not os.path.isfile(model_path):
            return False
        if not os.path.isfile(job.fn('{}_scaler.pickle'.format(target))):
            return False
    return True
import signac
from flow import FlowProject
import os
from simtk import unit
from cg_openmm.cg_model.cgmodel import CGModel
from cg_openmm.parameters.reweight import get_temperature_list
from cg_openmm.simulation.rep_exch import *
from openmmtools.cache import global_context_cache
import numpy as np
import simtk.openmm as openmm
import pickle
from cg_openmm.thermo.calc import *

# Run all OpenMM work on the CUDA platform.
global_context_cache.platform = openmm.Platform.getPlatformByName("CUDA")

# Operation groups for the two phases of the workflow.
replica_exchange_group = FlowProject.make_group(name='replica_exchange')
analysis_group = FlowProject.make_group(name='analysis')

# Directory the project was launched from.
proj_directory = os.getcwd()


@FlowProject.label
def run_replica_exchange_done(job):
    """Label that is truthy once this job's replica-exchange run finished.

    Looks for ``output/output.nc`` in the job workspace and, if present,
    reads the sampler status from it to see whether the run completed.
    """
    nc_path = os.path.join(job.workspace(), "output", "output.nc")
    if not os.path.isfile(nc_path):
        # No output file yet -> the run has not even started/produced data.
        return 0
    status = ReplicaExchangeSampler.read_status(nc_path)
    return status.is_completed
# A Signac-Flow project for one-component VLE simulations import json import os import pickle import shutil import subprocess import numpy as np import signac from flow import FlowProject, directive # Melt, cool, volume, swap equilibrate = FlowProject.make_group(name="equilibrate") project = signac.get_project() @FlowProject.label def is_equil(job): """ For job tasks in the 'equilibrate' group, we only want to run the task if it """ return job.sp.run == "equil" @FlowProject.label def job_marked_complete(job): """ For jobs that may have multiple steps, mark completion