def upstream(self):
    """Yield upstream tasks that can still be continued.

    Upstream tasks may have been resumed elsewhere, so each task's
    generation is first recorded in the local count before the
    already-finished tasks are filtered out.
    """
    parent_stream = super(GenerationalWorkQueueStream, self).upstream
    for task in parent_stream:
        # refresh local bookkeeping from the (possibly resumed) task
        self._count[task.uuid] = task.generation
        if not self._is_submittable(task):
            continue
        logger.info('%-15s' % 'Continuing', task.uuid,
                    'from generation', self._gen(task))
        yield task
def save(self, prefix=None, name='experiment.yaml'):
    """Serialize this experiment to ``<prefix>/<name>`` and save children.

    Parameters:
      prefix: output directory; defaults to ``self.name`` when None.
      name: filename of the experiment YAML written inside *prefix*.

    Each run in ``self.runs`` is saved recursively under
    ``<prefix>/<run key>``.
    """
    prefix = self.name if prefix is None else prefix
    pxul.os.ensure_dir(prefix)
    ypath = os.path.join(prefix, name)
    with open(ypath, 'w') as fd:
        logger.info('Saving experiments:', fd.name)
        yaml.dump(self, fd, default_flow_style=False)
    logger.info1('Writing children')
    for k, r in self.runs.iteritems():
        # BUG FIX: children were previously rooted at self.name even when an
        # explicit prefix was passed; root them at the resolved prefix so
        # the whole tree lands under one directory.
        r.save(prefix=os.path.join(prefix, str(k)))
def main(opts):
    """Print per-host progress as 'alias....... current / total' lines."""
    cfg = state.Config.load()
    st = state.State.load()
    template = '{:.<30s} {} / {}'
    for key in sorted(st.keys()):
        # generation is 0-based internally; display it 1-based
        line = template.format(cfg.aliases[key],
                               st[key].generation + 1,
                               cfg.generations)
        logger.info(line)
def main(opts):
    """Report each tracked item's progress against the configured total."""
    cfg = state.Config.load()
    st = state.State.load()
    for hashid in sorted(st.keys()):
        current = st[hashid].generation + 1  # shown 1-based
        msg = '{:.<30s} {} / {}'.format(cfg.aliases[hashid],
                                        current,
                                        cfg.generations)
        logger.info(msg)
def process(self, task):
    """Extend a submittable task by one generation, or emit it when done.

    Yields None after resubmitting an extended task; yields the task
    itself (and drops its local count) once it can no longer be extended.
    """
    if not self._is_submittable(task):
        # task has reached its final generation: stop tracking, emit it
        logger.info('%-15s' % 'Stopping', task.uuid,
                    'at generation', self._gen(task))
        del self._count[task.uuid]
        yield task
    else:
        logger.info('%-15s' % 'Extending', task.uuid,
                    'to generation', self._gen(task) + 1)
        self._incr(task)
        task.extend()
        self.submit(task)
        yield None
def main(opts):
    """Load or create the run configuration, apply CLI options, persist it."""
    have_previous = os.path.exists(state.CONFIG)
    if have_previous:
        logger.info1('Loading previous state from', state.CONFIG)
        cfg = state.Config.load(path=state.CONFIG)
    else:
        cfg = state.Config()
    cfg.update(backend=opts.backend,
               generations=opts.generations,
               time=opts.time,
               outputfreq=opts.outputfreq,
               cpus=opts.cpus,
               binaries=opts.binaries,
               seed=opts.seed)
    cfg.write()
    logger.info('Configured:\n', str(cfg))
def cat_traj_parts(parts, out):
    """Concatenate trajectory part files into a single trajectory *out*."""
    listing = '\n'.join(parts)
    logger.info('Writing', out, '\n' + listing)
    gmx.trjcat(f=' '.join(parts), o=out)
def prepare(self, pdb,
            ff = 'amber03',
            water = 'tip3p',
            ignh = True,
            mdp_min_vac = None,
            mdp_min_sol = None,
            mdp_run = None,
            iter_gammas = None,
            iter_steps = 500,
            eq_steps = 500,
            seed = None):
    """Prepare a solvated, equilibrated system from *pdb* for production MD.

    Runs a setup pipeline (initialize -> vacuum minimization -> solvation
    -> relaxation -> equilibration) inside ``self.workarea``, then copies
    the resulting conf/top/itp files into the current directory, writes the
    run ``.mdp``, and builds a run ``.tpr`` carrying velocities.

    Parameters:
      pdb: path to the input PDB structure.
      ff: force field name passed to initialization.
      water: water model name.
      ignh: whether to ignore hydrogens in the input PDB.
      mdp_min_vac / mdp_min_sol / mdp_run: MDP parameter objects; defaults
        come from ``mdp_defaults`` when None, otherwise a copy is taken so
        the caller's object is not mutated.
      iter_gammas, iter_steps: relaxation schedule parameters.
      eq_steps: number of equilibration steps.
      seed: if given, seeds ``mdp_run``.

    Returns:
      dict with keys ``conf``, ``top``, ``mdout``, ``tpr`` naming the files
      created in the current directory.
    """
    cwd = os.getcwd()
    logger.info('Preparing %s with %s in %s' % (os.path.relpath(pdb, cwd), ff, cwd))
    pdb = os.path.abspath(pdb)
    wa = os.path.join(self.workarea)
    # system name is derived from the PDB basename
    name = os.path.splitext(os.path.basename(pdb))[0]
    # copy caller-supplied MDP objects so they are not mutated below
    mdp_min_vac = mdp_defaults.minimize_vacuum() if mdp_min_vac is None else mdp_min_vac.copy()
    mdp_min_sol = mdp_defaults.minimize_solvated() if mdp_min_sol is None else mdp_min_sol.copy()
    mdp_run = mdp_defaults.explicit_solvent() if mdp_run is None else mdp_run.copy()
    if seed is not None:
        mdp_run.seed(seed)
    self.name = name
    self._cn = name
    # run the whole pipeline from inside the work area
    with pxul.os.StackDir(wa):
        self.initialize(pdb, ff=ff, water=water, ignh=ignh)
        self.minimize_vacuum(mdp_min_vac)
        self.solvate(mdp_min_sol)
        self.relax(copy.deepcopy(mdp_run), gammas=iter_gammas, steps=iter_steps)
        self.equilibrate(copy.deepcopy(mdp_run), steps=eq_steps)
    # NOTE(review): 'name' was assigned just above from the PDB basename, so
    # this fallback looks unreachable unless the basename is empty — confirm
    if not name:
        name = self.name
    # (new, old) filename pairs: destination in cwd, source in the workarea
    conf = suffix.gro(name), suffix.gro(self.pn)
    top = suffix.top(name), self.top
    itp = suffix.itp(name), 'posre.itp'
    mdp = suffix.mdp(name)
    for new, old in [conf, top, itp]:
        shutil.copy(os.path.join(self.workarea, old), new)
        logger.info1('Saved file %s' % os.path.abspath(new))
    with open(mdp, 'w') as fd:
        fd.write(str(mdp_run))
        logger.info1('Saved file', os.path.abspath(fd.name))
    # create tpr with velocities
    logger.info1('Creating run tpr')
    conf = conf[0]
    top = top[0]
    mdout = suffix.mdp('{}_mdout'.format(name))
    tpr = suffix.tpr(name)
    # -t pulls velocities from the equilibration trajectory in the workarea
    gmx.grompp(f=mdp, c=conf, po=mdout, p=top, o=tpr,
               t=suffix.trr(os.path.join(self.workarea, self.pn)))
    return dict(conf=conf,top=top,mdout=mdout,tpr=tpr)
def add(self, spec):
    """Register a simulation specification with this container."""
    self.sims.add(spec)
    message = 'Added specification:\n%s' % spec
    logger.info(message)
def prepare(self, pdb, ff='amber03', water='tip3p', ignh=True,
            mdp_min_vac=None, mdp_min_sol=None, mdp_run=None,
            iter_gammas=None, iter_steps=500, eq_steps=500, seed=None):
    """Prepare a solvated, equilibrated system from *pdb* for production MD.

    Runs a setup pipeline (initialize -> vacuum minimization -> solvation
    -> relaxation -> equilibration) inside ``self.workarea``, then copies
    the resulting conf/top/itp files into the current directory, writes the
    run ``.mdp``, and builds a run ``.tpr`` carrying velocities.

    Parameters:
      pdb: path to the input PDB structure.
      ff: force field name passed to initialization.
      water: water model name.
      ignh: whether to ignore hydrogens in the input PDB.
      mdp_min_vac / mdp_min_sol / mdp_run: MDP parameter objects; defaults
        come from ``mdp_defaults`` when None, otherwise a copy is taken so
        the caller's object is not mutated.
      iter_gammas, iter_steps: relaxation schedule parameters.
      eq_steps: number of equilibration steps.
      seed: if given, seeds ``mdp_run``.

    Returns:
      dict with keys ``conf``, ``top``, ``mdout``, ``tpr`` naming the files
      created in the current directory.
    """
    cwd = os.getcwd()
    logger.info('Preparing %s with %s in %s' % (os.path.relpath(pdb, cwd), ff, cwd))
    pdb = os.path.abspath(pdb)
    wa = os.path.join(self.workarea)
    # system name is derived from the PDB basename
    name = os.path.splitext(os.path.basename(pdb))[0]
    # copy caller-supplied MDP objects so they are not mutated below
    mdp_min_vac = mdp_defaults.minimize_vacuum(
    ) if mdp_min_vac is None else mdp_min_vac.copy()
    mdp_min_sol = mdp_defaults.minimize_solvated(
    ) if mdp_min_sol is None else mdp_min_sol.copy()
    mdp_run = mdp_defaults.explicit_solvent(
    ) if mdp_run is None else mdp_run.copy()
    if seed is not None:
        mdp_run.seed(seed)
    self.name = name
    self._cn = name
    # run the whole pipeline from inside the work area
    with pxul.os.StackDir(wa):
        self.initialize(pdb, ff=ff, water=water, ignh=ignh)
        self.minimize_vacuum(mdp_min_vac)
        self.solvate(mdp_min_sol)
        self.relax(copy.deepcopy(mdp_run), gammas=iter_gammas,
                   steps=iter_steps)
        self.equilibrate(copy.deepcopy(mdp_run), steps=eq_steps)
    # NOTE(review): 'name' was assigned just above from the PDB basename, so
    # this fallback looks unreachable unless the basename is empty — confirm
    if not name:
        name = self.name
    # (new, old) filename pairs: destination in cwd, source in the workarea
    conf = suffix.gro(name), suffix.gro(self.pn)
    top = suffix.top(name), self.top
    itp = suffix.itp(name), 'posre.itp'
    mdp = suffix.mdp(name)
    for new, old in [conf, top, itp]:
        shutil.copy(os.path.join(self.workarea, old), new)
        logger.info1('Saved file %s' % os.path.abspath(new))
    with open(mdp, 'w') as fd:
        fd.write(str(mdp_run))
        logger.info1('Saved file', os.path.abspath(fd.name))
    # create tpr with velocities
    logger.info1('Creating run tpr')
    conf = conf[0]
    top = top[0]
    mdout = suffix.mdp('{}_mdout'.format(name))
    tpr = suffix.tpr(name)
    # -t pulls velocities from the equilibration trajectory in the workarea
    gmx.grompp(f=mdp, c=conf, po=mdout, p=top, o=tpr,
               t=suffix.trr(os.path.join(self.workarea, self.pn)))
    return dict(conf=conf, top=top, mdout=mdout, tpr=tpr)
def task(self, tpr, x=None, v=None, t=None, outputdir=None, seed=None,
         digest=None):
    """Build a generation-0 Task directory and Task object from a tpr file.

    Creates ``<outdir>/topol.tpr`` and ``<outdir>/0/`` holding the
    positions/velocities/time inputs, extracts missing inputs from the tpr
    via guamps, applies seed/steps/output-frequency overrides, and returns
    a Task wired with the required binaries.

    Parameters:
      tpr: path to the source tpr file.
      x, v: optional position/velocity input files to copy in.
      t: optional start time, either a float (written as text) or a path
         to an existing time file.
      outputdir: task directory; defaults to ``<tpr>.mdq``.
      seed: if truthy, overrides ``ld_seed`` in the copied tpr.
      digest: precomputed sha256 hex digest of the tpr; computed when
              absent.

    Returns: the prepared Task.
    Raises: ValueError if *t* is neither a float nor a str.
    """
    outdir = outputdir or tpr + '.mdq'
    pxul.os.ensure_dir(outdir)
    logger.debug('Ensured', outdir, 'exists')
    tpr2 = os.path.join(outdir, 'topol.tpr')
    shutil.copy(tpr, tpr2)
    logger.debug(tpr, '->', tpr2)
    # generation-0 input directory
    gendir = os.path.join(outdir, '0')
    pxul.os.ensure_dir(gendir)
    logger.debug('Ensured', gendir, 'exists')
    gps = dict(x=os.path.join(gendir, SCRIPT_INPUT_NAMES['x']),
               v=os.path.join(gendir, SCRIPT_INPUT_NAMES['v']),
               t=os.path.join(gendir, SCRIPT_INPUT_NAMES['t']))
    if x is not None:
        shutil.copy(x, gps['x'])
        logger.debug(x, '->', gps['x'])
    if v is not None:
        shutil.copy(v, gps['v'])
        logger.debug(v, '->', gps['v'])
    if t is not None:
        if type(t) is float:
            with open(gps['t'], 'w') as fd:
                fd.write(str(t))
            logger.debug('Wrote', t, 'to', gps['t'])
        elif type(t) is str:
            shutil.copy(t, gps['t'])
            logger.debug(t, '->', gps['t'])
        else:
            # BUG FIX: was Python-2-only 'raise ValueError, msg' syntax;
            # the call form is valid on both Python 2 and 3.
            raise ValueError('Illegal state: invalid time spec %s' % t)
    # pull any inputs not supplied above out of the tpr itself
    for sel, key in SELECTIONS.iteritems():
        logger.info1('Getting', sel, 'from', tpr2)
        guamps_get(f=tpr2, s=sel, o=gps[key])
    if seed:
        logger.info1('Setting seed', seed)
        tpr_set_scalar(tpr2, 'ld_seed', seed)
    dt = tpr_get_scalar(tpr2, 'deltat', float)
    if self._picoseconds:
        nsteps = int(self._picoseconds / dt)
        logger.info1('Running for', self._picoseconds, 'ps as', nsteps,
                     'nsteps')
        tpr_set_scalar(tpr2, 'nsteps', nsteps)
    if self._outputfreq:
        freq = int(self._outputfreq / dt)
        # FIXME nstenergy, see badi/guamps#27
        for attr in 'nstxout nstxtcout nstfout nstvout nstlog'.split():
            logger.info1('Setting output frequency', self._outputfreq,
                         'for', attr, 'as', freq, 'steps', 'in', tpr2)
            tpr_set_scalar(tpr2, attr, freq)
    if not digest:
        logger.info1('Computing digest for', tpr2)
        sha256 = hashlib.sha256()
        # BUG FIX: the file handle was previously opened without being
        # closed; use a context manager so it is released promptly.
        with open(tpr2, 'rb') as fd:
            sha256.update(fd.read())
        digest = sha256.hexdigest()
    task = Task(x=gps['x'], v=gps['v'], t=gps['t'], tpr=tpr2,
                outputdir=outdir, cpus=self._cpus, digest=digest)
    task.add_binary(self._mdrun)
    task.add_binary(self._guamps_get)
    task.add_binary(self._guamps_set)
    if self._keep_trajfiles:
        task.keep_trajfiles()
    logger.info('Prepared', digest, 'from', tpr)
    for k in self.__dict__:
        logger.info(10 * ' ', k.lstrip('_'), '=', getattr(self, k))
    return task
def task(self, tpr, x=None, v=None, t=None, outputdir=None, seed=None,
         digest=None):
    """Build a generation-0 Task directory and Task object from a tpr file.

    Creates ``<outdir>/topol.tpr`` and ``<outdir>/0/`` holding the
    positions/velocities/time inputs, extracts missing inputs from the tpr
    via guamps, applies seed/steps/output-frequency overrides, and returns
    a Task wired with the required binaries.

    Parameters:
      tpr: path to the source tpr file.
      x, v: optional position/velocity input files to copy in.
      t: optional start time, either a float (written as text) or a path
         to an existing time file.
      outputdir: task directory; defaults to ``<tpr>.mdq``.
      seed: if truthy, overrides ``ld_seed`` in the copied tpr.
      digest: precomputed sha256 hex digest of the tpr; computed when
              absent.

    Returns: the prepared Task.
    Raises: ValueError if *t* is neither a float nor a str.
    """
    outdir = outputdir or tpr + '.mdq'
    pxul.os.ensure_dir(outdir)
    logger.debug('Ensured', outdir, 'exists')
    tpr2 = os.path.join(outdir, 'topol.tpr')
    shutil.copy(tpr, tpr2)
    logger.debug(tpr, '->', tpr2)
    # generation-0 input directory
    gendir = os.path.join(outdir, '0')
    pxul.os.ensure_dir(gendir)
    logger.debug('Ensured', gendir, 'exists')
    gps = dict(x=os.path.join(gendir, SCRIPT_INPUT_NAMES['x']),
               v=os.path.join(gendir, SCRIPT_INPUT_NAMES['v']),
               t=os.path.join(gendir, SCRIPT_INPUT_NAMES['t']))
    if x is not None:
        shutil.copy(x, gps['x'])
        logger.debug(x, '->', gps['x'])
    if v is not None:
        shutil.copy(v, gps['v'])
        logger.debug(v, '->', gps['v'])
    if t is not None:
        if type(t) is float:
            with open(gps['t'], 'w') as fd:
                fd.write(str(t))
            logger.debug('Wrote', t, 'to', gps['t'])
        elif type(t) is str:
            shutil.copy(t, gps['t'])
            logger.debug(t, '->', gps['t'])
        else:
            # BUG FIX: was Python-2-only 'raise ValueError, msg' syntax;
            # the call form is valid on both Python 2 and 3.
            raise ValueError('Illegal state: invalid time spec %s' % t)
    # pull any inputs not supplied above out of the tpr itself
    for sel, key in SELECTIONS.iteritems():
        logger.info1('Getting', sel, 'from', tpr2)
        guamps_get(f=tpr2, s=sel, o=gps[key])
    if seed:
        logger.info1('Setting seed', seed)
        tpr_set_scalar(tpr2, 'ld_seed', seed)
    dt = tpr_get_scalar(tpr2, 'deltat', float)
    if self._picoseconds:
        nsteps = int(self._picoseconds / dt)
        logger.info1('Running for', self._picoseconds, 'ps as', nsteps,
                     'nsteps')
        tpr_set_scalar(tpr2, 'nsteps', nsteps)
    if self._outputfreq:
        freq = int(self._outputfreq / dt)
        # FIXME nstenergy, see badi/guamps#27
        for attr in 'nstxout nstxtcout nstfout nstvout nstlog'.split():
            logger.info1('Setting output frequency', self._outputfreq,
                         'for', attr, 'as', freq, 'steps', 'in', tpr2)
            tpr_set_scalar(tpr2, attr, freq)
    if not digest:
        logger.info1('Computing digest for', tpr2)
        sha256 = hashlib.sha256()
        # BUG FIX: the file handle was previously opened without being
        # closed; use a context manager so it is released promptly.
        with open(tpr2, 'rb') as fd:
            sha256.update(fd.read())
        digest = sha256.hexdigest()
    task = Task(x=gps['x'], v=gps['v'], t=gps['t'], tpr=tpr2,
                outputdir=outdir, cpus=self._cpus, digest=digest)
    task.add_binary(self._mdrun)
    task.add_binary(self._guamps_get)
    task.add_binary(self._guamps_set)
    if self._keep_trajfiles:
        task.keep_trajfiles()
    logger.info('Prepared', digest, 'from', tpr)
    for k in self.__dict__:
        logger.info(10 * ' ', k.lstrip('_'), '=', getattr(self, k))
    return task