Ejemplo n.º 1
0
def checkErr(stde, rsrc, tpr, persDir):
    """Check whether an error condition is recoverable.

    Scans the stderr file of a failed run for a 'Fatal error' message.
    If the fatal error is a domain-decomposition problem (too many cores
    requested), the run is re-tuned with one core fewer and the error is
    considered recoverable.

    Args:
        stde: path to the stderr file of the failed run.
        rsrc: resource object; ``rsrc.max.get('cores')`` gives the core count.
        tpr: path to the run input (.tpr) file.
        persDir: persistent directory used for the re-tuning output.

    Returns:
        True if there is an unrecoverable issue, False if the error is
        recoverable (or if no stderr file exists).
    """
    if not os.path.exists(stde):
        # no stderr file at all: we assume it's a worker error (recoverable)
        return False
    fatalErr = False
    OK = True
    # 'with' guarantees the handle is closed even if extractConf/tune.tune
    # raises mid-scan (the original open/close pair leaked on exceptions).
    with open(stde, 'r') as inf:
        for line in inf:
            if re.match(r'.*Fatal error.*', line):
                fatalErr = True
                log.debug("Found fatal error")
                OK = False
            if fatalErr:
                if re.match(r'.*domain decomposition.*', line):
                    # the number of cores is wrong: re-tune with one core
                    # fewer so domain decomposition can succeed next time
                    log.debug("Found domain decomp error")
                    confFile = os.path.join(persDir, 'conf.gro')
                    extractConf(tpr, confFile)
                    tune.tune(rsrc, confFile, tpr, persDir,
                              rsrc.max.get('cores') - 1)
                    OK = True
                    break
    return not OK
Ejemplo n.º 2
0
def checkErr(stde, rsrc, tpr, persDir):
    """Check whether an error condition is recoverable.

       Returns True if there is an issue, False if the error is recoverable"""
    if not os.path.exists(stde):
        # no stderr file: assume a worker-side error, treated as recoverable
        return False
    handle = open(stde, 'r')
    sawFatal = False
    recoverable = True
    for text in handle:
        if re.match(r'.*Fatal error.*', text):
            sawFatal = True
            log.debug("Found fatal error")
            recoverable = False
        if sawFatal and re.match(r'.*domain decomposition.*', text):
            # the core count is wrong: re-tune with one core fewer
            log.debug("Found domain decomp error")
            confPath = os.path.join(persDir, 'conf.gro')
            extractConf(tpr, confPath)
            tune.tune(rsrc, confPath, tpr, persDir,
                      rsrc.max.get('cores') - 1)
            recoverable = True
            break
    handle.close()
    return not recoverable
Ejemplo n.º 3
0
def main():
    """Tune hyperparameters on the validation split, then train/evaluate
    the final model and update the result csv files."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--dataset', type=str, help='Dataset')
    parser.add_argument('-m', '--model', type=str,
                        help='Model variant. supported DualOutputRNN or Conv1d')
    args, _ = parser.parse_known_args()

    # hyperparameter search: fit on train, validate on valid
    args.batchsize = TUNE_BATCHSIZE
    args.train_on = "train"
    args.test_on = "valid"
    tune(args)

    # persist the best hyperparameters found above
    update_hyperparameter_csv(args.model, args.dataset)

    # final run: fit on train+valid, evaluate on the held-out test split
    args.batchsize = TRAIN_BATCHSIZE
    args.train_on = "trainvalid"
    args.test_on = "test"
    args.no_visdom = False
    args.test_every_n_epochs = 1
    train(args)

    # record the final scores in the sota csv files
    update_result_csv(args.model, args.dataset)
Ejemplo n.º 4
0
def tune_fn(inp):
    """Run grompp on the inputs and tune run resources for the result.

    Copies the topology (and any include files) into the output directory,
    executes grompp there, and on success tunes a Resources object against
    the generated topol.tpr.
    """
    cmdnames = cmds.GromacsCommands()
    if inp.testing():
        # if there are no inputs, we're testing whether the command can run
        #cpc.util.plugin.testCommand("grompp -version")
        #cpc.util.plugin.testCommand("mdrun -version")
        return
    fo = inp.getFunctionOutput()
    persDir = inp.getPersistentDir()
    outDir = inp.getOutputDir()
    mdpfile = procSettings(inp, outDir)
    # copy the topology and include files into the output directory
    shutil.copy(inp.getInput('top'), os.path.join(outDir, 'topol.top'))
    incl = inp.getInput('include')
    if incl is not None and len(incl) > 0:
        for i in range(len(incl)):
            fname = inp.getInput('include[%d]' % i)
            if fname is None:
                continue
            # keep the same file name, but place it in the output directory
            shutil.copy(fname, os.path.join(outDir, os.path.split(fname)[1]))
    # build and execute the grompp command line
    cmdlist = cmdnames.grompp.split()
    cmdlist.extend(["-f", mdpfile,
                    "-quiet",
                    "-c", inp.getInput('conf'),
                    "-p", 'topol.top',  # we made sure it's there
                    "-o", "topol.tpr"])
    if inp.hasInput('ndx'):
        cmdlist.extend(['-n', inp.getInput('ndx')])
    proc = subprocess.Popen(cmdlist,
                            stdin=None,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            cwd=outDir)
    stdo, stde = proc.communicate(None)
    if proc.returncode != 0:
        #raise GromacsError("Error running grompp: %s"%
        #                   (open(stdoutfn,'r').read()))
        fo.setError("Error running grompp: %s, %s" % (stdo, stde))
        return fo
    rsrc = Resources()
    tune.tune(rsrc, inp.getInput('conf'),
              os.path.join(outDir, 'topol.tpr'), persDir)
    fo.setOut('mdp', FileValue(mdpfile))
    fo.setOut('resources', rsrc.setOutputValue())
    return fo
Ejemplo n.º 5
0
def tune_fn(inp):
    """grompp the inputs and derive tuned run resources from the tpr.

    The topology and include files are copied next to the generated mdp
    file, grompp is run in that directory, and on success the Resources
    object is tuned for the resulting topol.tpr.
    """
    cmdnames = cmds.GromacsCommands()
    if inp.testing():
        # if there are no inputs, we're testing whether the command can run
        #cpc.util.plugin.testCommand("grompp -version")
        #cpc.util.plugin.testCommand("mdrun -version")
        return
    fo = inp.getFunctionOutput()
    persDir = inp.getPersistentDir()
    mdpfile = procSettings(inp, inp.getOutputDir())
    # copy the topology and include files
    topfile = os.path.join(inp.getOutputDir(), 'topol.top')
    shutil.copy(inp.getInput('top'), topfile)
    incl = inp.getInput('include')
    if incl is not None and len(incl) > 0:
        for i in range(len(incl)):
            filename = inp.getInput('include[%d]' % i)
            if filename is not None:
                # same base name, but placed in the output directory
                nname = os.path.join(inp.getOutputDir(),
                                     os.path.split(filename)[1])
                shutil.copy(filename, nname)
    # and execute grompp
    cmdlist = cmdnames.grompp.split()
    cmdlist.append("-f")
    cmdlist.append(mdpfile)
    cmdlist.append("-quiet")
    cmdlist.append("-c")
    cmdlist.append(inp.getInput('conf'))
    cmdlist.append("-p")
    cmdlist.append('topol.top')  # we made sure it's there
    cmdlist.append("-o")
    cmdlist.append("topol.tpr")
    if inp.hasInput('ndx'):
        cmdlist.append('-n')
        cmdlist.append(inp.getInput('ndx'))
    proc = subprocess.Popen(cmdlist,
                            stdin=None,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            cwd=inp.getOutputDir())
    stdo, stde = proc.communicate(None)
    if proc.returncode != 0:
        #raise GromacsError("Error running grompp: %s"%
        #                   (open(stdoutfn,'r').read()))
        fo.setError("Error running grompp: %s, %s" % (stdo, stde))
        return fo
    rsrc = Resources()
    tune.tune(rsrc, inp.getInput('conf'),
              os.path.join(inp.getOutputDir(), 'topol.tpr'), persDir)
    fo.setOut('mdp', FileValue(mdpfile))
    fo.setOut('resources', rsrc.setOutputValue())
    return fo
Ejemplo n.º 6
0
def main(input_wav, prediction_mid_name):
    """Tune a wav recording using pitch predictions from a midi file.

    Args:
        input_wav: path to the wav file to be tuned.
        prediction_mid_name: path to the midi file with pitch predictions.

    Side effects:
        Writes the tuned audio next to the input as '<name>_tuned.wav';
        creates and removes the temporary files exchanged with the C++
        helper binary 'c++/freqs'.
    """
    import os

    # Setup useful variables
    cpp_input = "input"
    cpp_output = f"{cpp_input}_tuned"
    output_wav = f'{".".join(input_wav.split(".")[:-1])}_tuned.wav'

    print("Processing...")

    # Find optimal tunings from the midi file via the C++ helper
    midi = pio.load_midi(prediction_mid_name)
    pio.parse_and_write_to_file(midi, cpp_input)
    subprocess.run(["c++/freqs", cpp_input], check=True)
    tuned_score = pio.read_tuned_file(cpp_output)
    # Delete the temp files directly instead of shelling out to 'rm':
    # portable (works on Windows) and avoids spawning an extra process.
    os.remove(cpp_input)
    os.remove(cpp_output)
    tunings = tune.tunings_at_events(midi, tuned_score)

    print("Tuning...")

    # Tune the wav file based on the tunings found
    fs, data = wavfile.read(input_wav)
    new_data = tune.tune(data, fs, tunings)
    wavfile.write(output_wav, fs, new_data)

    print()
    print("Done")
    print()
Ejemplo n.º 7
0
def classify():
    """
    Calls the tune method for every representation layer in order to find
    the right parameters
    :return:
    """
    file = './data/tuning/flickr8k-speaker.txt'
    # input features and convolutional features first
    for features in (val_mfcc, val_conv):
        tune.tune(features, val_spk, file)
    # then every recurrent layer, then the embedding layer
    for idx in range(val_rec.shape[1]):
        tune.tune(val_rec[:, idx, :], val_spk, file)
    tune.tune(val_emb, val_spk, file)
Ejemplo n.º 8
0
def classify():
    """
    Calls the tune method for every layer in order to find the right
    parameters
    :return:
    """
    file = '../data/tuning/places-speaker.txt'
    test_size = 0.4
    # input features and convolutional features first
    tune.tune(val_mfcc, val_spk_int, file, test_size)
    tune.tune(val_conv, val_spk_int, file, test_size)
    # one call per recurrent layer
    for layer_idx in range(val_rec.shape[1]):
        current = val_rec[:, layer_idx, :]
        tune.tune(current, val_spk_int, file, test_size)
    # and finally the embedding layer
    tune.tune(val_emb, val_spk_int, file, test_size)
Ejemplo n.º 9
0
def main():
    """Run the Q3.2 custom-system evaluations (default vs best params)."""
    opts = get_opts()

    ## Q3.1 - Hyperparameter tunning
    # print("Q3.1 - Hyper Parameter tunning")
    # alpha = [25, 125]
    # filter_scales = [[1, 2], [1, 2, 4]]
    # K = [10, 50]
    # L = [3, 2, 1]
    # tune.tune(alpha, filter_scales, K, L)
    # results = tune.get_results(opts, sorted=True)
    # tuning.display_results(results)

    ## Q3.2 - Custom
    print("Q3.2 - Custom system with default parameters")
    # Evaluating default vs D with the default parameter settings
    tune.tune([25], [[1, 2]], [10], [1], [1, 5])
    tune.display_results(tune.get_results(opts, sorted=True))

    print("Q3.2 - Custom system with best parameters")
    # Evaluating default vs D with the best parameter settings
    tune.tune([125], [[1, 2]], [50], [3], [1, 10])
    tune.display_results(tune.get_results(opts, sorted=True))
Ejemplo n.º 10
0
    # NOTE(review): this is the body of a command dispatcher whose header
    # is outside this view; 'args' is presumably an argparse namespace
    # with command/outcome/aggregate/... attributes -- confirm in caller.
    trn = Task1data()
    trn.load('train', normalize=args.normalize_scores)
    if args.command == 'tune':
        # for tuning, the dev split is folded into the training data and
        # the regressor is cross-validated (k=5) separately per outcome
        trn.load('dev', normalize=args.normalize_scores)
        trn_text, trn_out, trn_stuids = trn.get_data2(args.outcome + ['rank'],
                        aggregate=args.aggregate, return_extra=args.extra_features)
        logfile = sys.stdout
        for i, outcome in enumerate(args.outcome):
            if args.log_prefix:
                # one append-mode log file per outcome
                logfile = open(
                        "{}-{}.log".format(args.log_prefix, outcome), "at")
            trn_data = (trn_text, trn_out[:, i])
            if args.extra_features:
                # extra features ride along as a third tuple element
                trn_data = (trn_text[0], trn_out[:, i], trn_text[1])
            tune(TextRegressor, args.params, trn_data,
                    init_args={'regressor': args.regressor}, save=logfile,
                    k=5, optimize='rank_corr', max_iter=args.max_iter)
            if args.log_prefix: logfile.close()

    if args.command == 'predict':
        if args.test_data:
            # evaluate on the test split; dev becomes extra training data
            trn.load('dev', normalize=args.normalize_scores)
            tst = Task1data()
            tst.load('test', normalize=args.normalize_scores)
        else:
            # default: evaluate on the dev split
            tst = Task1data()
            tst.load('dev', normalize=args.normalize_scores)

        tst_text, tst_out, tst_stuids = tst.get_data2(
                args.outcome + ['rank'], aggregate=args.aggregate,
                    return_extra=args.extra_features) 
Ejemplo n.º 11
0
def mdrun(inp):
    """Set up and (re)submit a Gromacs mdrun command for the dataflow.

    Handles (re)initialization when the tpr input changes, resource
    tuning, detection of already-finished runs (confout present), error
    recovery via checkErr(), and emission of a new mdrun command that
    continues from the latest checkpoint.

    NOTE(review): the use of unicode() below implies this code targets
    Python 2.

    Returns the function-output object (with extracted final data or a
    newly scheduled command), or None in testing mode.
    """
    cmdnames = cmds.GromacsCommands()
    if inp.testing():
        # if there are no inputs, we're testing whether the command can run
        cpc.util.plugin.testCommand("%s -version" % cmdnames.trjcat)
        cpc.util.plugin.testCommand("%s -version" % cmdnames.eneconv)
        cpc.util.plugin.testCommand("%s -version" % cmdnames.gmxdump)
        return
    persDir=inp.getPersistentDir()
    outDir=inp.getOutputDir()
    fo=inp.getFunctionOutput()
    rsrc=Resources(inp.getInputValue("resources"))
    rsrcFilename=os.path.join(persDir, 'rsrc.dat')
    # check whether we need to reinit
    pers=cpc.dataflow.Persistence(os.path.join(persDir,
                                               "persistent.dat"))
    init=False
    lasttpr=pers.get('lasttpr')
    newtpr=inp.getInput('tpr')
    #if inp.getInputValue('tpr').isUpdated():
    if newtpr!= lasttpr:
        lasttpr=newtpr
        # there was no previous command.
        # purge the persistent directory, by moving the confout files to a
        # backup directory
        log.debug("(Re)initializing mdrun")
        confout=glob.glob(os.path.join(persDir, "run_???"))
        if len(confout)>0:
            backupDir=os.path.join(persDir, "backup")
            try:
                os.mkdir(backupDir)
            except OSError:
                # backup dir already exists; reuse it
                pass
            for conf in confout:
                try:
                    os.rename(conf, os.path.join(backupDir,
                                                 os.path.split(conf)[-1]))
                except OSError:
                    # best effort: skip dirs that cannot be moved
                    pass
        init=True
        pers.set('lasttpr', lasttpr)
    elif inp.cmd is None:
        # nothing changed and no finished command to process: no-op
        return fo
    if init:
        if rsrc.max.get('cores') is None:
            # no core count known yet: tune against the extracted conf
            confFile=os.path.join(persDir, 'conf.gro')
            extractConf(newtpr, confFile)
            tune.tune(rsrc, confFile, newtpr, persDir)
        if inp.cmd is not None:
            log.debug("Canceling commands")
            fo.cancelPrevCommands()
        pers.set('initialized', True)
    else:
        if rsrc.max.get('cores') is None:
            # reuse the tuning result saved by a previous invocation
            rsrc.load(rsrcFilename)
    if inp.cmd is not None:
        log.debug("Return code was %s"%str(inp.cmd.getReturncode()))
    # try to find out whether the run has already finished
    confout=glob.glob(os.path.join(persDir, "run_???", "confout.*gro"))
    if len(confout) > 0:
        confoutDir = os.path.dirname(confout[0])
        hasFinalData = checkConfoutDir(confoutDir)
        if hasFinalData:
            log.debug("Extracting data. ")
            # confout exists. we're finished. Concatenate all the runs if
            # we need to, but first create the output dict
            extractData(confout, outDir, persDir, fo)
            return fo

    tfc=TrajFileCollection(persDir)
    lastDir = tfc.getLastDir()
    # first check whether we got an error code back
    if (inp.cmd is not None) and inp.cmd.getReturncode()!=0:
        # there was a problem. Check the log
        if lastDir:
            stde=os.path.join(lastDir, "stderr")
            # checkErr returns True for an unrecoverable issue
            if checkErr(stde, rsrc, newtpr, persDir):
                if os.path.exists(stde):
                    stdef=open(stde, 'r')
                    errmsg=unicode(stdef.read(), errors='ignore')
                    stdef.close()
                    raise MdrunError("Error running mdrun: %s"%errmsg)
        else:
            log.debug("An error has occured, but no lastDir was found.")

        # now check whether any of the last 4 iterations produced
        # trajectories
        trajlist=tfc.getTrajList()
        if len(trajlist) > 4:
            ret=False
            for j in range(4):
                haveTraj=(len(trajlist[-j-1]) > 0)
                ret=ret or haveTraj  #prevtraj[-j-1]
            if not ret:
                # four failed iterations in a row with no output: give up
                if lastDir:
                    stde=os.path.join(lastDir, "stderr")
                    if os.path.exists(stde):
                        stdef=open(stde, 'r')
                        errmsg=unicode(stdef.read(), errors='ignore')
                        stdef.close()
                    else:
                        errmsg=""
                    raise MdrunError("Error running mdrun. No trajectories: %s"%
                                    errmsg)
                else:
                    raise MdrunError("Error running mdrun. No trajectories and no lastDir was found.")
    # Make a new directory with the continuation of this run
    #newdirname=currundir #"run_%03d"%(i+1)
    newdirname=tfc.getNewRunDir()
    log.debug("Making a new directory for this run: %s" % newdirname)
    try:
        os.mkdir(newdirname)
    except OSError:
        log.debug("Directory already exists.")
        pass
    tpr=newtpr
    src=os.path.join(inp.getBaseDir(), tpr)
    dst=os.path.join(newdirname,"topol.tpr")
    shutil.copy(src,dst)
    # handle command line inputs
    if inp.getInput('cmdline_options') is not None:
        cmdlineOpts=shlex.split(inp.getInput('cmdline_options'))
    else:
        cmdlineOpts=[]
    if inp.getInput('priority') is not None:
        prio=inp.getInput('priority')
    else:
        prio=0
    lastcpt=tfc.getLastCpt()
    # copy the checkpoint to the new cmd dir
    if lastcpt is not None:
        shutil.copy(lastcpt, os.path.join(newdirname,"state.cpt"))
        log.debug("Continuing from checkpoint")
    # now add to the priority if this run has already been started
    completed=tfc.getFractionCompleted(tpr)
    if completed > 0:
        log.debug("Fraction completed: %s" % completed)
        # Already finished, but no confout.gro?
        if completed >= 1:
            log.debug("Iteration finished, but the final coordinates were not written.")
            if tfc.trajlist[-1].get('edr') or tfc.trajlist[-1].get('xtc') or tfc.trajlist[-1].get('trr'):
                log.debug("Last run produced output files without gaps (but no confout.gro). Generating coordinates from checkpoint.")
                confout=tfc.checkpointToConfout()
                if confout:
                    log.debug("Extracting data.")
                    extractData([confout], outDir, persDir, fo)
                    return fo
            else:
                log.debug("Last run did not produce any output files. Cannot generate coordinates from checkpoint.")
        # now the priority ranges from 1 to 4, depending on how
        # far along the simulation is.
        prio += 1+int(3*(completed))
        log.debug("Setting new priority to %d because it's in progress"%
                  prio)
    # we can always add state.cpt, even if it doesn't exist.
    # NOTE(review): '-rcon 0.7' looks like a domain-decomposition
    # constraint-distance setting -- confirm the value against the tuning.
    args=["-quiet", "-s", "topol.tpr", "-noappend", "-cpi", "state.cpt",
           "-rcon", "0.7"  ]
    args.extend(cmdlineOpts)
    # for the new neighbor search scheme in Gromacs 4.6, set this env
    # variable

    # any expected output files.
    newFileNr=tfc.getLastTrajNr()+1
    outputFiles=[ "traj.part%04d.xtc"%newFileNr,
                  "traj.part%04d.trr"%newFileNr,
                  "confout.part%04d.gro"%newFileNr,
                  "ener.part%04d.edr"%newFileNr,
                  "dhdl.part%04d.xvg"%newFileNr,
                  "pullx.part%04d.xvg"%newFileNr,
                  "pullf.part%04d.xvg"%newFileNr,
                  "md.part%04d.log"%newFileNr,
                  "state.cpt", "state_prev.cpt" ]
    log.debug("Expected output files: %s"%outputFiles)
    cmd=cpc.command.Command(newdirname, "gromacs/mdrun",args,
                            minVersion=cpc.command.Version("4.5"),
                            addPriority=prio,
                            outputFiles=outputFiles)
    if inp.hasInput("resources") and inp.getInput("resources") is not None:
        #log.debug("resources is %s"%(inp.getInput("resources")))
        #rsrc=Resources(inp.getInputValue("resources"))
        rsrc.updateCmd(cmd)
    log.debug("Adding command")
    fo.addCommand(cmd)
    if inp.getInputValue('tpr').isUpdated() and inp.cmd is not None:
        log.debug("Canceling commands")
        fo.cancelPrevCommands()

    # and save for further invocations
    rsrc.save(rsrcFilename)
    pers.write()
    return fo
Ejemplo n.º 12
0
def mdrun(inp):
    """Set up and (re)submit a Gromacs mdrun command for the dataflow.

    Handles (re)initialization when the tpr input changes, resource
    tuning, detection of already-finished runs (confout present), error
    recovery via checkErr(), and emission of a new mdrun command that
    continues from the latest checkpoint.

    NOTE(review): the use of unicode() below implies this code targets
    Python 2.

    Returns the function-output object (with extracted final data or a
    newly scheduled command), or None in testing mode.
    """
    cmdnames = cmds.GromacsCommands()
    if inp.testing():
        # if there are no inputs, we're testing whether the command can run
        cpc.util.plugin.testCommand("%s -version" % cmdnames.trjcat)
        cpc.util.plugin.testCommand("%s -version" % cmdnames.eneconv)
        cpc.util.plugin.testCommand("%s -version" % cmdnames.gmxdump)
        return
    persDir = inp.getPersistentDir()
    outDir = inp.getOutputDir()
    fo = inp.getFunctionOutput()
    rsrc = Resources(inp.getInputValue("resources"))
    rsrcFilename = os.path.join(persDir, 'rsrc.dat')
    # check whether we need to reinit
    pers = cpc.dataflow.Persistence(os.path.join(persDir, "persistent.dat"))
    init = False
    lasttpr = pers.get('lasttpr')
    newtpr = inp.getInput('tpr')
    #if inp.getInputValue('tpr').isUpdated():
    if newtpr != lasttpr:
        lasttpr = newtpr
        # there was no previous command.
        # purge the persistent directory, by moving the confout files to a
        # backup directory
        log.debug("(Re)initializing mdrun")
        confout = glob.glob(os.path.join(persDir, "run_???"))
        if len(confout) > 0:
            backupDir = os.path.join(persDir, "backup")
            try:
                os.mkdir(backupDir)
            except OSError:
                # backup dir already exists; reuse it
                pass
            for conf in confout:
                try:
                    os.rename(conf,
                              os.path.join(backupDir,
                                           os.path.split(conf)[-1]))
                except OSError:
                    # best effort: skip dirs that cannot be moved
                    pass
        init = True
        pers.set('lasttpr', lasttpr)
    elif inp.cmd is None:
        # nothing changed and no finished command to process: no-op
        return fo
    if init:
        if rsrc.max.get('cores') is None:
            # no core count known yet: tune against the extracted conf
            confFile = os.path.join(persDir, 'conf.gro')
            extractConf(newtpr, confFile)
            tune.tune(rsrc, confFile, newtpr, persDir)
        if inp.cmd is not None:
            log.debug("Canceling commands")
            fo.cancelPrevCommands()
        pers.set('initialized', True)
    else:
        if rsrc.max.get('cores') is None:
            # reuse the tuning result saved by a previous invocation
            rsrc.load(rsrcFilename)
    if inp.cmd is not None:
        log.debug("Return code was %s" % str(inp.cmd.getReturncode()))
    # try to find out whether the run has already finished
    confout = glob.glob(os.path.join(persDir, "run_???", "confout.*gro"))
    if len(confout) > 0:
        confoutDir = os.path.dirname(confout[0])
        hasFinalData = checkConfoutDir(confoutDir)
        if hasFinalData:
            log.debug("Extracting data. ")
            # confout exists. we're finished. Concatenate all the runs if
            # we need to, but first create the output dict
            extractData(confout, outDir, persDir, fo)
            return fo

    tfc = TrajFileCollection(persDir)
    lastDir = tfc.getLastDir()
    # first check whether we got an error code back
    if (inp.cmd is not None) and inp.cmd.getReturncode() != 0:
        # there was a problem. Check the log
        if lastDir:
            stde = os.path.join(lastDir, "stderr")
            # checkErr returns True for an unrecoverable issue
            if checkErr(stde, rsrc, newtpr, persDir):
                if os.path.exists(stde):
                    stdef = open(stde, 'r')
                    errmsg = unicode(stdef.read(), errors='ignore')
                    stdef.close()
                    raise MdrunError("Error running mdrun: %s" % errmsg)
        else:
            log.debug("An error has occured, but no lastDir was found.")

        # now check whether any of the last 4 iterations produced
        # trajectories
        trajlist = tfc.getTrajList()
        if len(trajlist) > 4:
            ret = False
            for j in range(4):
                haveTraj = (len(trajlist[-j - 1]) > 0)
                ret = ret or haveTraj  #prevtraj[-j-1]
            if not ret:
                # four failed iterations in a row with no output: give up
                if lastDir:
                    stde = os.path.join(lastDir, "stderr")
                    if os.path.exists(stde):
                        stdef = open(stde, 'r')
                        errmsg = unicode(stdef.read(), errors='ignore')
                        stdef.close()
                    else:
                        errmsg = ""
                    raise MdrunError(
                        "Error running mdrun. No trajectories: %s" % errmsg)
                else:
                    raise MdrunError(
                        "Error running mdrun. No trajectories and no lastDir was found."
                    )
    # Make a new directory with the continuation of this run
    #newdirname=currundir #"run_%03d"%(i+1)
    newdirname = tfc.getNewRunDir()
    log.debug("Making a new directory for this run: %s" % newdirname)
    try:
        os.mkdir(newdirname)
    except OSError:
        log.debug("Directory already exists.")
        pass
    tpr = newtpr
    src = os.path.join(inp.getBaseDir(), tpr)
    dst = os.path.join(newdirname, "topol.tpr")
    shutil.copy(src, dst)
    # handle command line inputs
    if inp.getInput('cmdline_options') is not None:
        cmdlineOpts = shlex.split(inp.getInput('cmdline_options'))
    else:
        cmdlineOpts = []
    if inp.getInput('priority') is not None:
        prio = inp.getInput('priority')
    else:
        prio = 0
    lastcpt = tfc.getLastCpt()
    # copy the checkpoint to the new cmd dir
    if lastcpt is not None:
        shutil.copy(lastcpt, os.path.join(newdirname, "state.cpt"))
        log.debug("Continuing from checkpoint")
    # now add to the priority if this run has already been started
    completed = tfc.getFractionCompleted(tpr)
    if completed > 0:
        log.debug("Fraction completed: %s" % completed)
        # Already finished, but no confout.gro?
        if completed >= 1:
            log.debug(
                "Iteration finished, but the final coordinates were not written."
            )
            if tfc.trajlist[-1].get('edr') or tfc.trajlist[-1].get(
                    'xtc') or tfc.trajlist[-1].get('trr'):
                log.debug(
                    "Last run produced output files without gaps (but no confout.gro). Generating coordinates from checkpoint."
                )
                confout = tfc.checkpointToConfout()
                if confout:
                    log.debug("Extracting data.")
                    extractData([confout], outDir, persDir, fo)
                    return fo
            else:
                log.debug(
                    "Last run did not produce any output files. Cannot generate coordinates from checkpoint."
                )
        # now the priority ranges from 1 to 4, depending on how
        # far along the simulation is.
        prio += 1 + int(3 * (completed))
        log.debug("Setting new priority to %d because it's in progress" % prio)
    # we can always add state.cpt, even if it doesn't exist.
    # NOTE(review): '-rcon 0.7' looks like a domain-decomposition
    # constraint-distance setting -- confirm the value against the tuning.
    args = [
        "-quiet", "-s", "topol.tpr", "-noappend", "-cpi", "state.cpt", "-rcon",
        "0.7"
    ]
    args.extend(cmdlineOpts)
    # for the new neighbor search scheme in Gromacs 4.6, set this env
    # variable

    # any expected output files.
    newFileNr = tfc.getLastTrajNr() + 1
    outputFiles = [
        "traj.part%04d.xtc" % newFileNr,
        "traj.part%04d.trr" % newFileNr,
        "confout.part%04d.gro" % newFileNr,
        "ener.part%04d.edr" % newFileNr,
        "dhdl.part%04d.xvg" % newFileNr,
        "pullx.part%04d.xvg" % newFileNr,
        "pullf.part%04d.xvg" % newFileNr,
        "md.part%04d.log" % newFileNr, "state.cpt", "state_prev.cpt"
    ]
    log.debug("Expected output files: %s" % outputFiles)
    cmd = cpc.command.Command(newdirname,
                              "gromacs/mdrun",
                              args,
                              minVersion=cpc.command.Version("4.5"),
                              addPriority=prio,
                              outputFiles=outputFiles)
    if inp.hasInput("resources") and inp.getInput("resources") is not None:
        #log.debug("resources is %s"%(inp.getInput("resources")))
        #rsrc=Resources(inp.getInputValue("resources"))
        rsrc.updateCmd(cmd)
    log.debug("Adding command")
    fo.addCommand(cmd)
    if inp.getInputValue('tpr').isUpdated() and inp.cmd is not None:
        log.debug("Canceling commands")
        fo.cancelPrevCommands()

    # and save for further invocations
    rsrc.save(rsrcFilename)
    pers.write()
    return fo