Пример #1
0
def kill_jobs(kill_names, message1='Killing job: ', message2=' early'):
    """This function takes a list of job names and kills the jobs associated with them, if the jobs are active

        Parameters
        ----------
            kill_names : list
                List of jobs to kill. A single job name (str) is also accepted.
            message1 : str
                Message prefix to report to stdout.
            message2 : str
                Message suffix to report to stdout.

    """
    # Accept a single job name as well as a list of names.
    # isinstance() is preferred over type(...) != list for type checks.
    if not isinstance(kill_names, list):
        kill_names = [kill_names]
    machine = tools.get_machine()

    active_jobs, active_ids = tools.list_active_jobs(ids=True)
    active_jobs = list(zip(active_jobs, active_ids))

    # Keep only the active jobs whose names were requested for termination.
    jobs_to_kill = [[name, id_] for name, id_ in active_jobs
                    if name in kill_names]

    for name, id_ in jobs_to_kill:
        print(message1 + name + message2)
        if machine in ['gibraltar']:  # SGE-style queue
            tools.call_bash('qdel ' + str(id_))
        elif machine in ['comet', 'bridges']:  # Slurm-style queues
            tools.call_bash('scancel ' + str(id_))
        else:
            raise ValueError('Sardines.')
Пример #2
0
def resub_scf(outfile_path):
    """Resubmits a job that's having trouble converging the scf with different level shifts (1.0 and 0.1).

    Parameters
    ----------
        outfile_path : str
            The name of an output file.

    Returns
    -------
        Resub_flag : bool
            True if resubmitted.

    """
    # Check the resubmission history first: if level shifting has already been
    # tried (or a clean resub is pending), record the failure and bail out.
    history = resub_history()
    history.read(outfile_path)

    outfile_name = os.path.split(outfile_path)[-1]
    already_tried = False
    if 'SCF convergence error, level shifts adjusted to aid convergence' in history.notes:
        already_tried = True
        history.status = outfile_name + ' has been submitted with levels shifted and is still encountering an scf error'
        history.save()
    if 'Needs clean resub' in history.notes:
        already_tried = True
        history.status = outfile_name + ' job recovery has failed - requesting resub_scf() after clean resubmission round'
        history.save()

    if already_tried:
        return False

    # Archive the failed run and record this recovery attempt.
    save_run(outfile_path, rewrite_inscr=False)
    history = resub_history()
    history.read(outfile_path)
    history.resub_number += 1
    history.status = 'Level shifts adjusted to assist convergence'
    history.needs_resub = True
    history.notes.append('SCF convergence error, level shifts adjusted to aid convergence')
    history.save()

    machine = tools.get_machine()
    root = outfile_path.rsplit('.', 1)[0]
    name = os.path.split(root)[-1]
    directory = os.path.split(outfile_path)[0]
    infile_dict = manager_io.read_infile(outfile_path)

    home = os.getcwd()
    if directory:  # a blank string means we are already in the right directory
        os.chdir(directory)

    # Rewrite the input with shifted levels, regenerate the jobscript, and submit.
    infile_dict['levelshifta'] = 1.0
    infile_dict['levelshiftb'] = 0.1
    infile_dict['machine'] = machine
    manager_io.write_input(infile_dict)

    manager_io.write_jobscript(name, machine=machine)
    os.chdir(home)
    tools.qsub(root + '_jobscript')
    return True
Пример #3
0
def resub_tighter(outfile_path):
    """Resubmits a thermo job with the gradient error problem. Finds the parent job and resubmits it with a tighter scf
    convergence criteria.

    Parameters
    ----------
        outfile_path : str
            The name of an output file.

    Returns
    -------
        Resub_flag : bool
            True if resubmitted.

    """
    # outfile_path points at the thermo job; the parent optimization is the run
    # that actually gets resubmitted with tightened thresholds.
    machine = tools.get_machine()  # not referenced below; kept for its queue-side lookup, as in the original

    thermo_base = os.path.split(outfile_path)[-1].rsplit('.', 1)[0]
    parent_name = thermo_base.rsplit('_', 1)[0]
    parent_directory = os.path.split(os.path.split(outfile_path)[0])[0]
    parent_path = os.path.join(parent_directory, parent_name + '.out')
    ultratight_path = os.path.join(parent_directory, parent_name + '_ultratight',
                                   parent_name + '_ultratight.out')

    archive_scr_after_prep = False
    if os.path.exists(ultratight_path):
        # An ultratight resubmission has happened before; archive those results.
        # The scr must be saved AFTER prepping the new ultratight run to keep
        # compatibility with other functions.
        save_run(ultratight_path, rewrite_inscr=False, save_scr_flag=False)
        archive_scr_after_prep = True

        record = resub_history()
        record.read(ultratight_path)
        record.resub_number += 1
        record.status = 'Running with tightened convergence thresholds'
        record.needs_resub = False
        record.notes.append('Further tightening convergence thresholds')
        record.save()

    jobscript = tools.prep_ultratight(parent_path)  # prep the tighter-convergence run
    if archive_scr_after_prep:
        save_scr(ultratight_path, rewrite_inscr=False)
    tools.qsub(jobscript)  # submit the tighter-convergence run

    # Park the original thermo run until the ultratight run finishes.
    waiting_record = resub_history()
    waiting_record.read(outfile_path)
    waiting_record.waiting = ultratight_path
    waiting_record.save()

    return True
Пример #4
0
def clean_resub(outfile_path):
    """Resubmits a job with default parameters, useful for undoing level shift or hfx alterations.

    Parameters
    ----------
        outfile_path : str
            The name of an output file.

    Returns
    -------
        Resub_flag : bool
            True if resubmitted.

    Raises
    ------
        ValueError
            If no coordinate file can be found for the resubmission.

    """
    # Archive the current run and record the clean resubmission in the history pickle.
    save_run(outfile_path)
    history = resub_history()
    history.read(outfile_path)
    history.resub_number += 1
    history.status = 'Normal'
    history.notes.append('Needs clean resub')
    history.needs_resub = False
    history.save()

    machine = tools.get_machine()
    root = outfile_path.rsplit('.', 1)[0]
    name = os.path.split(root)[-1]
    directory = os.path.split(outfile_path)[0]
    infile_dict = manager_io.read_infile(outfile_path)

    home = os.getcwd()
    if len(directory) > 0:  # if the string is blank, then we're already in the correct directory
        os.chdir(directory)

    if os.path.isfile('inscr/optimized.xyz'):
        coordinates = 'inscr/optimized.xyz'  # Should trigger for optimization runs
    elif os.path.isfile(name + '.xyz'):
        coordinates = name + '.xyz'  # Should trigger for single point runs
    else:
        # Message fixed: was the garbled 'No coordinates idenfied for clean in resubmission'.
        raise ValueError('No coordinates identified for clean resubmission in directory ' + os.getcwd())

    configure_dict = manager_io.read_configure('in_place', outfile_path)

    infile_dict['coordinates'] = coordinates
    infile_dict['method'] = configure_dict['method']
    infile_dict['levelshifta'], infile_dict['levelshiftb'] = configure_dict['levela'], configure_dict['levelb']
    infile_dict['dispersion'] = configure_dict['dispersion']
    infile_dict['constraints'] = False
    infile_dict['machine'] = machine

    # Restricted runs (spinmult 1) restart from a single wavefunction file;
    # unrestricted runs restart from the alpha/beta pair.
    if infile_dict['spinmult'] == 1:
        infile_dict['guess'] = 'inscr/c0'
    else:
        infile_dict['guess'] = 'inscr/ca0 inscr/cb0'
    manager_io.write_input(infile_dict)

    manager_io.write_jobscript(name, custom_line='# -fin inscr/', machine=machine)
    os.chdir(home)
    tools.qsub(root + '_jobscript')
    return True
Пример #5
0
def resub_thermo(outfile_path):
    """Resubmits a thermo job after a gradient error, reusing an ultratight geometry.

    Parameters
    ----------
        outfile_path : str
            The name of an output file.

    Returns
    -------
        Resub_flag : bool
            True if resubmitted.

    Raises
    ------
        Exception
            If no ultratight run exists, or its geometry/wavefunction files cannot be found.

    """
    # Similar to simple resub, but specific for addressing thermo gradient errors
    # Checks for the existence of an ultratight version of this run. If it exists, uses the most up to date version for the new thermo run

    # Archive the failed attempt and record the resubmission in the history pickle.
    save_run(outfile_path, rewrite_inscr=False)
    history = resub_history()
    history.read(outfile_path)
    history.resub_number += 1
    history.status = 'Normal'
    history.notes.append('Resubmitting thermo, possibly with a better initial geo')
    history.needs_resub = False
    history.save()

    machine=tools.get_machine()
    name = os.path.split(outfile_path)[-1]
    name = name.rsplit('.', 1)[0]  # strip the file extension
    directory = os.path.split(outfile_path)[0]
    parent_name = name.rsplit('_', 1)[0]  # thermo jobs are named <parent>_<suffix>
    parent_directory = os.path.split(os.path.split(outfile_path)[0])[0]
    ultratight_dir = os.path.join(parent_directory, parent_name + '_ultratight')

    infile_dict = manager_io.read_infile(outfile_path)

    if os.path.exists(ultratight_dir):
        # Pull the most recent optimized geometry out of the ultratight run.
        if os.path.exists(os.path.join(ultratight_dir, 'scr', 'optim.xyz')):
            tools.extract_optimized_geo(os.path.join(ultratight_dir, 'scr', 'optim.xyz'))
            shutil.copy(os.path.join(ultratight_dir, 'scr', 'optimized.xyz'), outfile_path.rsplit('.', 1)[0] + '.xyz')
        else:
            raise Exception('Unable to identify the ultratight geometry for run: ' + outfile_path)

        # Copy the converged wavefunction as the initial guess: c0 for restricted
        # (spinmult == 1) runs, the ca0/cb0 pair for unrestricted runs.
        if infile_dict['spinmult'] == 1 and os.path.exists(os.path.join(ultratight_dir, 'scr', 'c0')):
            shutil.copy(os.path.join(ultratight_dir, 'scr', 'c0'), os.path.join(directory, 'c0'))
        elif infile_dict['spinmult'] != 1 and os.path.exists(
                os.path.join(ultratight_dir, 'scr', 'ca0')) and os.path.exists(
            os.path.join(ultratight_dir, 'scr', 'cb0')):
            shutil.copy(os.path.join(ultratight_dir, 'scr', 'ca0'), os.path.join(directory, 'ca0'))
            shutil.copy(os.path.join(ultratight_dir, 'scr', 'cb0'), os.path.join(directory, 'cb0'))
        else:
            raise Exception('Unable to find wavefunction files for ultratight geometry for run: ' + outfile_path)
    else:
        raise Exception(
            'An ultratight run does not exist for this thermo file. Consider calling simple_resub() or resub_tighter() instead of resub_thermo()')

    jobscript = outfile_path.rsplit('.', 1)[0] + '_jobscript'
    tools.qsub(jobscript)
    return True
Пример #6
0
def resub_spin(outfile_path):
    """Resubmits a spin contaminated job with blyp to help convergence to a non-spin contaminated solution.

    Parameters
    ----------
        outfile_path : str
            The name of an output file.

    Returns
    -------
        Resub_flag : bool
            True if resubmitted.

    """
    # Check the history: skip the resubmission if lowering HFX was already tried,
    # a clean resub is pending, or this is an HFX resampling job.
    history = resub_history()
    history.read(outfile_path)
    resubbed_before = False
    if 'Spin contaminated, lowering HFX to aid convergence' in history.notes:
        resubbed_before = True
        history.status = os.path.split(outfile_path)[
                             -1] + ' has been submitted with lower HFX and still converges to a spin contaminated solution'
        history.save()
    if 'Needs clean resub' in history.notes:
        resubbed_before = True
        history.status = os.path.split(outfile_path)[
                             -1] + ' job recovery has failed - requesting resub_spin() after clean resubmission round'
        history.save()
    if 'HFXresampling' in outfile_path:
        resubbed_before = True
        history.status = os.path.split(outfile_path)[
                             -1] + ' is spin contaminated, but submitting with lower HFX does not make sense for HFX resampling jobs'
        history.save()

    if not resubbed_before:
        save_run(outfile_path, rewrite_inscr=False)
        history = resub_history()
        history.read(outfile_path)
        history.resub_number += 1
        history.status = 'HFX altered to assist convergence'
        history.needs_resub = True
        history.notes.append('Spin contaminated, lowering HFX to aid convergence')
        history.save()

        machine = tools.get_machine()
        root = outfile_path.rsplit('.', 1)[0]
        name = os.path.split(root)[-1]
        directory = os.path.split(outfile_path)[0]
        infile_dict = manager_io.read_infile(outfile_path)

        home = os.getcwd()
        if len(directory) > 0:  # if the string is blank, then we're already in the correct directory
            os.chdir(directory)

        infile_dict['method'] = 'blyp'
        infile_dict['machine'] = machine
        manager_io.write_input(infile_dict)

        manager_io.write_jobscript(name, machine=machine)
        # Return to the original directory, submit, and report the flag — this tail
        # was missing: the function previously chdir'd away, never submitted the
        # jobscript, and returned None (cf. resub_scf()/resub_oscillating_scf()).
        os.chdir(home)
        tools.qsub(root + '_jobscript')
        return True

    else:
        return False
Пример #7
0
def resub_oscillating_scf(outfile_path):
    """Resubmits a job with an oscillating scf, using double precision and a finer static DFT grid.

    Parameters
    ----------
        outfile_path : str
            The name of an output file.

    Returns
    -------
        Resub_flag : bool
            True if resubmitted.

    """
    # Resubmits a job whose scf is oscillating, with higher precision and a finer,
    # static DFT grid. (The previous comment about level shifts was a copy/paste
    # leftover from resub_scf().)
    history = resub_history()
    history.read(outfile_path)
    resubbed_before = False
    if 'SCF convergence error, precision and grid adjusted to aid convergence' in history.notes:
        resubbed_before = True
        history.status = os.path.split(outfile_path)[
                             -1] + ' has been submitted with higher precision and grid and is still encountering an scf error'
        history.save()
    if 'Needs clean resub' in history.notes:
        resubbed_before = True
        history.status = os.path.split(outfile_path)[
                             -1] + ' job recovery has failed - requesting resub_oscillating_scf() after clean resubmission round'
        history.save()

    if not resubbed_before:
        # Archive the failed run and record this recovery attempt.
        # NOTE(review): unlike resub_scf(), history.needs_resub is not set here —
        # confirm this is intentional.
        save_run(outfile_path, rewrite_inscr=False)
        history = resub_history()
        history.read(outfile_path)
        history.resub_number += 1
        history.status = 'precision and grid adjusted to assist convergence'
        history.notes.append('SCF convergence error, precision and grid adjusted to aid convergence')
        history.save()

        machine=tools.get_machine()
        root = outfile_path.rsplit('.', 1)[0]
        name = os.path.split(root)[-1]
        directory = os.path.split(outfile_path)[0]
        infile_dict = manager_io.read_infile(outfile_path)

        home = os.getcwd()
        if len(directory) > 0:  # if the string is blank, then we're already in the correct directory
            os.chdir(directory)
        # Double precision, dftgrid 5, and a static (non-dynamic) grid to damp the oscillation.
        infile_dict['precision'], infile_dict['dftgrid'], infile_dict['dynamicgrid'] = "double", 5, "no"
        infile_dict['machine'] = machine
        manager_io.write_input(infile_dict)

        manager_io.write_jobscript(name, machine=machine)
        os.chdir(home)
        tools.qsub(root + '_jobscript')
        return True
    else:
        return False
Пример #8
0
def resub_bad_geo(outfile_path, home_directory):
    """Resubmits a job that's converged to a bad geometry with additional constraints.

    Parameters
    ----------
        outfile_path : str
            The name of an output file.
        home_directory : str
            Path to the base directory of the run.

    Returns
    -------
        Resub_flag : bool
            True if resubmitted.

    Raises
    ------
        Exception
            If external constraints are already in use, or no goal geometry is configured.

    """
    # Resubmits a job that's converged to a bad geometry with additional constraints.
    # Skip the resubmission if this recovery strategy was already attempted.
    history = resub_history()
    history.read(outfile_path)
    resubbed_before = False
    if 'Bad geometry detected, adding constraints and trying again' in history.notes:
        resubbed_before = True
        history.status = os.path.split(outfile_path)[
                             -1] + " has been submitted with additional constraints and still isn't a good geometry"
        history.save()
    if 'Needs clean resub' in history.notes:
        resubbed_before = True
        history.status = os.path.split(outfile_path)[
                             -1] + ' job recovery has failed - requesting resub_bad_geo after clean resubmission round'
        history.save()

    if not resubbed_before:
        # Archive the previous run and record this recovery attempt.
        save_run(outfile_path, rewrite_inscr=True)
        history = resub_history()
        history.read(outfile_path)
        history.resub_number += 1
        history.status = 'Constraints added to help convergence'
        history.needs_resub = True
        history.notes.append('Bad geometry detected, adding constraints and trying again')
        history.save()

        machine=tools.get_machine()
        root = outfile_path.rsplit('.', 1)[0]  # outfile path without its extension
        name = os.path.split(root)[-1]
        directory = os.path.split(outfile_path)[0]
        infile_dict = manager_io.read_infile(outfile_path)

        if infile_dict['constraints']:
            raise Exception(
                'resub.py does not currently support the use of external atom constraints. These will be overwritten by clean_resub() during job recovery')

        goal_geo = manager_io.read_configure(home_directory, outfile_path)['geo_check']
        if not goal_geo:
            raise Exception(
                'Goal geometry not specified, job ' + outfile_path + ' should not have been labelled bad geo!')
        else:
            metal_index, bonded_atom_indices = moltools.get_metal_and_bonded_atoms(outfile_path, goal_geo)
            # convert indexes from zero-indexed to one-indexed
            metal_index += 1
            bonded_atom_indices = [index + 1 for index in bonded_atom_indices]
            # Convert to TeraChem input syntax: freeze each metal-ligand bond.
            constraints = ['bond ' + str(metal_index) + '_' + str(index) + '\n' for index in bonded_atom_indices]

        home = os.getcwd()
        if len(directory) > 0:  # if the string is blank, then we're already in the correct directory
            os.chdir(directory)

        infile_dict['constraints'] = constraints
        infile_dict['machine'] = machine
        manager_io.write_input(infile_dict)

        manager_io.write_jobscript(name, machine=machine)
        os.chdir(home)
        tools.qsub(root + '_jobscript')
        return True

    else:
        return False
Пример #9
0
def prep_ligand_breakown(outfile_path, dissociated_ligand_charges=None, dissociated_ligand_spinmults=None):
    """Prep ligand breakdown.

    Given a path to the outfile of a finished run, this preps the files for rigid
    ligand dissociation energies of all ligands.

    Parameters
    ----------
        outfile_path : str
            Path to output file.
        dissociated_ligand_charges : dict, optional
            Charges for dissociated ligands. Default is empty.
        dissociated_ligand_spinmults : dict, optional
            Spin multiplicity for dissociated ligands. Default is empty.

    Returns
    -------
        jobscripts : list
            List of jobscripts for ligand breakdown jobs.

    """
    # None sentinels replace the previous mutable default arguments ({}), which
    # are shared across calls and can leak state between invocations.
    if dissociated_ligand_charges is None:
        dissociated_ligand_charges = {}
    if dissociated_ligand_spinmults is None:
        dissociated_ligand_spinmults = {}

    home = os.getcwd()
    machine = tools.get_machine()
    outfile_path = tools.convert_to_absolute_path(outfile_path)

    results = manager_io.read_outfile(outfile_path)
    if not results['finished']:
        raise Exception('This calculation does not appear to be complete! Aborting...')

    infile_dict = manager_io.read_infile(outfile_path)
    charge = int(infile_dict['charge'])
    spinmult = int(infile_dict['spinmult'])

    base = os.path.split(outfile_path)[0]
    name = os.path.split(outfile_path)[-1][:-4]  # strip the 4-character '.out' extension

    breakdown_folder = os.path.join(base, name + '_dissociation')

    if os.path.isdir(breakdown_folder):
        return ['Ligand dissociation directory already exists']

    optimxyz = os.path.join(base, 'scr', 'optim.xyz')
    tools.extract_optimized_geo(optimxyz)

    mol = mol3D()
    mol.readfromxyz(os.path.join(base, 'scr', 'optimized.xyz'))

    ligand_idxs, _, _ = ligand_breakdown(mol, silent=True)

    ligand_syms = []
    for ii in ligand_idxs:
        ligand_syms.append([mol.getAtom(i).symbol() for i in ii])

    ligand_names = name_ligands(ligand_syms)

    if not os.path.isdir(breakdown_folder):
        os.mkdir(breakdown_folder)
    os.chdir(breakdown_folder)

    jobscripts = []
    for ligand_name, ligand_indices in zip(ligand_names, ligand_idxs):

        # Assign charges to use during the breakdown for special cases specified in
        # the configure file. All other ligands are assigned charge 0.
        ligand_charge = dissociated_ligand_charges.get(ligand_name, 0)
        metal_charge = charge - ligand_charge

        # Assign spin, which always remains with the metal except when the
        # dissociated ligand is defined to have spin (O2 for example).
        if spinmult == 1:  # If the whole complex is restricted, its components must be restricted as well
            ligand_spin, metal_spin = 1, 1
        else:
            ligand_spin = dissociated_ligand_spinmults.get(ligand_name, 1)
            metal_spin = spinmult - ligand_spin + 1  # Derived from spinmult = (2S+1) where S=1/2 per electron

        # Create the necessary files for the metal complex single point.
        local_name = name + '_rm_' + ligand_name
        if not os.path.isdir('rm_' + ligand_name):
            os.mkdir('rm_' + ligand_name)
            os.chdir('rm_' + ligand_name)

            local_mol = mol3D()
            local_mol.copymol3D(mol)

            # Remove this ligand, keeping the rest of the complex.
            local_mol.deleteatoms(ligand_indices)
            local_mol.writexyz(local_name + '.xyz')

            local_infile_dict = copy.copy(infile_dict)
            local_infile_dict['name'] = local_name
            local_infile_dict['coordinates'] = local_name + '.xyz'
            local_infile_dict['charge'], local_infile_dict['spinmult'] = metal_charge, metal_spin
            local_infile_dict['run_type'] = 'energy'
            local_infile_dict['constraints'], local_infile_dict['convergence_thresholds'] = False, False
            local_infile_dict['machine'] = machine

            manager_io.write_input(local_infile_dict)
            manager_io.write_jobscript(local_name, time_limit='12:00:00', machine=machine)
            jobscripts.append(local_name + '.in')
            os.chdir('..')

        # Create the necessary files for the dissociated ligand single point.
        local_name = name + '_kp_' + ligand_name
        if not os.path.isdir('kp_' + ligand_name):
            os.mkdir('kp_' + ligand_name)
            os.chdir('kp_' + ligand_name)

            local_mol = mol3D()
            local_mol.copymol3D(mol)
            # Keep only the ligand atoms by deleting everything else.
            deletion_indices = list(set(range(local_mol.natoms)) - set(ligand_indices))
            local_mol.deleteatoms(deletion_indices)
            local_mol.writexyz(local_name + '.xyz')

            local_infile_dict = copy.copy(infile_dict)
            local_infile_dict['name'] = local_name
            local_infile_dict['coordinates'] = local_name + '.xyz'
            local_infile_dict['charge'], local_infile_dict['spinmult'] = ligand_charge, ligand_spin
            local_infile_dict['run_type'] = 'energy'
            local_infile_dict['constraints'], local_infile_dict['convergence_thresholds'] = False, False
            local_infile_dict['machine'] = machine

            manager_io.write_input(local_infile_dict)
            manager_io.write_jobscript(local_name, time_limit='12:00:00', machine=machine)
            jobscripts.append(local_name + '.in')
            os.chdir('..')
    os.chdir(home)

    return jobscripts
Пример #10
0
#!/usr/bin/env python
import os
import shutil
import glob
import numpy as np
import molSimplify.job_manager.tools as tools
import molSimplify.job_manager.moltools as moltools
from molSimplify.job_manager.classes import resub_history
import molSimplify.job_manager.manager_io as manager_io

# Set machine as global variable, queried once at import time.
# NOTE(review): the resub_* functions elsewhere in this module also call
# tools.get_machine() locally — this global appears kept for other consumers; confirm.
machine = tools.get_machine()


def load_history(PATH):
    """Load the resubmission history for a run.

    PATH may point either to an outfile or to the resub_history pickle itself;
    returns the populated resub_history object.
    """
    record = resub_history()
    record.read(PATH)
    return record


## Archive the scr file so it isn't overwritten in future resubs
#  @param rewrite_inscr Determines whether to copy this runs wfn and optimized geometry to the inscr directory
def save_scr(outfile_path, rewrite_inscr=True):
    root = os.path.split(outfile_path)[0]
    # print("root: ", root)
    basepath = os.getcwd()
    # print("basepath: ", basepath)
    os.chdir(root)