Code Example #1
File: io.py  Project: bopopescu/relax
def delete(file_name, dir=None, fail=True):
    """Deleting the given file, taking into account missing compression extensions.

    @param file_name:       The name of the file to delete.
    @type file_name:        str
    @keyword dir:           The directory containing the file.
    @type dir:              None or str
    @keyword fail:          A flag which if True will cause RelaxFileError to be raised.
    @type fail:             bool
    @raises RelaxFileError: If the file does not exist and fail is set to True.
    """

    # File path.
    file_path = get_file_path(file_name, dir)

    # Test if the file exists and determine the compression type.
    if access(file_path, F_OK):
        pass
    elif access(file_path + '.bz2', F_OK):
        file_path = file_path + '.bz2'
    elif access(file_path + '.gz', F_OK):
        file_path = file_path + '.gz'
    elif fail:
        raise RelaxFileError(file_path)
    else:
        return

    # Remove the file.
    remove(file_path)
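
The helper above depends on relax's get_file_path() and on names imported elsewhere in io.py (access, remove, F_OK, RelaxFileError). A minimal stdlib-only sketch of the same probe-then-remove pattern, with a hypothetical function name and a plain IOError standing in for RelaxFileError:

from os import F_OK, access, remove
from os.path import join

def delete_with_compression(file_name, dir=None, fail=True):
    # Build the path, then probe the plain file and its compressed variants.
    file_path = join(dir, file_name) if dir else file_name
    for candidate in (file_path, file_path + '.bz2', file_path + '.gz'):
        if access(candidate, F_OK):
            remove(candidate)
            return
    # Nothing matched: raise or silently return, mirroring the 'fail' flag above.
    if fail:
        raise IOError("The file %s does not exist." % file_path)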
Code Example #2
File: io.py  Project: bopopescu/relax
def determine_compression(file_path):
    """Function for determining the compression type, and for also testing if the file exists.

    @param file_path:   The full file path of the file.
    @type file_path:    str
    @return:            A tuple of the compression type and full path of the file (including its extension).  A value of 0 corresponds to no compression.  Bzip2 compression corresponds to a value of 1.  Gzip compression corresponds to a value of 2.
    @rtype:             (int, str)
    """

    # The file has been supplied without its compression extension.
    if access(file_path, F_OK):
        compress_type = 0
        if search(r'\.bz2$', file_path):
            compress_type = 1
        elif search(r'\.gz$', file_path):
            compress_type = 2

    # The file has been supplied with the '.bz2' extension.
    elif access(file_path + '.bz2', F_OK):
        file_path = file_path + '.bz2'
        compress_type = 1

    # The file has been supplied with the '.gz' extension.
    elif access(file_path + '.gz', F_OK):
        file_path = file_path + '.gz'
        compress_type = 2

    # The file doesn't exist.
    else:
        raise RelaxFileError(file_path)

    # Return the compression type.
    return compress_type, file_path
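
The same detection logic works as a stdlib-only sketch; the integer codes follow the docstring (0 = uncompressed, 1 = bzip2, 2 = gzip), while the function name and the IOError are stand-ins:

from os import F_OK, access

def detect_compression(file_path):
    # The path was supplied with its extension (or is uncompressed).
    if access(file_path, F_OK):
        if file_path.endswith('.bz2'):
            return 1, file_path
        if file_path.endswith('.gz'):
            return 2, file_path
        return 0, file_path
    # The path was supplied without the compression extension.
    if access(file_path + '.bz2', F_OK):
        return 1, file_path + '.bz2'
    if access(file_path + '.gz', F_OK):
        return 2, file_path + '.gz'
    raise IOError("The file %s does not exist." % file_path)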
Code Example #3
def read(file=None, dir=None, version=None, sample_conditions=None):
    """Read the contents of a BMRB NMR-STAR formatted file.

    @keyword file:              The name of the BMRB STAR formatted file.
    @type file:                 str
    @keyword dir:               The directory where the file is located.
    @type dir:                  None or str
    @keyword version:           The BMRB version to force when reading the file.
    @type version:              None or str
    @keyword sample_conditions: The sample condition label to read.  Only one sample condition can be read per data pipe.
    @type sample_conditions:    None or str
    """

    # Test if bmrblib is installed.
    if not dep_check.bmrblib_module:
        raise RelaxNoModuleInstallError('BMRB library', 'bmrblib')

    # Test if the current data pipe exists.
    pipe_name = cdp_name()
    if not pipe_name:
        raise RelaxNoPipeError

    # Make sure that the data pipe is empty.
    if not ds[pipe_name].is_empty():
        raise RelaxError("The current data pipe is not empty.")

    # Get the full file path.
    file_path = get_file_path(file_name=file, dir=dir)

    # Fail if the file does not exist.
    if not access(file_path, F_OK):
        raise RelaxFileError(file_path)

    # Read the results.
    api = return_api(pipe_name=pipe_name)
    api.bmrb_read(file_path,
                  version=version,
                  sample_conditions=sample_conditions)
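
Most of read() is guard clauses: an optional-dependency check, data pipe checks, and a file check before handing off to the analysis API. A stdlib-only sketch of that guard pattern, with a placeholder instead of relax's api.bmrb_read() call:

import importlib.util
from os import F_OK, access

def read_star(file_path):
    # Optional dependency check (bmrblib is the dependency tested above).
    if importlib.util.find_spec('bmrblib') is None:
        raise RuntimeError("The bmrblib module is not installed.")

    # Fail early if the file does not exist.
    if not access(file_path, F_OK):
        raise IOError("The file %s does not exist." % file_path)

    # Hand off to the format-specific reader (placeholder only).
    print("Would parse %s here." % file_path)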
Code Example #4
def extract(dir, spin_id=None):
    """Extract the Modelfree4 results out of the 'mfout' file.

    @param dir:         The directory containing the 'mfout' file.
    @type dir:          str or None
    @keyword spin_id:   The spin identification string.
    @type spin_id:      str or None
    """

    # Test if sequence data is loaded.
    if not exists_mol_res_spin_data():
        raise RelaxNoSequenceError

    # Check for the diffusion tensor.
    if not hasattr(cdp, 'diff_tensor'):
        raise RelaxNoTensorError('diffusion')

    # The directory.
    if dir is None:
        dir = pipes.cdp_name()
    if not access(dir, F_OK):
        raise RelaxDirError('Modelfree4', dir)

    # Test if the file exists.
    if not access(dir + sep + 'mfout', F_OK):
        raise RelaxFileError('Modelfree4', dir + sep + 'mfout')

    # Determine the parameter set.
    model_type = determine_model_type()

    # Open the file.
    mfout_file = open(dir + sep + 'mfout', 'r')
    mfout_lines = mfout_file.readlines()
    mfout_file.close()

    # Get the section line positions of the mfout file.
    global_chi2_pos, diff_pos, s2_pos, s2f_pos, s2s_pos, te_pos, rex_pos, chi2_pos = line_positions(
        mfout_lines)

    # Find out if simulations were carried out.
    sims = 0
    for i in range(len(mfout_lines)):
        if search('_iterations', mfout_lines[i]):
            row = mfout_lines[i].split()
            sims = int(row[1])

    # Global data.
    if model_type in ['all', 'diff']:
        # Global chi-squared.
        row = mfout_lines[global_chi2_pos].split()
        cdp.chi2 = float(row[1])

        # Spherical diffusion tensor.
        if cdp.diff_tensor.type == 'sphere':
            # Split the lines.
            tm_row = mfout_lines[diff_pos].split()

            # Set the params.
            cdp.diff_tensor.set(param='tm', value=float(tm_row[2]))

        # Spheroid diffusion tensor.
        else:
            # Split the lines.
            tm_row = mfout_lines[diff_pos].split()
            dratio_row = mfout_lines[diff_pos + 1].split()
            theta_row = mfout_lines[diff_pos + 2].split()
            phi_row = mfout_lines[diff_pos + 3].split()

            # Set the params.
            diffusion_tensor.set([
                float(tm_row[2]),
                float(dratio_row[2]),
                float(theta_row[2]) * 2.0 * pi / 360.0,
                float(phi_row[2]) * 2.0 * pi / 360.0
            ], ['tm', 'Dratio', 'theta', 'phi'])

    # Loop over the sequence.
    pos = 0
    for spin, mol_name, res_num, res_name in spin_loop(spin_id,
                                                       full_info=True):
        # Skip deselected residues.
        if not spin.select:
            continue

        # Get the residue number from the mfout file.
        mfout_res_num = int(mfout_lines[s2_pos + pos].split()[0])

        # Skip the spin if the residue doesn't match.
        if mfout_res_num != res_num:
            continue

        # Test that the model has been set (needed to differentiate between te and ts).
        if not hasattr(spin, 'model'):
            raise RelaxNoModelError

        # Get the S2 data.
        if 's2' in spin.params:
            spin.s2, spin.s2_err = get_mf_data(mfout_lines, s2_pos + pos)

        # Get the S2f data.
        if 's2f' in spin.params or 's2s' in spin.params:
            spin.s2f, spin.s2f_err = get_mf_data(mfout_lines, s2f_pos + pos)

        # Get the S2s data.
        if 's2f' in spin.params or 's2s' in spin.params:
            spin.s2s, spin.s2s_err = get_mf_data(mfout_lines, s2s_pos + pos)

        # Get the te data.
        if 'te' in spin.params:
            spin.te, spin.te_err = get_mf_data(mfout_lines, te_pos + pos)
            spin.te = spin.te / 1e12
            spin.te_err = spin.te_err / 1e12

        # Get the ts data.
        if 'ts' in spin.params:
            spin.ts, spin.ts_err = get_mf_data(mfout_lines, te_pos + pos)
            spin.ts = spin.ts / 1e12
            spin.ts_err = spin.ts_err / 1e12

        # Get the Rex data.
        if 'rex' in spin.params:
            spin.rex, spin.rex_err = get_mf_data(mfout_lines, rex_pos + pos)
            spin.rex = spin.rex / (2.0 * pi *
                                   cdp.spectrometer_frq[cdp.ri_ids[0]])**2
            spin.rex_err = spin.rex_err / (
                2.0 * pi * cdp.spectrometer_frq[cdp.ri_ids[0]])**2

        # Get the chi-squared data.
        if not sims:
            row = mfout_lines[chi2_pos + pos].split()
            spin.chi2 = float(row[1])
        else:
            # The mfout chi2 position (with no sims) plus 2 (for the extra XML) plus the residue position times 22 (because of the simulated SSE rows).
            row = mfout_lines[chi2_pos + 2 + 22 * pos].split()
            spin.chi2 = float(row[1])

        # Increment the residue position.
        pos = pos + 1
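
Most of the per-spin work is splitting a whitespace-delimited mfout line into a value/error pair and then converting units (the code above divides te and ts by 1e12 and converts the tensor angles from degrees to radians). A small illustrative sketch of those steps; the column indices are assumptions for illustration, not the real mfout layout:

from math import pi

def split_value_error(line, value_col=1, error_col=2):
    # Split one whitespace-delimited results line into (value, error) floats.
    row = line.split()
    return float(row[value_col]), float(row[error_col])

def deg_to_rad(angle):
    # Degrees to radians, as for the spheroid theta and phi angles above.
    return angle * 2.0 * pi / 360.0

def scale_time(time):
    # The te and ts values above are divided by 1e12 in the same way.
    return time / 1e12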
Code Example #5
def execute(dir, force, binary):
    """Execute Modelfree4.

    BUG:  Control-C during execution causes the cwd to stay as dir.


    @param dir:     The optional directory where the script is located.
    @type dir:      str or None
    @param force:   A flag which if True will cause any pre-existing files to be overwritten by
                    Modelfree4.
    @type force:    bool
    @param binary:  The name of the Modelfree4 binary file.  This can include the path to the
                    binary.
    @type binary:   str
    """

    # Check for the diffusion tensor.
    if not hasattr(cdp, 'diff_tensor'):
        raise RelaxNoTensorError('diffusion')

    # The current directory.
    orig_dir = getcwd()

    # The directory.
    if dir is None:
        dir = pipes.cdp_name()
    if not access(dir, F_OK):
        raise RelaxDirError('Modelfree4', dir)

    # Change to this directory.
    chdir(dir)

    # Catch failures and return to the correct directory.
    try:
        # Python 2.3 and earlier.
        if Popen is None:
            raise RelaxError(
                "The subprocess module is not available in this version of Python."
            )

        # Test if the 'mfin' input file exists.
        if not access('mfin', F_OK):
            raise RelaxFileError('mfin input', 'mfin')

        # Test if the 'mfdata' input file exists.
        if not access('mfdata', F_OK):
            raise RelaxFileError('mfdata input', 'mfdata')

        # Test if the 'mfmodel' input file exists.
        if not access('mfmodel', F_OK):
            raise RelaxFileError('mfmodel input', 'mfmodel')

        # Test if the 'mfpar' input file exists.
        if not access('mfpar', F_OK):
            raise RelaxFileError('mfpar input', 'mfpar')

        # Test if the 'PDB' input file exists.
        if cdp.diff_tensor.type != 'sphere':
            pdb = cdp.structure.structural_data[0].mol[0].file_name
            if not access(pdb, F_OK):
                raise RelaxFileError('PDB', pdb)
        else:
            pdb = None

        # Remove any pre-existing files ending in 'out' or 'rotate' (including 'mfout') if the force flag is set.
        if force:
            for file in listdir(getcwd()):
                if search('out$', file) or search('rotate$', file):
                    remove(file)

        # Test the binary file string corresponds to a valid executable.
        test_binary(binary)

        # Execute Modelfree4.
        if pdb:
            cmd = binary + ' -i mfin -d mfdata -p mfpar -m mfmodel -o mfout -e out -s ' + pdb
        else:
            cmd = binary + ' -i mfin -d mfdata -p mfpar -m mfmodel -o mfout -e out'
        pipe = Popen(cmd,
                     shell=True,
                     stdin=PIPE,
                     stdout=PIPE,
                     stderr=PIPE,
                     close_fds=False)
        out, err = pipe.communicate()

        # Close the pipe.
        pipe.stdin.close()

        # Write to stdout.
        if out:
            # Decode Python 3 byte arrays.
            if hasattr(out, 'decode'):
                out = out.decode()

            # Write.
            sys.stdout.write(out)

        # Write to stderr.
        if err:
            # Decode Python 3 byte arrays.
            if hasattr(err, 'decode'):
                err = err.decode()

            # Write.
            sys.stderr.write(err)

        # Catch errors.
        if pipe.returncode == -signal.SIGSEGV:
            raise RelaxError(
                "Modelfree4 return code 11 (Segmentation fault).\n")
        elif pipe.returncode:
            raise RelaxError("Modelfree4 return code %s.\n" % pipe.returncode)

    # Failure.
    except:
        # Change back to the original directory.
        chdir(orig_dir)

        # Reraise the error.
        raise

    # Change back to the original directory.
    chdir(orig_dir)
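
The cleanup above uses a bare except that re-raises, plus a second chdir() after the try block; a try/finally expresses the same restore-the-working-directory intent in one place. A stdlib-only sketch with a hypothetical helper name, keeping the same return-code checks:

import os
import signal
import sys
from subprocess import PIPE, Popen

def run_in_dir(cmd, dir):
    # Remember the current directory, run the command inside 'dir', and
    # always change back, whatever happens.
    orig_dir = os.getcwd()
    os.chdir(dir)
    try:
        pipe = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        out, err = pipe.communicate()
        if out:
            sys.stdout.write(out.decode())
        if err:
            sys.stderr.write(err.decode())
        if pipe.returncode == -signal.SIGSEGV:
            raise RuntimeError("Return code 11 (segmentation fault).")
        if pipe.returncode:
            raise RuntimeError("Return code %s." % pipe.returncode)
    finally:
        os.chdir(orig_dir)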
Code Example #6
def cpmgfit_execute(dir=None, binary='cpmgfit', force=False):
    """Execute CPMGFit for each spin input file.

    @keyword dir:       The directory where the input files are located.  If None, this defaults to the dispersion model name in lowercase.
    @type dir:          str or None
    @keyword binary:    The name of the CPMGFit binary file.  This can include the path to the binary.
    @type binary:       str
    @keyword force:     A flag which if True will cause any pre-existing files to be overwritten by CPMGFit.
    @type force:        bool
    """

    # Test if the current pipe exists.
    check_pipe()

    # Test if sequence data is loaded.
    if not exists_mol_res_spin_data():
        raise RelaxNoSequenceError

    # Test if the experiment type has been set.
    if not hasattr(cdp, 'exp_type'):
        raise RelaxError("The relaxation dispersion experiment type has not been specified.")

    # Test if the model has been set.
    if not hasattr(cdp, 'model_type'):
        raise RelaxError("The relaxation dispersion model has not been specified.")

    # The directory.
    if dir is not None and not access(dir, F_OK):
        raise RelaxDirError('CPMGFit', dir)

    # Loop over each spin.
    for spin, spin_id in spin_loop(return_id=True, skip_desel=True):
        # Translate the model.
        function = translate_model(spin.model)

        # The spin input file name.
        file_in = dir + sep + spin_file_name(spin_id=spin_id)
        if not access(file_in, F_OK):
            raise RelaxFileError("spin input", file_in)

        # The spin output file name.
        file_out = dir + sep + spin_file_name(spin_id=spin_id, output=True)

        # Test the binary file string corresponds to a valid executable.
        test_binary(binary)

        # Execute CPMGFit.
        cmd = "%s -grid -xmgr -f %s | tee %s\n" % (binary, file_in, file_out)
        print("\n\n%s" % cmd)
        pipe = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE, close_fds=True)

        # Write to stderr.
        for line in pipe.stderr.readlines():
            # Decode Python 3 byte arrays.
            if hasattr(line, 'decode'):
                line = line.decode()

            # Write.
            sys.stderr.write(line)

        # Write to stdout.
        for line in pipe.stdout.readlines():
            # Decode Python 3 byte arrays.
            if hasattr(line, 'decode'):
                line = line.decode()

            # Write.
            sys.stdout.write(line)
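
Draining pipe.stderr.readlines() before pipe.stdout.readlines() can block if the child fills the stdout buffer first; communicate() collects both streams without that risk. A stdlib-only variant of the execute-and-echo step above, with a hypothetical helper name:

import sys
from subprocess import PIPE, Popen

def run_and_echo(cmd):
    # Run one shell command and echo both streams once it has finished.
    pipe = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE, close_fds=True)
    out, err = pipe.communicate()
    if err:
        sys.stderr.write(err.decode())
    if out:
        sys.stdout.write(out.decode())
    return pipe.returncode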
Code Example #7
def extract(dir):
    """Extract the data from the Dasha results files.

    @param dir:     The optional directory where the results file is located.
    @type dir:      str or None
    """

    # Test if sequence data is loaded.
    if not exists_mol_res_spin_data():
        raise RelaxNoSequenceError

    # The directory.
    if dir is None:
        dir = pipes.cdp_name()
    if not access(dir, F_OK):
        raise RelaxDirError('Dasha', dir)

    # Loop over the parameters.
    for param in ['s2', 's2f', 's2s', 'te', 'tf', 'ts', 'rex']:
        # The file name.
        file_name = dir + sep + param + '.out'

        # Test if the file exists.
        if not access(file_name, F_OK):
            raise RelaxFileError('Dasha', file_name)

        # Scaling.
        if param in ['te', 'tf', 'ts']:
            scaling = 1e-9
        elif param == 'rex':
            scaling = 1.0 / (2.0 * pi * cdp.spectrometer_frq[cdp.ri_ids[0]])**2
        else:
            scaling = 1.0

        # Read the values.
        data = read_results(file=file_name, scaling=scaling)

        # Set the values.
        for i in range(len(data)):
            value.set(val=data[i][1], param=param, spin_id=data[i][0])
            value.set(val=data[i][0],
                      param=param,
                      spin_id=data[i][0],
                      error=True)

        # Clean up of non-existent parameters (set the parameter to None!).
        for spin in spin_loop():
            # Skip the spin (don't set the parameter to None) if the parameter exists in the model.
            if param in spin.params:
                continue

            # Set the parameter to None.
            setattr(spin, param.lower(), None)

    # Extract the chi-squared values.
    file_name = dir + sep + 'chi2.out'

    # Test if the file exists.
    if not access(file_name, F_OK):
        raise RelaxFileError('Dasha', file_name)

    # Read the values.
    data = read_results(file=file_name)

    # Set the values.
    for i in range(len(data)):
        spin = return_spin(spin_id=data[i][0])
        spin.chi2 = data[i][1]
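
The only non-trivial scaling above is for Rex, which is stored field-independently by dividing the fitted value by (2*pi*frq)**2, where frq is the spectrometer frequency of the first relaxation data ID. A worked one-liner with an illustrative 600 MHz frequency:

from math import pi

frq = 600.0e6                      # Example spectrometer frequency in Hz.
rex_fitted = 2.0                   # Rex value as read from rex.out (before scaling).
rex_stored = rex_fitted / (2.0 * pi * frq)**2
print(rex_stored)                  # The field-independent value set on the spin.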
Code Example #8
def execute(dir, force, binary):
    """Execute Dasha.

    @param dir:     The optional directory where the script is located.
    @type dir:      str or None
    @param force:   A flag which if True will cause any pre-existing files to be overwritten by Dasha.
    @type force:    bool
    @param binary:  The name of the Dasha binary file.  This can include the path to the binary.
    @type binary:   str
    """

    # Test the binary file string corresponds to a valid executable.
    test_binary(binary)

    # The current directory.
    orig_dir = getcwd()

    # The directory.
    if dir is None:
        dir = pipes.cdp_name()
    if not access(dir, F_OK):
        raise RelaxDirError('Dasha', dir)

    # Change to this directory.
    chdir(dir)

    # Catch failures and return to the correct directory.
    try:
        # Test if the 'dasha_script' script file exists.
        if not access('dasha_script', F_OK):
            raise RelaxFileError('dasha script', 'dasha_script')

        # Python 2.3 and earlier.
        if Popen is None:
            raise RelaxError(
                "The subprocess module is not available in this version of Python."
            )

        # Execute Dasha.
        pipe = Popen(binary,
                     shell=True,
                     stdin=PIPE,
                     stdout=PIPE,
                     stderr=PIPE,
                     close_fds=False)

        # Get the contents of the script and pump it into Dasha.
        script = open('dasha_script')
        lines = script.readlines()
        script.close()
        for line in lines:
            # Encode to a Python 3 byte array.
            if hasattr(line, 'encode'):
                line = line.encode()

            # Write out.
            pipe.stdin.write(line)

        # Close the pipe.
        pipe.stdin.close()

        # Write to stdout.
        for line in pipe.stdout.readlines():
            # Decode Python 3 byte arrays.
            if hasattr(line, 'decode'):
                line = line.decode()

            # Write.
            sys.stdout.write(line)

        # Write to stderr.
        for line in pipe.stderr.readlines():
            # Decode Python 3 byte arrays.
            if hasattr(line, 'decode'):
                line = line.decode()

            # Write.
            sys.stderr.write(line)

    # Failure.
    except:
        # Change back to the original directory.
        chdir(orig_dir)

        # Reraise the error.
        raise

    # Change back to the original directory.
    chdir(orig_dir)

    # Print some blank lines (aesthetics)
    sys.stdout.write("\n\n")
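
Feeding the script file to the program's standard input and then echoing its output is the core of execute(). A stdlib-only sketch using communicate(), which writes the whole script at once instead of line by line; the helper name and paths are placeholders:

import sys
from subprocess import PIPE, Popen

def pipe_script(binary, script_path):
    # Read the script as bytes and hand it to the program's stdin.
    with open(script_path, 'rb') as script:
        text = script.read()
    pipe = Popen(binary, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    out, err = pipe.communicate(text)
    sys.stdout.write(out.decode())
    sys.stderr.write(err.decode())
    return pipe.returncode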