import sys, os, time

from libtbx import group_args

import libtbx.load_env
data_dir = libtbx.env.under_dist(module_name="mmtbx",
                                 path="regression",
                                 test=os.path.isdir)

from mmtbx.domains_from_pae import get_domain_selections_from_pae_matrix

pae_file = os.path.join(data_dir, 'pae.json')
model_file = os.path.join(data_dir, 'pdbs', 'pae_model.pdb')

from iotbx.data_manager import DataManager
dm = DataManager()
distance_model = dm.get_model(model_file)
distance_model.add_crystal_symmetry_if_necessary()


def tst_01(log=sys.stdout):

    args = group_args(group_args_type='parameters',
                      pae_file=pae_file,
                      library='networkx',
                      pae_power=2.0,
                      pae_cutoff=5.0,
                      resolution=1.0,
                      select_range=False)

    # The call is truncated on the source page; a plausible completion is
    # shown below (the exact keyword names are an assumption, not confirmed
    # by the original listing):
    selections = get_domain_selections_from_pae_matrix(
        pae_file=args.pae_file,
        library=args.library,
        pae_power=args.pae_power,
        pae_cutoff=args.pae_cutoff,
        graph_resolution=args.resolution)
    print("Selections:", selections, file=log)
Example 2
def test_model_and_restraint():

    # Imports needed by this excerpt (standard cctbx locations)
    from iotbx.data_manager import DataManager
    from libtbx.utils import Sorry

    # from 3tpj
    model_str = '''
CRYST1  104.428  128.690   76.662  90.00  90.00  90.00 C 2 2 21
ATOM   5877  O   URE A 403     -37.796 -38.296   5.693  1.00 15.43           O
ATOM   5878  C   URE A 403     -36.624 -38.509   5.800  1.00 20.53           C
ATOM   5879  N2  URE A 403     -36.191 -39.836   6.120  1.00 27.82           N
ATOM   5880  N1  URE A 403     -35.679 -37.450   5.644  1.00 21.36           N
ATOM   5881 HN11 URE A 403     -34.792 -37.617   5.732  1.00 25.63           H
ATOM   5882 HN12 URE A 403     -35.965 -36.613   5.445  1.00 25.63           H
ATOM   5883 HN21 URE A 403     -35.307 -40.015   6.211  1.00 33.38           H
ATOM   5884 HN22 URE A 403     -36.801 -40.499   6.221  1.00 33.38           H
'''

    restraint_str = '''
#
data_comp_list
loop_
_chem_comp.id
_chem_comp.three_letter_code
_chem_comp.name
_chem_comp.group
_chem_comp.number_atoms_all
_chem_comp.number_atoms_nh
_chem_comp.desc_level
URE URE Unknown                   ligand 8 4 .
#
data_comp_URE
#
loop_
_chem_comp_atom.comp_id
_chem_comp_atom.atom_id
_chem_comp_atom.type_symbol
_chem_comp_atom.type_energy
_chem_comp_atom.partial_charge
_chem_comp_atom.x
_chem_comp_atom.y
_chem_comp_atom.z
URE        C       C   C     .          0.4968   -0.0000   -0.0000
URE        O       O   O     .          1.7184   -0.0000   -0.0000
URE        N1      N   NH2   .         -0.2180   -0.0000    1.2381
URE        N2      N   NH2   .         -0.2180    0.0000   -1.2381
URE        HN11    H   HNH2  .          0.2355   -0.0000    2.0237
URE        HN12    H   HNH2  .         -1.1251    0.0000    1.2382
URE        HN21    H   HNH2  .          0.2355    0.0000   -2.0237
URE        HN22    H   HNH2  .         -1.1251   -0.0000   -1.2382
#
loop_
_chem_comp_bond.comp_id
_chem_comp_bond.atom_id_1
_chem_comp_bond.atom_id_2
_chem_comp_bond.type
_chem_comp_bond.value_dist
_chem_comp_bond.value_dist_esd
URE  C       O      double        1.222 0.020
URE  C       N1     single        1.430 0.020
URE  C       N2     single        1.430 0.020
URE  N1      HN11   single        0.907 0.020
URE  N1      HN12   single        0.907 0.020
URE  N2      HN21   single        0.907 0.020
URE  N2      HN22   single        0.907 0.020
#
loop_
_chem_comp_angle.comp_id
_chem_comp_angle.atom_id_1
_chem_comp_angle.atom_id_2
_chem_comp_angle.atom_id_3
_chem_comp_angle.value_angle
_chem_comp_angle.value_angle_esd
URE  N2      C       N1           120.00 3.000
URE  N2      C       O            120.00 3.000
URE  N1      C       O            120.00 3.000
URE  HN12    N1      HN11         120.00 3.000
URE  HN12    N1      C            120.00 3.000
URE  HN11    N1      C            120.00 3.000
URE  HN22    N2      HN21         120.00 3.000
URE  HN22    N2      C            120.00 3.000
URE  HN21    N2      C            120.00 3.000
#
loop_
_chem_comp_tor.comp_id
_chem_comp_tor.id
_chem_comp_tor.atom_id_1
_chem_comp_tor.atom_id_2
_chem_comp_tor.atom_id_3
_chem_comp_tor.atom_id_4
_chem_comp_tor.value_angle
_chem_comp_tor.value_angle_esd
_chem_comp_tor.period
URE CONST_01      HN11    N1      C       O              0.00   0.0 0
URE CONST_02      HN12    N1      C       O            180.00   0.0 0
URE CONST_03      HN21    N2      C       O             -0.00   0.0 0
URE CONST_04      HN22    N2      C       O            180.00   0.0 0
URE CONST_05      HN21    N2      C       N1           180.00   0.0 0
URE CONST_06      HN22    N2      C       N1            -0.00   0.0 0
URE CONST_07      HN11    N1      C       N2          -180.00   0.0 0
URE CONST_08      HN12    N1      C       N2            -0.00   0.0 0
#
loop_
_chem_comp_plane_atom.comp_id
_chem_comp_plane_atom.plane_id
_chem_comp_plane_atom.atom_id
_chem_comp_plane_atom.dist_esd
URE plan-1  C      0.020
URE plan-1  O      0.020
URE plan-1  N1     0.020
URE plan-1  N2     0.020
URE plan-1  HN11   0.020
URE plan-1  HN12   0.020
URE plan-1  HN21   0.020
URE plan-1  HN22   0.020
'''

    model_filename = 'ure.pdb'
    restraint_filename = 'ure.cif'

    dm = DataManager(['model', 'restraint'])
    dm.write_model_file(model_str, filename=model_filename, overwrite=True)
    dm.write_restraint_file(restraint_str,
                            filename=restraint_filename,
                            overwrite=True)

    # getting the restraints manager fails because no restraints are loaded
    dm.process_model_file(model_filename)
    model = dm.get_model()
    try:
        model.get_restraints_manager()
    except Sorry:
        pass

    # automatically add restraints
    dm.process_restraint_file(restraint_filename)
    model = dm.get_model()
    model.get_restraints_manager()

    os.remove(model_filename)
    os.remove(restraint_filename)
Example 3
def test_01():

    # Names assumed to come from the original test module's imports:
    #   DataManager (iotbx.data_manager), map_model_manager and
    #   match_map_model_ncs (iotbx.map_model_manager), approx_equal
    #   (libtbx.test_utils), Sorry (libtbx.utils), parse (iotbx.phil),
    #   ProgramTemplate (libtbx.program_template)

    # Source data

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_ccp4 = os.path.join(data_dir, 'data', 'non_zero_origin_map.ccp4')
    data_pdb = os.path.join(data_dir, 'data', 'non_zero_origin_model.pdb')
    data_ncs_spec = os.path.join(data_dir, 'data',
                                 'non_zero_origin_ncs_spec.ncs_spec')

    # DataManager

    dm = DataManager(['ncs_spec', 'model', 'real_map', 'phil'])
    dm.set_overwrite(True)

    # Read in map and model and ncs

    map_file = data_ccp4
    dm.process_real_map_file(map_file)
    mm = dm.get_real_map(map_file)

    model_file = data_pdb
    dm.process_model_file(model_file)
    model = dm.get_model(model_file)

    ncs_file = data_ncs_spec
    dm.process_ncs_spec_file(ncs_file)
    ncs = dm.get_ncs_spec(ncs_file)

    ncs_dc = ncs.deep_copy()

    mmmn = match_map_model_ncs()
    mmmn.add_map_manager(mm)
    mmmn.add_model(model)
    mmmn.add_ncs_object(ncs)

    # Save it
    mmmn_dc = mmmn.deep_copy()

    # Make sure we can add an ncs object that is either shifted or not
    mmmn_dcdc = mmmn.deep_copy()
    new_mmmn = match_map_model_ncs()
    new_mmmn.add_map_manager(mmmn_dcdc.map_manager())
    new_mmmn.add_model(mmmn_dcdc.model())
    new_mmmn.add_ncs_object(mmmn_dcdc.ncs_object())
    assert (new_mmmn.ncs_object().shift_cart() ==
            new_mmmn.map_manager().shift_cart())

    mmmn_dcdc = mmmn.deep_copy()
    new_mmmn = match_map_model_ncs()
    new_mmmn.add_map_manager(mmmn_dcdc.map_manager())
    new_mmmn.add_model(mmmn_dcdc.model())
    new_mmmn.add_ncs_object(ncs_dc)
    assert (new_mmmn.ncs_object().shift_cart() ==
            new_mmmn.map_manager().shift_cart())

    original_ncs = mmmn.ncs_object()
    assert approx_equal(
        (24.0528, 11.5833, 20.0004),
        tuple(original_ncs.ncs_groups()[0].translations_orth()[-1]),
        eps=0.1)

    assert tuple(mmmn._map_manager.origin_shift_grid_units) == (0, 0, 0)

    # Shift origin to (0,0,0)
    mmmn = mmmn_dc.deep_copy()  # fresh version of match_map_model_ncs
    mmmn.shift_origin()
    new_ncs = mmmn.ncs_object()
    assert tuple(mmmn._map_manager.origin_shift_grid_units) == (100, 100, 100)

    mmmn.write_model('s.pdb')
    mmmn.write_map('s.mrc')

    shifted_ncs = mmmn.ncs_object()
    assert approx_equal(
        (-153.758, -74.044, -127.487),
        tuple(shifted_ncs.ncs_groups()[0].translations_orth()[-1]),
        eps=0.1)

    # Shift a model and shift it back

    mmmn = mmmn_dc.deep_copy()  # fresh version of match_map_model_ncs
    model = mmmn.model()
    shifted_model = mmmn.shift_model_to_match_working_map(model=model)
    model_in_original_position = mmmn.shift_model_to_match_original_map(
        model=shifted_model)
    assert (approx_equal(
        model.get_sites_cart(),  # not a copy
        shifted_model.get_sites_cart()))
    assert approx_equal(model.get_sites_cart(),
                        model_in_original_position.get_sites_cart())

    # Generate a map and model

    import sys
    mmm = map_model_manager(log=sys.stdout)
    mmm.generate_map()
    model = mmm.model()
    mm = mmm.map_manager()
    assert approx_equal(model.get_sites_cart()[0], (14.476, 10.57, 8.34),
                        eps=0.01)
    assert approx_equal(mm.map_data()[10, 10, 10], -0.0195, eps=0.001)
    # Save it
    mmm_dc = mmm.deep_copy()

    # Check on wrapping
    # this one should not wrap because it is zero at edges
    assert not mm.wrapping()

    # Make a new one with no buffer so it is not zero at edges
    mmm = map_model_manager()
    mmm.generate_map(box_cushion=0)
    mm = mmm.map_manager()
    # check its compatibility with wrapping
    assert mm.is_consistent_with_wrapping()
    mmm.show_summary()

    # now box it
    sel = mmm.model().selection("resseq 221:221")
    new_model = mmm.model().deep_copy().select(sel)
    new_mmm = map_model_manager(model=new_model, map_manager=mm.deep_copy())
    new_mmm.box_all_maps_around_model_and_shift_origin()
    new_mm = new_mmm.map_manager()

    assert not new_mm.wrapping()
    assert not new_mm.is_consistent_with_wrapping()

    # now box it with selection
    new_mmm_1 = map_model_manager(model=mmm.model().deep_copy(),
                                  map_manager=mm.deep_copy())
    new_mmm_1.box_all_maps_around_model_and_shift_origin(
        selection_string="resseq 221:221")
    new_mm_1 = new_mmm_1.map_manager()

    assert not new_mm_1.wrapping()
    assert not new_mm_1.is_consistent_with_wrapping()
    assert new_mm_1.map_data().all() == new_mm.map_data().all()

    # create map_model_manager with just half-maps
    mm1 = mm.deep_copy()
    mm2 = mm.deep_copy()
    map_data = mm2.map_data()
    map_data += 1.
    new_mmm = map_model_manager(model=mmm.model().deep_copy(),
                                map_manager_1=mm1,
                                map_manager_2=mm2)
    # should not be any yet
    assert new_mmm._map_dict.get('map_manager') is None
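    # Requesting map_manager() when only two half-maps are present builds the
    # full map from them (their average here, hence the original values plus
    # 0.5, since one half-map was offset by +1 above).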
    assert approx_equal(new_mmm.map_manager().map_data()[232],
                        mm.deep_copy().map_data()[232] + 0.5)
    # now should be there
    assert new_mmm._map_dict.get('map_manager') is not None

    # generate map data from a model
    mm1 = mm.deep_copy()
    mm2 = mm.deep_copy()
    new_mmm = map_model_manager(model=mmm.model().deep_copy(), map_manager=mm1)
    mmm.generate_map(model=mmm.model())
    mm = mmm.map_manager()
    mmm.show_summary()

    # check get_map_model_manager function
    dm = DataManager(['model'])
    assert not hasattr(dm, 'get_map_model_manager')
    dm = DataManager(['real_map'])
    assert not hasattr(dm, 'get_map_model_manager')
    dm = DataManager(['sequence'])
    assert not hasattr(dm, 'get_map_model_manager')
    dm = DataManager(['model', 'real_map'])
    assert hasattr(dm, 'get_map_model_manager')

    # usage
    dm.get_map_model_manager(model_file=data_pdb, map_files=data_ccp4)
    dm.get_map_model_manager(model_file=data_pdb, map_files=[data_ccp4])
    dm.get_map_model_manager(model_file=data_pdb,
                             map_files=[data_ccp4, data_ccp4, data_ccp4])
    dm.get_map_model_manager(model_file=data_pdb,
                             map_files=data_ccp4,
                             ignore_symmetry_conflicts=True)

    # errors
    try:
        dm.get_map_model_manager(model_file=data_pdb,
                                 map_files=data_ccp4,
                                 from_phil=True)
    except Sorry as e:
        assert 'from_phil is set to True' in str(e)
    try:
        dm.get_map_model_manager(model_file=data_pdb,
                                 map_files=data_ccp4,
                                 abc=123)
    except TypeError as e:
        assert 'unexpected keyword argument' in str(e)
    try:
        dm.get_map_model_manager(model_file=data_pdb,
                                 map_files=[data_ccp4, data_ccp4])
    except Sorry as e:
        assert '1 full map and 2 half maps' in str(e)

    # PHIL
    class test_program(ProgramTemplate):
        master_phil_str = '''
include scope iotbx.map_model_manager.map_model_phil_str
'''

    working_phil_str = '''
  map_model {
    full_map = %s
    half_map = %s
    half_map = s.mrc
    model = %s
  }
''' % (data_ccp4, data_ccp4, data_pdb)
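    # The working PHIL above supplies one full map, two half-maps (data_ccp4
    # plus the s.mrc file written earlier) and a model, so the
    # get_map_model_manager(from_phil=True) calls below read everything from
    # the extracted parameters instead of explicit file arguments.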

    master_phil = parse(test_program.master_phil_str, process_includes=True)
    working_phil = master_phil.fetch(parse(working_phil_str))
    tp = test_program(dm, working_phil.extract())

    try:
        dm.get_map_model_manager(from_phil=True)
    except Exception as e:
        assert 'ignore_symmetry_conflicts' in str(e)
    try:
        dm.get_map_model_manager(from_phil=True,
                                 ignore_symmetry_conflicts=True)
    except AssertionError:
        pass
def generate_model(file_name=None,
                   n_residues=None,
                   start_res=None,
                   b_iso=30,
                   box_cushion=5,
                   space_group_number=1,
                   output_model_file_name=None,
                   shake=None,
                   random_seed=None,
                   log=sys.stdout):
    '''
    generate_model: simple utility for generating a model for testing purposes.

    This function is typically accessed and tested through map_model_manager.

    Summary
    -------

    Generate a model from a user-specified file or from one of the example
    files available in the cctbx. Cut out the specified number of residues,
    shift the model to place it on the positive side of the origin, optionally
    set all B-values to b_iso, place the model in a box with a buffer of
    box_cushion on all edges, optionally randomly shift (shake) atomic
    positions by an rms of shake (A), write the result to
    output_model_file_name, and return the model object.

    Parameters:

      file_name (string, None):      File containing model (PDB, CIF format)
      n_residues (int, 10):          Number of residues to include
      start_res (int, None):         Starting residue number
      b_iso (float, 30):             B-value (ADP) to use for all atoms
      box_cushion (float, 5):        Buffer (A) around model
      space_group_number (int, 1):   Space group to use
      output_model_file_name (string, None):  File for output model
      shake (float, None):           RMS variation (A) to add in shake
      random_seed (int, None):       Random seed for shake

    Returns:
      model.manager object (model) in a box defined by a crystal_symmetry
      object
    '''

    # Get the parameters

    space_group_number = int(space_group_number)
    if n_residues is not None:
        n_residues = int(n_residues)
    box_cushion = float(box_cushion)
    if start_res:
        start_res = int(start_res)
    if shake:
        shake = float(shake)
    if random_seed:
        random_seed = int(random_seed)
        import random
        random.seed(random_seed)
        random_seed = random.randint(1, 714717)
        flex.set_random_seed(random_seed)

    # Choose file with coordinates

    if not file_name:
        if not n_residues:
            n_residues = 10  # default
        import libtbx.load_env
        iotbx_regression = os.path.join(
            libtbx.env.find_in_repositories("iotbx"), 'regression')
        if n_residues < 25:
            file_name = os.path.join(iotbx_regression, 'secondary_structure',
                                     '5a63_chainBp.pdb')  # starts at 219
            if not start_res: start_res = 219
        elif n_residues < 167:
            file_name = os.path.join(iotbx_regression, 'secondary_structure',
                                     '3jd6_noh.pdb')  # starts at 58
            if not start_res: start_res = 58
        else:
            file_name = os.path.join(iotbx_regression, 'secondary_structure',
                                     '4a7h_chainC.pdb')  # starts at 9
            if not start_res: start_res = 9
    else:  # have file_name
        if start_res is None:
            start_res = 1
        if not n_residues:
            n_residues = 100000  #  a big number

    # Read in coordinates and cut out the part of the model we want

    from iotbx.data_manager import DataManager

    dm = DataManager(['model'])
    dm.process_model_file(file_name)
    model = dm.get_model(file_name)

    selection = model.selection('resseq %s:%s' %
                                (start_res, start_res + n_residues - 1))
    model = model.select(selection)

    # shift the model and return it with new crystal_symmetry
    from cctbx.maptbx.box import shift_and_box_model
    model = shift_and_box_model(model=model, box_cushion=box_cushion)

    if b_iso is not None:
        b_values = flex.double(model.get_sites_cart().size(), b_iso)
        ph = model.get_hierarchy()
        ph.atoms().set_b(b_values)

    # Optionally shake model
    if shake:
        model = shake_model(model, shake=shake)

    if output_model_file_name:
        with open(output_model_file_name, 'w') as f:
            print("%s" % (model.model_as_pdb()), file=f)
        print("Writing model with %s residues and b_iso=%s from %s to %s" %
              (n_residues, b_iso, file_name, output_model_file_name),
              file=log)
    else:
        print("Generated model with %s residues and b_iso=%s from %s " %
              (n_residues, b_iso, file_name),
              file=log)
    return model
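A brief usage sketch for generate_model (an assumed addition, not part of the original listing). It builds a 10-residue model from the bundled iotbx regression file, boxes it with a 5 A cushion, and writes it out; it assumes the regression files are available and that flex is imported at module level as in the original source file:

test_model = generate_model(
    n_residues=10,
    b_iso=25,
    box_cushion=5,
    output_model_file_name='generated_model.pdb')
print(test_model.get_hierarchy().overall_counts().n_residues)  # expect 10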
Example 5
def test_01():

    # Imports needed by this excerpt (standard cctbx locations)
    from iotbx.data_manager import DataManager
    from iotbx.map_manager import map_manager
    from libtbx.test_utils import approx_equal

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_ccp4 = os.path.join(data_dir, 'data', 'non_zero_origin_map.ccp4')
    data_pdb = os.path.join(data_dir, 'data', 'non_zero_origin_model.pdb')

    dm = DataManager(['miller_array', 'real_map', 'phil'])
    dm.set_overwrite(True)
    dm.process_real_map_file(data_ccp4)

    # test writing and reading file
    mm = dm.get_real_map()
    mm.shift_origin()
    mm.show_summary()
    dm.write_real_map_file(mm,
                           filename='test_map_manager.ccp4',
                           overwrite=True)
    os.remove('test_map_manager.ccp4')

    # test writing and reading file without shifting origin
    dm = DataManager(['miller_array', 'real_map', 'phil'])
    dm.set_overwrite(True)
    dm.process_real_map_file(data_ccp4)
    mm = dm.get_real_map()
    mm.show_summary()
    dm.write_real_map_file(mm,
                           filename='test_map_manager.ccp4',
                           overwrite=True)

    new_mm = map_manager('test_map_manager.ccp4')
    assert (new_mm.is_similar(mm))
    new_mm.shift_origin()
    assert (not new_mm.is_similar(mm))

    # get map_data
    mm.shift_origin()
    map_data = mm.map_data()
    assert approx_equal(map_data[15, 10, 19], 0.38, eps=0.01)

    # get crystal_symmetry
    cs = mm.crystal_symmetry()
    assert approx_equal(cs.unit_cell().parameters()[0], 22.41, eps=0.01)

    # and full cell symmetry
    full_cs = mm.unit_cell_crystal_symmetry()
    assert approx_equal(full_cs.unit_cell().parameters()[0],
                        149.4066,
                        eps=0.01)

    # write map directly:
    mm.write_map('test_direct.ccp4')

    # read back directly
    new_mm = map_manager('test_direct.ccp4')
    assert (not new_mm.is_similar(mm))

    new_mm.shift_origin()
    assert mm.is_similar(new_mm)

    # deep_copy
    new_mm = mm.deep_copy()
    assert new_mm.is_similar(mm)

    # deep_copy a map without shifting origin
    dm = DataManager(['miller_array', 'real_map', 'phil'])
    dm.set_overwrite(True)
    dm.process_real_map_file(data_ccp4)
    omm = dm.get_real_map()
    omm.show_summary()
    new_omm = omm.deep_copy()
    assert new_omm.is_similar(omm)
    assert (not new_omm.is_similar(mm))

    # customized_copy
    new_mm = mm.customized_copy(map_data=mm.map_data().deep_copy())
    assert new_mm.is_similar(mm)

    # Initialize with parameters
    mm_para = map_manager(
        unit_cell_grid=mm.unit_cell_grid,
        unit_cell_crystal_symmetry=mm.unit_cell_crystal_symmetry(),
        origin_shift_grid_units=mm.origin_shift_grid_units,
        map_data=mm.map_data())
    assert mm_para.is_similar(mm)

    # Adjust origin and gridding:
    mm_read = map_manager(data_ccp4)
    mm_read.shift_origin()
    mm.show_summary()
    mm_read.show_summary()
    mm_read.set_original_origin_and_gridding((10, 10, 10),
                                             gridding=(100, 100, 100))
    mm_read.show_summary()
    assert (not mm_read.is_similar(mm))
    assert (mm_read.origin_is_zero())

    # Set program name
    mm_read.set_program_name('test program')
    assert mm_read.program_name == 'test program'

    # Set limitation
    mm_read.add_limitation('map_is_sharpened')
    assert mm_read.limitations == ['map_is_sharpened']

    # Add a label
    mm_read.add_label('TEST LABEL')
    assert mm_read.labels[0] == 'TEST LABEL'
    mm_read.write_map('map_with_labels.mrc')
    new_mm = map_manager('map_with_labels.mrc')
    assert 'TEST LABEL' in new_mm.labels
    assert new_mm.is_in_limitations('map_is_sharpened')
    assert new_mm.labels[0].find('test program') > -1

    # Read a map directly
    mm_read = map_manager(data_ccp4)
    mm_read.shift_origin()
    assert mm_read.is_similar(mm)

    # Set log
    import sys
    mm.set_log(sys.stdout)

    # Add map_data
    new_mm = mm_read.customized_copy(map_data=mm.map_data().deep_copy())
    assert new_mm.is_similar(mm)

    # replace data
    new_mm.set_map_data(map_data=mm.map_data().deep_copy())
    assert new_mm.is_similar(mm)

    # Apply a mask to edges of a map
    assert approx_equal(new_mm.map_data().as_1d().min_max_mean().max,
                        mm.map_data().as_1d().min_max_mean().max)
    assert approx_equal((new_mm.map_data()[0], mm.map_data()[0]), (0.0, 0.0))
    new_mm.create_mask_around_edges(soft_mask_radius=3)
    new_mm.soft_mask(soft_mask_radius=3)
    assert approx_equal(new_mm.map_data().as_1d().min_max_mean().max,
                        mm.map_data().as_1d().min_max_mean().max)
    new_mm.apply_mask(set_outside_to_mean_inside=True)
    assert approx_equal((new_mm.map_data()[0], mm.map_data()[0]),
                        (0.0116267086024, 0.0))

    dm.process_real_map_file('test_map_manager.ccp4')
    new_mm = dm.get_real_map('test_map_manager.ccp4')
    new_mm.show_summary()
    assert (not new_mm.is_similar(mm))
    new_mm.shift_origin()
    new_mm.show_summary()
    assert new_mm.is_similar(mm)
    os.remove('test_map_manager.ccp4')

    # Check origin_shifts
    print(new_mm.origin_shift_grid_units)
    print(new_mm.origin_shift_cart())
    assert approx_equal(new_mm.origin_shift_grid_units, (100, 100, 100))
    assert approx_equal(
        new_mm.origin_shift_cart(),
        (74.70333099365234, 72.30750274658205, 73.7437515258789))
    # Convert to map coeffs, write out, read back, convert back to map

    map_coeffs = mm.map_as_fourier_coefficients(high_resolution=3)
    mtz_dataset = map_coeffs.as_mtz_dataset(column_root_label='F')
    mtz_object = mtz_dataset.mtz_object()
    dm.write_miller_array_file(mtz_object, filename="map_coeffs.mtz")
    # Note these Fourier coeffs correspond to working map (not original position)

    array_labels = dm.get_miller_array_labels("map_coeffs.mtz")
    labels = array_labels[0]
    dm.get_reflection_file_server(filenames=["map_coeffs.mtz"],
                                  labels=[labels])
    miller_arrays = dm.get_miller_arrays()
    new_map_coeffs = miller_arrays[0]
    map_data_from_map_coeffs = mm.fourier_coefficients_as_map(
        map_coeffs=new_map_coeffs)

    mm_from_map_coeffs = mm.customized_copy(map_data=map_data_from_map_coeffs)
    assert mm_from_map_coeffs.is_similar(mm)
def test_model_datatype():
    # Names assumed to come from the original test module's imports:
    #   DataManager (iotbx.data_manager), Sorry (libtbx.utils), iotbx.phil,
    #   mmtbx.model
    import mmtbx.monomer_library.server
    try:
        mon_lib_srv = mmtbx.monomer_library.server.server()
    except mmtbx.monomer_library.server.MonomerLibraryServerError:
        print(
            "Can not initialize monomer_library, skipping test_model_datatype."
        )
        return

    # 1yjp
    model_str = '''
CRYST1   21.937    4.866   23.477  90.00 107.08  90.00 P 1 21 1      2
ORIGX1      1.000000  0.000000  0.000000        0.00000
ORIGX2      0.000000  1.000000  0.000000        0.00000
ORIGX3      0.000000  0.000000  1.000000        0.00000
SCALE1      0.045585  0.000000  0.014006        0.00000
SCALE2      0.000000  0.205508  0.000000        0.00000
SCALE3      0.000000  0.000000  0.044560        0.00000
ATOM      1  N   GLY A   1      -9.009   4.612   6.102  1.00 16.77           N
ATOM      2  CA  GLY A   1      -9.052   4.207   4.651  1.00 16.57           C
ATOM      3  C   GLY A   1      -8.015   3.140   4.419  1.00 16.16           C
ATOM      4  O   GLY A   1      -7.523   2.521   5.381  1.00 16.78           O
ATOM      5  N   ASN A   2      -7.656   2.923   3.155  1.00 15.02           N
ATOM      6  CA  ASN A   2      -6.522   2.038   2.831  1.00 14.10           C
ATOM      7  C   ASN A   2      -5.241   2.537   3.427  1.00 13.13           C
ATOM      8  O   ASN A   2      -4.978   3.742   3.426  1.00 11.91           O
ATOM      9  CB  ASN A   2      -6.346   1.881   1.341  1.00 15.38           C
ATOM     10  CG  ASN A   2      -7.584   1.342   0.692  1.00 14.08           C
ATOM     11  OD1 ASN A   2      -8.025   0.227   1.016  1.00 17.46           O
ATOM     12  ND2 ASN A   2      -8.204   2.155  -0.169  1.00 11.72           N
ATOM     13  N   ASN A   3      -4.438   1.590   3.905  1.00 12.26           N
ATOM     14  CA  ASN A   3      -3.193   1.904   4.589  1.00 11.74           C
ATOM     15  C   ASN A   3      -1.955   1.332   3.895  1.00 11.10           C
ATOM     16  O   ASN A   3      -1.872   0.119   3.648  1.00 10.42           O
ATOM     17  CB  ASN A   3      -3.259   1.378   6.042  1.00 12.15           C
ATOM     18  CG  ASN A   3      -2.006   1.739   6.861  1.00 12.82           C
ATOM     19  OD1 ASN A   3      -1.702   2.925   7.072  1.00 15.05           O
ATOM     20  ND2 ASN A   3      -1.271   0.715   7.306  1.00 13.48           N
ATOM     21  N   GLN A   4      -1.005   2.228   3.598  1.00 10.29           N
ATOM     22  CA  GLN A   4       0.384   1.888   3.199  1.00 10.53           C
ATOM     23  C   GLN A   4       1.435   2.606   4.088  1.00 10.24           C
ATOM     24  O   GLN A   4       1.547   3.843   4.115  1.00  8.86           O
ATOM     25  CB  GLN A   4       0.656   2.148   1.711  1.00  9.80           C
ATOM     26  CG  GLN A   4       1.944   1.458   1.213  1.00 10.25           C
ATOM     27  CD  GLN A   4       2.504   2.044  -0.089  1.00 12.43           C
ATOM     28  OE1 GLN A   4       2.744   3.268  -0.190  1.00 14.62           O
ATOM     29  NE2 GLN A   4       2.750   1.161  -1.091  1.00  9.05           N
ATOM     30  N   GLN A   5       2.154   1.821   4.871  1.00 10.38           N
ATOM     31  CA  GLN A   5       3.270   2.361   5.640  1.00 11.39           C
ATOM     32  C   GLN A   5       4.594   1.768   5.172  1.00 11.52           C
ATOM     33  O   GLN A   5       4.768   0.546   5.054  1.00 12.05           O
ATOM     34  CB  GLN A   5       3.056   2.183   7.147  1.00 11.96           C
ATOM     35  CG  GLN A   5       1.829   2.950   7.647  1.00 10.81           C
ATOM     36  CD  GLN A   5       1.344   2.414   8.954  1.00 13.10           C
ATOM     37  OE1 GLN A   5       0.774   1.325   9.002  1.00 10.65           O
ATOM     38  NE2 GLN A   5       1.549   3.187  10.039  1.00 12.30           N
ATOM     39  N   ASN A   6       5.514   2.664   4.856  1.00 11.99           N
ATOM     40  CA  ASN A   6       6.831   2.310   4.318  1.00 12.30           C
ATOM     41  C   ASN A   6       7.854   2.761   5.324  1.00 13.40           C
ATOM     42  O   ASN A   6       8.219   3.943   5.374  1.00 13.92           O
ATOM     43  CB  ASN A   6       7.065   3.016   2.993  1.00 12.13           C
ATOM     44  CG  ASN A   6       5.961   2.735   2.003  1.00 12.77           C
ATOM     45  OD1 ASN A   6       5.798   1.604   1.551  1.00 14.27           O
ATOM     46  ND2 ASN A   6       5.195   3.747   1.679  1.00 10.07           N
ATOM     47  N   TYR A   7       8.292   1.817   6.147  1.00 14.70           N
ATOM     48  CA  TYR A   7       9.159   2.144   7.299  1.00 15.18           C
ATOM     49  C   TYR A   7      10.603   2.331   6.885  1.00 15.91           C
ATOM     50  O   TYR A   7      11.041   1.811   5.855  1.00 15.76           O
ATOM     51  CB  TYR A   7       9.061   1.065   8.369  1.00 15.35           C
ATOM     52  CG  TYR A   7       7.665   0.929   8.902  1.00 14.45           C
ATOM     53  CD1 TYR A   7       6.771   0.021   8.327  1.00 15.68           C
ATOM     54  CD2 TYR A   7       7.210   1.756   9.920  1.00 14.80           C
ATOM     55  CE1 TYR A   7       5.480  -0.094   8.796  1.00 13.46           C
ATOM     56  CE2 TYR A   7       5.904   1.649  10.416  1.00 14.33           C
ATOM     57  CZ  TYR A   7       5.047   0.729   9.831  1.00 15.09           C
ATOM     58  OH  TYR A   7       3.766   0.589  10.291  1.00 14.39           O
ATOM     59  OXT TYR A   7      11.358   2.999   7.612  1.00 17.49           O
TER      60      TYR A   7
HETATM   61  O   HOH A   8      -6.471   5.227   7.124  1.00 22.62           O
HETATM   62  O   HOH A   9      10.431   1.858   3.216  1.00 19.71           O
HETATM   63  O   HOH A  10     -11.286   1.756  -1.468  1.00 17.08           O
HETATM   64  O   HOH A  11      11.808   4.179   9.970  1.00 23.99           O
HETATM   65  O   HOH A  12      13.605   1.327   9.198  1.00 26.17           O
HETATM   66  O   HOH A  13      -2.749   3.429  10.024  1.00 39.15           O
HETATM   67  O   HOH A  14      -1.500   0.682  10.967  1.00 43.49           O
MASTER      238    0    0    0    0    0    0    6   66    1    0    1
END
'''

    # test reading/writing PDB
    test_filename = 'test_model.pdb'
    test_eff = 'model.eff'
    dm = DataManager(['model'])
    dm.process_model_str(test_filename, model_str)
    dm.write_model_file(model_str, filename=test_filename, overwrite=True)
    assert (test_filename in dm.get_model_names())

    # test type
    assert (dm.get_model_type() == 'x_ray')
    dm.set_model_type(test_filename, 'neutron')
    assert (dm.get_model_type() == 'neutron')
    phil_scope = dm.export_phil_scope()
    extract = phil_scope.extract()
    assert (extract.data_manager.model[0].type == 'neutron')
    with open(test_eff, 'w') as f:
        f.write(phil_scope.as_str())
    new_phil_scope = iotbx.phil.parse(file_name=test_eff)
    new_dm = DataManager(['model'])
    new_dm.load_phil_scope(new_phil_scope)
    assert (new_dm.get_model_type(test_filename) == 'neutron')
    new_dm = DataManager(['model'])
    try:
        new_dm.set_default_model_type('nonsense')
    except Sorry:
        pass
    new_dm.set_default_model_type('electron')
    new_dm.process_model_file(test_filename)
    assert (new_dm.get_model_type() == 'electron')
    assert (len(new_dm.get_model_names()) == 1)
    assert (len(new_dm.get_model_names(model_type='electron')) == 1)
    assert (len(new_dm.get_model_names(model_type='neutron')) == 0)

    os.remove(test_eff)
    os.remove(test_filename)

    # test reading/writing CIF
    test_filename = 'test_model_datatype.cif'
    dm.write_model_file(dm.get_model().model_as_mmcif(),
                        filename=test_filename,
                        overwrite=True)
    dm.process_model_file(test_filename)
    os.remove(test_filename)
    assert (test_filename in dm.get_model_names())

    # test pdb_interpretation
    extract = mmtbx.model.manager.get_default_pdb_interpretation_params()
    extract.pdb_interpretation.use_neutron_distances = True
    dm.update_pdb_interpretation_for_model(test_filename, extract)
    assert (dm.get_model(test_filename).restraints_manager is None)
Example 7
def test_default_filenames():
  # Names assumed to come from the original test module's imports:
  #   DataManager (iotbx.data_manager), iotbx.phil, ProgramTemplate, os
  datatypes = ['model', 'ncs_spec', 'phil', 'real_map', 'restraint', 'sequence']
  extensions = ['cif', 'ncs_spec', 'eff', 'mrc', 'cif', 'seq']
  dm = DataManager(datatypes)
  for datatype, extension in zip(datatypes, extensions):
    filename = getattr(dm, 'get_default_output_{datatype}_filename'.
                       format(datatype=datatype))()
    assert filename == 'cctbx_program.' + extension

  filename = dm.get_default_output_model_filename(extension='.abc')
  assert filename == 'cctbx_program.abc'

  class TestProgram(ProgramTemplate):
    master_phil_str = """
output {
  serial = 0
    .type = int
}
"""
  master_phil = iotbx.phil.parse(TestProgram.master_phil_str)
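  # Adopt the required output scope into the master PHIL, then instantiate the
  # program so the DataManager picks up the default output naming exercised
  # below.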
  required_output_phil = iotbx.phil.parse(ProgramTemplate.output_phil_str)
  master_phil.adopt_scope(required_output_phil)
  working_phil = iotbx.phil.parse(ProgramTemplate.master_phil_str)
  params = master_phil.fetch(working_phil).extract()
  p = ProgramTemplate(dm, params, master_phil)
  assert dm.get_default_output_filename() == 'cctbx_program_000'
  dm.set_overwrite(True)
  dm.write_model_file('abc')    # cctbx_program_000.cif
  dm.write_phil_file('123')     # cctbx_program_000.eff
  dm.write_phil_file('456')     # cctbx_program_001.eff
  dm.write_model_file('def')    # cctbx_program_001.cif
  assert dm.get_default_output_filename() == 'cctbx_program_001'
  dm.write_sequence_file('ghi') # cctbx_program_001.seq
  dm.write_sequence_file('hkl') # cctbx_program_002.seq
  assert dm.get_default_output_filename() == 'cctbx_program_002'
  assert os.path.isfile('cctbx_program_000.cif')
  assert os.path.isfile('cctbx_program_001.cif')
  assert os.path.isfile('cctbx_program_000.eff')
  assert os.path.isfile('cctbx_program_001.eff')
  assert os.path.isfile('cctbx_program_001.seq')
  assert os.path.isfile('cctbx_program_002.seq')
  os.remove('cctbx_program_000.cif')
  os.remove('cctbx_program_001.cif')
  os.remove('cctbx_program_000.eff')
  os.remove('cctbx_program_001.eff')
  os.remove('cctbx_program_001.seq')
  os.remove('cctbx_program_002.seq')

  # test output.filename, output.file_name
  assert p.get_default_output_filename() == 'cctbx_program_002'
  assert p.get_default_output_filename(filename='abc') == 'abc'
  working_phil_str = 'output.filename=def'
  working_phil = iotbx.phil.parse(working_phil_str)
  params = master_phil.fetch(working_phil).extract()
  p = ProgramTemplate(dm, params, master_phil)
  assert params.output.filename == params.output.file_name == 'def'
  assert p.get_default_output_filename() == 'def'
  assert dm.get_default_output_filename() == 'def'
  working_phil_str = 'output.file_name=ghi'
  working_phil = iotbx.phil.parse(working_phil_str)
  params = master_phil.fetch(working_phil).extract()
  p = ProgramTemplate(dm, params, master_phil)
  assert params.output.filename == params.output.file_name == 'ghi'
  assert p.get_default_output_filename() == 'ghi'
  assert dm.get_default_output_filename() == 'ghi'
Example 8
def exercise(file_name, out = sys.stdout):

  # Names assumed to come from the original test module's imports:
  #   DataManager (iotbx.data_manager), model_manager (mmtbx.model),
  #   flex (scitbx.array_family), approx_equal (libtbx.test_utils),
  #   Sorry (libtbx.utils)

  # Set up source data
  if not os.path.isfile(file_name):
    raise Sorry("Missing the file: %s" %(file_name)+"\n")

  print ("Reading from %s" %(file_name))
  from iotbx.map_manager import map_manager

  m = map_manager(file_name)

  # make a little model
  sites_cart = flex.vec3_double( ((8, 10, 12), (14, 15, 16)))
  model = model_manager.from_sites_cart(
         atom_name = ' CA ',
         resname = 'ALA',
         chain_id = 'A',
         b_iso = 30.,
         occ = 1.,
         scatterer = 'C',
         sites_cart = sites_cart,
         crystal_symmetry = m.crystal_symmetry())

  # make a map_model_manager with lots of maps and model and ncs
  from iotbx.map_model_manager import map_model_manager

  from mmtbx.ncs.ncs import ncs
  ncs_object=ncs()
  ncs_object.set_unit_ncs()
  mask_mm=m.deep_copy()
  mask_mm.set_is_mask(True)
  mam = map_model_manager(
          map_manager =  m,
          ncs_object =  ncs_object,
          map_manager_1 =  m.deep_copy(),
          map_manager_2 =  m.deep_copy(),
          extra_map_manager_list =  [m.deep_copy(),m.deep_copy(),m.deep_copy()],
          extra_map_manager_id_list = ["extra_1","extra_2","map_manager_mask"],
          model     = model.deep_copy(),)
  print (mam.map_manager())
  print (mam.model())
  print (mam.map_manager_1())
  print (mam.map_manager_2())
  print (mam.map_manager_mask())
  print (mam.map_manager().ncs_object())
  all_map_names=mam.map_id_list()
  for id in all_map_names:
    print("Map_manager %s: %s " %(id,mam.get_map_manager_by_id(id)))

  dm = DataManager(['model','miller_array', 'real_map', 'phil','ncs_spec'])
  dm.set_overwrite(True)

  # Create a model with ncs
  from iotbx.regression.ncs.tst_ncs import pdb_str_5
  file_name='tst_mam.pdb'
  f=open(file_name,'w')
  print (pdb_str_5, file = f)
  f.close()

  # Generate map data from this model (it has ncs)
  mmm=map_model_manager()
  mmm.generate_map(box_cushion=0, file_name=file_name,n_residues=500)
  ncs_mam=mmm.deep_copy()
  ncs_mam_copy=mmm.deep_copy()

  # Make sure this model has 126 sites (42 sites times 3-fold ncs)
  assert ncs_mam.model().get_sites_cart().size() == 126
  assert approx_equal (ncs_mam.model().get_sites_cart()[0],
    (23.560999999999996, 8.159, 10.660000000000002))

  # Get just unique part (42 sites)
  unique_mam=ncs_mam.extract_all_maps_around_model(select_unique_by_ncs=True)
  assert unique_mam.model().get_sites_cart().size() == 42
  assert approx_equal (unique_mam.model().get_sites_cart()[0],
    (18.740916666666664, 13.1794, 16.10544))

  # Make sure that the extraction did not change the original but does change
  #   the extracted part
  assert (unique_mam.model().get_sites_cart()[0] !=
     ncs_mam.model().get_sites_cart()[0])  # it was a deep copy so original stays

  # Shift back the extracted part and make sure it matches the original now
  shifted_back_unique_model=mmm.get_model_from_other(unique_mam.deep_copy())
  assert approx_equal (shifted_back_unique_model.get_sites_cart()[0],
    (23.560999999999996, 8.158999999999997, 10.66))

  # Change the extracted model
  sites_cart=unique_mam.model().get_sites_cart()
  sites_cart[0]=(1,1,1)
  unique_mam.model().get_hierarchy().atoms().set_xyz(sites_cart)
  # Note; setting xyz in hierarchy does not set xrs by itself. do that now:
  unique_mam.model().set_sites_cart_from_hierarchy(multiply_ncs=False)

  # Make sure we really changed it
  assert approx_equal (unique_mam.model().get_sites_cart()[0], (1,1,1))

  # Now propagate all the changes in this unique part to entire original model
  #   using NCS
  ncs_mam.propagate_model_from_other(other = unique_mam,
    model_id = 'model',
    other_model_id = 'model')
  # ...and check that copy 1 and copy 2 both change
  assert approx_equal (ncs_mam.model().get_sites_cart()[0],
     (5.820083333333333, -4.020400000000001, -4.445440000000001))
  assert approx_equal (ncs_mam.model().get_sites_cart()[42],
     (38.41904613024224, 17.233251085893276, 2.5547442135142524))

  # Find ncs from map or model
  nn=ncs_mam_copy
  nn.write_map('ncs.ccp4')
  nn.write_model('ncs.pdb')
  ncs_object=nn.get_ncs_from_model()
  dm.write_ncs_spec_file(ncs_object,'ncs.ncs_spec')
  print ("NCS from map",ncs_object)
  nn.set_ncs_object(ncs_object)
  print ("NCS now: ",nn.ncs_object())
  nn.get_ncs_from_map(ncs_object=ncs_object)
  print ("ncs cc:",nn.ncs_cc())
  assert approx_equal(nn.ncs_cc(),0.961915979834,eps=0.01)

  # Make a deep_copy
  dc=mam.deep_copy()
  new_mam=mam.deep_copy()
  assert mam.map_manager().map_data()[0]==new_mam.map_manager().map_data()[0]

  # Make a customized_copy
  new_mam=mam.customized_copy(model_dict={'model':mam.model()})
  assert new_mam.model() is mam.model()
  assert not new_mam.map_dict() is mam.map_dict()

  new_mam=mam.customized_copy(model_dict={'model':mam.model()},
    map_dict=mam.map_dict())
  assert new_mam.model() is mam.model()
  assert new_mam.map_dict() is mam.map_dict()
  print (mam)

  # Add a map
  mam = dc.deep_copy()
  print (mam.map_id_list())
  assert len(mam.map_id_list()) == 6
  mam.add_map_manager_by_id(mam.map_manager().deep_copy(),'new_map_manager')
  print (mam.map_id_list())
  assert len(mam.map_id_list()) == 7

  # duplicate a map
  mam = dc.deep_copy()
  print (mam.map_id_list())
  assert len(mam.map_id_list()) == 6
  mam.duplicate_map_manager('map_manager','new_map_manager')
  print (mam.map_id_list())
  assert len(mam.map_id_list()) == 7

  # resolution_filter a map
  mam = dc.deep_copy()
  print (mam.map_id_list())
  mam.duplicate_map_manager('map_manager','new_map_manager')
  mam.resolution_filter(map_id='new_map_manager',d_min=3.5,d_max=6)

  # Add a model
  mam = dc.deep_copy()
  print (mam.model_id_list())
  assert len(mam.model_id_list()) == 1
  mam.add_model_by_id(mam.model().deep_copy(),'new_model')
  print (mam.model_id_list())
  assert len(mam.model_id_list()) == 2

  # Initialize a map
  mam1=new_mam.deep_copy()
  mam1.initialize_maps(map_value=6)
  assert mam1.map_manager().map_data()[225] == 6

  # Create mask around density and apply to all maps
  mam1=new_mam.deep_copy()
  mam1.mask_all_maps_around_density(solvent_content=0.5,
    soft_mask=False,)
  s = (mam1.get_map_manager_by_id('mask').map_data() > 0.5)
  assert approx_equal( (s.count(True),s.size()), (1024,2048))

  # Create soft mask around density and apply to all maps
  mam1=new_mam.deep_copy()
  mam1.mask_all_maps_around_density(solvent_content=0.5,
    soft_mask=True,)
  s = (mam1.get_map_manager_by_id('mask').map_data() > 0.5)

  # Create mask around edges and apply to all maps
  mam1=new_mam.deep_copy()
  mam1.write_map('before.ccp4')
  mam1.mask_all_maps_around_edges(soft_mask_radius=8)
  mam1.write_map('after.ccp4')
  mam1.write_map(map_id = 'mask',file_name='mask.ccp4')
  s = (mam1.get_map_manager_by_id('mask').map_data() > 0.5)
  assert approx_equal( (s.count(True),s.size()), (1496, 2048))

  # Create a  mask around atoms and apply to all maps
  new_mam.mask_all_maps_around_atoms(mask_atoms_atom_radius=8,
      soft_mask=False)
  s = (new_mam.get_map_manager_by_id('mask').map_data() > 0.5)
  assert approx_equal( (s.count(True),s.size()), (138,2048))

  # Create a soft mask around atoms and apply to all maps
  new_mam.mask_all_maps_around_atoms(mask_atoms_atom_radius=8,
      soft_mask=True)
  s = (new_mam.get_map_manager_by_id('mask').map_data() > 0.5)
  assert approx_equal( (s.count(True),s.size()), (1924,2048))

  # Create a soft mask around atoms and do not do anything with it
  new_mam.create_mask_around_atoms(mask_atoms_atom_radius=8,
      soft_mask=True)
  s = (new_mam.get_map_manager_by_id('mask').map_data() > 0.5)
  assert approx_equal( (s.count(True),s.size()), (1924,2048))

  # Create a soft mask around atoms; do not do anything with it, wrapping =true
  dummy_mam=new_mam.deep_copy()
  dummy_mam.map_manager().set_wrapping(True)
  dummy_mam.create_mask_around_atoms(mask_atoms_atom_radius=8,
      soft_mask=True)
  s = (dummy_mam.get_map_manager_by_id('mask').map_data() > 0.5)
  assert approx_equal( (s.count(True),s.size()), (1924,2048))

  # Mask around edges and do not do anything with it
  mam=dc.deep_copy()
  mam.create_mask_around_edges()
  s = (mam.get_map_manager_by_id('mask').map_data() > 0.5)
  mam.write_map(map_id='mask',file_name='edge.ccp4')
  assert approx_equal( (s.count(True),s.size()), (1792,2048))

  # Mask around density and to not do anything with it
  mam=dc.deep_copy()
  mam.create_mask_around_density(soft_mask=False)
  s = (mam.get_map_manager_by_id('mask').map_data() > 0.5)
  assert approx_equal( (s.count(True),s.size()), (856,2048))

  # Apply the current mask to one map
  mam.apply_mask_to_map('map_manager')
  s = (mam.map_manager().map_data() > 0.)
  assert approx_equal( (s.count(True),s.size()), (424,2048))
  s = (mam.map_manager().map_data() != 0.)
  assert approx_equal( (s.count(True),s.size()), (856,2048))
  assert approx_equal ((mam.map_manager().map_data()[225]),-0.0418027862906)

  # Apply any mask to one map
  mam.apply_mask_to_map('map_manager',mask_id='mask')
  s = (mam.map_manager().map_data() > 0.)
  assert approx_equal( (s.count(True),s.size()), (424,2048))
  s = (mam.map_manager().map_data() != 0.)
  assert approx_equal( (s.count(True),s.size()), (856,2048))
  assert approx_equal ((mam.map_manager().map_data()[225]),-0.0418027862906)

  # Apply the mask to all maps
  mam.apply_mask_to_maps()
  s = (mam.map_manager().map_data() > 0.)
  assert approx_equal( (s.count(True),s.size()), (424,2048))
  s = (mam.map_manager().map_data() != 0.)
  assert approx_equal( (s.count(True),s.size()), (856,2048))
  assert approx_equal ((mam.map_manager().map_data()[225]),-0.0418027862906)

  # Apply the mask to all maps, setting outside value to mean inside
  mam.apply_mask_to_maps(set_outside_to_mean_inside=True)
  s = (mam.map_manager().map_data() > 0.)
  assert approx_equal( (s.count(True),s.size()), (424,2048))
  s = (mam.map_manager().map_data() != 0.)
  assert approx_equal( (s.count(True),s.size()), (2048,2048))
  assert approx_equal ((mam.map_manager().map_data()[2047]),-0.0759598612785)
  s = (mam.get_map_manager_by_id('mask').map_data() >  0).as_1d()
  inside = mam.map_manager().map_data().as_1d().select(s)
  outside = mam.map_manager().map_data().as_1d().select(~s)
  assert approx_equal ((inside.min_max_mean().max,outside.min_max_mean().max),
   (0.317014873028,-0.0159585822888))


  # Make a new map and model, get mam and box with selection
  mmm=map_model_manager()
  mmm.generate_map(box_cushion=0,wrapping=True)
  mam=mmm
  mam_dc=mam.deep_copy()

  new_mm_1=mam.map_manager()
  assert approx_equal( (mmm.map_data().all(),new_mm_1.map_data().all()),
     ((18, 25, 20),(18, 25, 20)))

  # Get local fsc or randomized map
  dc=mam_dc.deep_copy()
  dc.map_manager().set_wrapping(False)
  map_coeffs = dc.map_manager().map_as_fourier_coefficients(d_min=3)
  from cctbx.development.create_models_or_maps import generate_map
  new_mm_1 = generate_map(map_coeffs=map_coeffs,
    d_min=3,
    low_resolution_real_space_noise_fraction=1,
    high_resolution_real_space_noise_fraction=50,
    map_manager=dc.map_manager(),
    random_seed=124321)
  new_mm_2 = generate_map(map_coeffs=map_coeffs,
    d_min=3,
    low_resolution_real_space_noise_fraction=1,
    high_resolution_real_space_noise_fraction=50,
    map_manager=dc.map_manager(),
    random_seed=734119)
  dc.add_map_manager_by_id(new_mm_1,'map_manager_1')
  dc.add_map_manager_by_id(new_mm_2,'map_manager_2')
  cc=dc.map_map_cc()
  fsc_curve=dc.map_map_fsc()
  dc.set_log(sys.stdout)
  dc.local_fsc(n_boxes = 1)

  # Get map-map FSC
  dc=mam_dc.deep_copy()
  dc.duplicate_map_manager(map_id='map_manager',new_map_id='filtered')
  dc.resolution_filter(d_min=3.5, d_max=10, map_id='filtered')
  dc.create_mask_around_atoms()
  fsc_curve=dc.map_map_fsc(
      map_id_1='map_manager',map_id_2='filtered',mask_id='mask',
      resolution=3.5,fsc_cutoff = 0.97)
  assert approx_equal(fsc_curve.d_min, 3.93793648601,eps=0.01)
  assert approx_equal (fsc_curve.fsc.fsc[-1],0.707536576779)

  # Get map-map CC
  dc=mam_dc.deep_copy()
  dc.duplicate_map_manager(map_id='map_manager',new_map_id='filtered')
  dc.resolution_filter(d_min=3.5, d_max=6, map_id='filtered')
  cc=dc.map_map_cc('map_manager','filtered')
  assert approx_equal(cc , 0.676687646486)

  # Get map-map CC with mask
  dc=mam_dc.deep_copy()
  dc.duplicate_map_manager(map_id='map_manager',new_map_id='filtered')
  dc.create_mask_around_density(mask_id='filtered')
  cc=dc.map_map_cc('map_manager','filtered',mask_id='mask')
  assert approx_equal(cc , 0.443401641784)

  # box around model
  mam=mam_dc.deep_copy()
  mam.box_all_maps_around_model_and_shift_origin(
      selection_string="resseq 221:221")
  new_mm_1=mam.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_1.map_data().all()),
    ((18, 25, 20),(24, 20, 20)))

  # box around model and add soft mask to edges
  mam=mam_dc.deep_copy()
  mam.box_all_maps_around_model_and_shift_origin(
      selection_string="resseq 221:221",
      soft_mask_around_edges = True)
  new_mm_2=mam.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_2.map_data().all()),
    ((18, 25, 20),(41,36,38)))

  # extract_around_model (get new mam)
  new_mam_dc=mam_dc.extract_all_maps_around_model(
      selection_string="resseq 221:221")
  new_mm_1a=new_mam_dc.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_1a.map_data().all()),
    ((18, 25, 20),(24, 20, 20)))
  assert approx_equal(new_mm_1.map_data(),new_mm_1a.map_data())

  # extract_around_model (get new mam) and soft_mask_around_edges
  new_mam_dc=mam_dc.extract_all_maps_around_model(
      selection_string="resseq 221:221", soft_mask_around_edges = True)
  new_mm_2a=new_mam_dc.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_2a.map_data().all()),
    ((18, 25, 20),(41,36,38)))
  assert approx_equal(new_mm_2.map_data(),new_mm_2a.map_data())

  # box around_density
  mam2=mam_dc.deep_copy()
  mam2.box_all_maps_around_density_and_shift_origin(box_cushion=0)
  new_mm_2=mam2.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_2.map_data().all()),
    ((18, 25, 20),(16, 23, 18)))

  # extract_around_density (get new mam)
  mam2=mam_dc.deep_copy()
  mam2_b=mam2.extract_all_maps_around_density(box_cushion=0)
  new_mm_2=mam2_b.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_2.map_data().all()),
    ((18, 25, 20),(16, 23, 18)))

  # Repeat as map_model_manager:
  mmm=mam_dc.as_map_model_manager().deep_copy()
  mmm.box_all_maps_around_model_and_shift_origin(
      selection_string="resseq 221:221")
  new_mm_1a=mmm.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_1a.map_data().all()),
    ((18, 25, 20),(24, 20, 20)))
  assert approx_equal(new_mm_1.map_data(),new_mm_1a.map_data())

  # box around density
  mam = mam_dc.deep_copy()
  mam.box_all_maps_around_density_and_shift_origin(box_cushion=0,soft_mask_around_edges=False)
  new_mm_1=mam.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_1.map_data().all()),
    ((18,25 , 20),(16, 23, 18)))

  # box around density and soft mask edges
  mam = mam_dc.deep_copy()
  mam.box_all_maps_around_density_and_shift_origin(box_cushion=0,
   soft_mask_around_edges=True)
  new_mm_1=mam.map_manager()
  assert approx_equal( (mam_dc.map_data().all(),new_mm_1.map_data().all()),
    ((18, 25 , 20),(18, 25, 20)))

  # extract around density (get new mam)
  mam1=mam_dc.deep_copy()
  mam1.extract_all_maps_around_density(box_cushion=0)
  new_mm_1=mam1.map_manager()
  assert approx_equal( (mmm.map_data().all(),new_mm_1.map_data().all()),
    ((24, 20, 20),(18, 25, 20)))

  # create mask around density, then box around mask (i.e., box around density)
  mam.create_mask_around_density(soft_mask=False)
  mam.box_all_maps_around_mask_and_shift_origin(box_cushion=3)
  new_mm_1=mam.map_manager()
  assert approx_equal( (mmm.map_data().all(),new_mm_1.map_data().all()),
    ((24, 20, 20),(18, 25, 20)))

  # box with bounds
  mam.box_all_maps_with_bounds_and_shift_origin(lower_bounds=(10,10,10),
     upper_bounds=(15,15,15))
  new_mm_1=mam.map_manager()
  assert approx_equal( (mmm.map_data().all(),new_mm_1.map_data().all()),
    ((24, 20, 20),(6, 6, 6)))

  # extract with bounds
  mam=mam_dc.deep_copy()
  mam_1=mam.extract_all_maps_with_bounds(lower_bounds=(10,10,10),
     upper_bounds=(15,15,15))
  new_mm_1=mam_1.map_manager()
  assert approx_equal( (mmm.map_data().all(),new_mm_1.map_data().all()),
    ((24, 20, 20),(6, 6, 6)))

  # box with unique
  mam=mam_dc.deep_copy()
  mam.box_all_maps_around_unique_and_shift_origin(
      molecular_mass=2500,resolution=3)
  new_mm_1=mam.map_manager()
  assert approx_equal( (mmm.map_data().all(),new_mm_1.map_data().all()),
    ((24, 20, 20),(18, 25, 20)))

  # extract with unique
  mam=mam_dc.deep_copy()
  mam_1=mam.extract_all_maps_around_unique(
      molecular_mass=2500,resolution=3)
  new_mm_1=mam_1.map_manager()
  assert approx_equal( (mmm.map_data().all(),new_mm_1.map_data().all()),
    ((24,20, 20),(18, 25, 20)))

  # extract a box and then restore model into same reference as current mam
  mam=mam_dc.deep_copy()
  mam.box_all_maps_with_bounds_and_shift_origin(lower_bounds=(2,2,2),
     upper_bounds=(17,17,17))
  print("mam:",mam.model().get_sites_cart()[0],mam.map_manager().origin_is_zero())
  # extract a box
  box_mam=mam.extract_all_maps_with_bounds(lower_bounds=(10,10,10),
     upper_bounds=(15,15,15))
  box_model=box_mam.model()
  matched_box_model=mam.get_model_from_other(box_mam)
  assert approx_equal(matched_box_model.get_sites_cart()[0],mam.model().get_sites_cart()[0])

  # Convert a map to fourier coefficients
  mam=mam_dc.deep_copy()
  ma=mam.map_as_fourier_coefficients(d_min=3)
  assert approx_equal(ma.d_min(),3.01655042414)


  mam.add_map_from_fourier_coefficients(ma, map_id='new_map_manager')
  cc=flex.linear_correlation(
   mam.get_map_manager_by_id('map_manager').map_data().as_1d(),
   mam.get_map_manager_by_id('new_map_manager').map_data().as_1d()).coefficient()
  assert (cc >= 0.99)

  # Get map-model CC
  dc=mam_dc.extract_all_maps_around_model(
      selection_string="(name ca or name cb or name c or name o) "+
        "and resseq 221:221", box_cushion=0)
  cc=dc.map_model_cc(resolution=3)
  assert approx_equal (cc, 0.817089390421)

  # Get map-model density
  dc=mam_dc.extract_all_maps_around_model(
      selection_string="(name ca or name cb or name c or name o) "+
        "and resseq 221:221", box_cushion=0)
  density=dc.density_at_model_sites(selection_string = 'name ca')
  assert approx_equal (density.min_max_mean().mean, 0.841152333991)


  # Remove model outside map
  dc.remove_model_outside_map(boundary=0)
  assert (mam_dc.model().get_sites_cart().size(),
     dc.model().get_sites_cart().size()) == (86, 4)

  # shift a model to match the map
  dc=mam_dc.extract_all_maps_around_model(
      selection_string="(name ca or name cb or name c or name o) "+
        "and resseq 221:221", box_cushion=0)
  actual_model=dc.model().deep_copy()
  working_model=dc.model().deep_copy()
  working_model.set_shift_cart((0,0,0))
  working_model.set_sites_cart(working_model.get_sites_cart()-actual_model.shift_cart())
  dc.shift_any_model_to_match(working_model)
  assert approx_equal (actual_model.get_sites_cart()[0],working_model.get_sites_cart()[0])
Example 9
def run(args, out=None, verbose=True, plots_dir=None):
    # master_phil, validate_params, run_app and __doc__ are module-level
    # objects of the original emringer script; names such as Sorry,
    # easy_pickle, make_header and map_model_manager are assumed to come from
    # the usual libtbx/iotbx locations.
    t0 = time.time()
    if (out is None): out = sys.stdout
    import iotbx.phil
    cmdline = iotbx.phil.process_command_line_with_files(
        args=args,
        master_phil=master_phil,
        pdb_file_def="model",
        reflection_file_def="map_coeffs",
        map_file_def="map_file",
        usage_string="""\
phenix.emringer model.pdb map.mrc [cif_file ...] [options]

%s
""" % __doc__)
    params = cmdline.work.extract()
    validate_params(params)
    from iotbx.data_manager import DataManager
    dm = DataManager()
    model = dm.get_model(params.model)
    crystal_symmetry_model = model.crystal_symmetry()
    hierarchy = model.get_hierarchy()
    map_coeffs = map_inp = None
    map_data, unit_cell = None, None
    if (params.map_coeffs is not None):
        mtz_in = cmdline.get_file(params.map_coeffs)
        mtz_in.check_file_type("hkl")
        best_guess = None
        best_labels = []
        all_labels = []
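        # Pick map coefficients: an exact match against params.map_label wins;
        # otherwise collect complex arrays whose labels look like pre-weighted
        # map coefficients (2FOFCWT, 2mFoDFc, FWT) as candidates.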
        for array in mtz_in.file_server.miller_arrays:
            if (array.info().label_string() == params.map_label):
                map_coeffs = array
                break
            elif (params.map_label is None):
                if (array.is_complex_array()):
                    labels = array.info().label_string()
                    all_labels.append(labels)
                    if (labels.startswith("2FOFCWT")
                            or labels.startswith("2mFoDFc")
                            or labels.startswith("FWT")):
                        best_guess = array
                        best_labels.append(labels)
        if (map_coeffs is None):
            if (len(all_labels) == 0):
                raise Sorry(
                    "No valid (pre-weighted) map coefficients found in file.")
            elif (best_guess is None):
                raise Sorry(
                    "Couldn't automatically determine appropriate map labels. "
                    + "Choices:\n  %s" % "  \n".join(all_labels))
            elif (len(best_labels) > 1):
                raise Sorry(
                    "Multiple appropriate map coefficients found in file. " +
                    "Choices:\n  %s" % "\n  ".join(best_labels))
            map_coeffs = best_guess
            print("  Guessing %s for input map coefficients" % best_labels[0],
                  file=out)
    else:
        ccp4_map_in = cmdline.get_file(params.map_file)
        ccp4_map_in.check_file_type("ccp4_map")
        map_inp = ccp4_map_in.file_object
        base = map_model_manager(
            map_manager=map_inp,
            model=model,
            wrapping=params.wrapping,
            ignore_symmetry_conflicts=params.ignore_symmetry_conflicts)
        hierarchy = base.model().get_hierarchy()
        map_data = base.map_data()
        unit_cell = map_inp.grid_unit_cell()

    hierarchy.atoms().reset_i_seq()
    make_header("Iterating over residues", out=out)
    t1 = time.time()
    from mmtbx.ringer import iterate_over_residues
    results = iterate_over_residues(pdb_hierarchy=hierarchy,
                                    map_coeffs=map_coeffs,
                                    map_data=map_data,
                                    unit_cell=unit_cell,
                                    params=params,
                                    log=out).results
    t2 = time.time()
    if (verbose):
        print("Time excluding I/O: %8.1fs" % (t2 - t1), file=out)
        print("Overall runtime:    %8.1fs" % (t2 - t0), file=out)
    if (params.output_base is None):
        pdb_base = os.path.basename(params.model)
        params.output_base = os.path.splitext(pdb_base)[0] + "_emringer"
    easy_pickle.dump("%s.pkl" % params.output_base, results)
    print("Wrote %s.pkl" % params.output_base, file=out)
    csv = "\n".join([r.format_csv() for r in results])
    open("%s.csv" % params.output_base, "w").write(csv)
    print("Wrote %s.csv" % params.output_base, file=out)
    if (plots_dir is None):
        plots_dir = params.output_base + "_plots"
    if (not os.path.isdir(plots_dir)):
        os.makedirs(plots_dir)
    from mmtbx.ringer import em_rolling
    from mmtbx.ringer import em_scoring
    import matplotlib
    matplotlib.use("Agg")
    make_header("Scoring results", out=out)
    scoring = em_scoring.main(file_name=params.output_base,
                              ringer_result=results,
                              out_dir=plots_dir,
                              sampling_angle=params.sampling_angle,
                              quiet=False,
                              out=out)
    make_header("Inspecting chains", out=out)
    rolling_window_threshold = params.rolling_window_threshold
    rolling = em_rolling.main(
        ringer_results=results,
        dir_name=plots_dir,
        threshold=rolling_window_threshold,  #scoring.optimal_threshold,
        graph=False,
        save=True,
        out=out)
    scoring.show_summary(out=out)
    print("\nReferences:", file=out)

    references = """\
  Barad BA, Echols N, Wang RYR, Cheng YC, DiMaio F, Adams PD, Fraser JS. (2015)
  Side-chain-directed model and map validation for 3D Electron Cryomicroscopy.
  Nature Methods, in press.

  Lang PT, Ng HL, Fraser JS, Corn JE, Echols N, Sales M, Holton JM, Alber T.
  Automated electron-density sampling reveals widespread conformational
  polymorphism in proteins. Protein Sci. 2010 Jul;19(7):1420-31. PubMed PMID:
  20499387"""
    print(references, file=out)
    if (params.show_gui):
        run_app(results)
    else:
        return (results, scoring, rolling)
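
# Usage sketch (not part of the original source): run() accepts the same
# free-form argument list as the phenix.emringer command line shown in its
# usage string, so it can be driven directly; file names are placeholders.
if __name__ == "__main__":
    import sys
    run(args=sys.argv[1:], out=sys.stdout)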
Example 10
def test_model_datatype():

    # 1yjp
    model_str = '''
CRYST1   21.937    4.866   23.477  90.00 107.08  90.00 P 1 21 1      2
ORIGX1      1.000000  0.000000  0.000000        0.00000
ORIGX2      0.000000  1.000000  0.000000        0.00000
ORIGX3      0.000000  0.000000  1.000000        0.00000
SCALE1      0.045585  0.000000  0.014006        0.00000
SCALE2      0.000000  0.205508  0.000000        0.00000
SCALE3      0.000000  0.000000  0.044560        0.00000
ATOM      1  N   GLY A   1      -9.009   4.612   6.102  1.00 16.77           N
ATOM      2  CA  GLY A   1      -9.052   4.207   4.651  1.00 16.57           C
ATOM      3  C   GLY A   1      -8.015   3.140   4.419  1.00 16.16           C
ATOM      4  O   GLY A   1      -7.523   2.521   5.381  1.00 16.78           O
ATOM      5  N   ASN A   2      -7.656   2.923   3.155  1.00 15.02           N
ATOM      6  CA  ASN A   2      -6.522   2.038   2.831  1.00 14.10           C
ATOM      7  C   ASN A   2      -5.241   2.537   3.427  1.00 13.13           C
ATOM      8  O   ASN A   2      -4.978   3.742   3.426  1.00 11.91           O
ATOM      9  CB  ASN A   2      -6.346   1.881   1.341  1.00 15.38           C
ATOM     10  CG  ASN A   2      -7.584   1.342   0.692  1.00 14.08           C
ATOM     11  OD1 ASN A   2      -8.025   0.227   1.016  1.00 17.46           O
ATOM     12  ND2 ASN A   2      -8.204   2.155  -0.169  1.00 11.72           N
ATOM     13  N   ASN A   3      -4.438   1.590   3.905  1.00 12.26           N
ATOM     14  CA  ASN A   3      -3.193   1.904   4.589  1.00 11.74           C
ATOM     15  C   ASN A   3      -1.955   1.332   3.895  1.00 11.10           C
ATOM     16  O   ASN A   3      -1.872   0.119   3.648  1.00 10.42           O
ATOM     17  CB  ASN A   3      -3.259   1.378   6.042  1.00 12.15           C
ATOM     18  CG  ASN A   3      -2.006   1.739   6.861  1.00 12.82           C
ATOM     19  OD1 ASN A   3      -1.702   2.925   7.072  1.00 15.05           O
ATOM     20  ND2 ASN A   3      -1.271   0.715   7.306  1.00 13.48           N
ATOM     21  N   GLN A   4      -1.005   2.228   3.598  1.00 10.29           N
ATOM     22  CA  GLN A   4       0.384   1.888   3.199  1.00 10.53           C
ATOM     23  C   GLN A   4       1.435   2.606   4.088  1.00 10.24           C
ATOM     24  O   GLN A   4       1.547   3.843   4.115  1.00  8.86           O
ATOM     25  CB  GLN A   4       0.656   2.148   1.711  1.00  9.80           C
ATOM     26  CG  GLN A   4       1.944   1.458   1.213  1.00 10.25           C
ATOM     27  CD  GLN A   4       2.504   2.044  -0.089  1.00 12.43           C
ATOM     28  OE1 GLN A   4       2.744   3.268  -0.190  1.00 14.62           O
ATOM     29  NE2 GLN A   4       2.750   1.161  -1.091  1.00  9.05           N
ATOM     30  N   GLN A   5       2.154   1.821   4.871  1.00 10.38           N
ATOM     31  CA  GLN A   5       3.270   2.361   5.640  1.00 11.39           C
ATOM     32  C   GLN A   5       4.594   1.768   5.172  1.00 11.52           C
ATOM     33  O   GLN A   5       4.768   0.546   5.054  1.00 12.05           O
ATOM     34  CB  GLN A   5       3.056   2.183   7.147  1.00 11.96           C
ATOM     35  CG  GLN A   5       1.829   2.950   7.647  1.00 10.81           C
ATOM     36  CD  GLN A   5       1.344   2.414   8.954  1.00 13.10           C
ATOM     37  OE1 GLN A   5       0.774   1.325   9.002  1.00 10.65           O
ATOM     38  NE2 GLN A   5       1.549   3.187  10.039  1.00 12.30           N
ATOM     39  N   ASN A   6       5.514   2.664   4.856  1.00 11.99           N
ATOM     40  CA  ASN A   6       6.831   2.310   4.318  1.00 12.30           C
ATOM     41  C   ASN A   6       7.854   2.761   5.324  1.00 13.40           C
ATOM     42  O   ASN A   6       8.219   3.943   5.374  1.00 13.92           O
ATOM     43  CB  ASN A   6       7.065   3.016   2.993  1.00 12.13           C
ATOM     44  CG  ASN A   6       5.961   2.735   2.003  1.00 12.77           C
ATOM     45  OD1 ASN A   6       5.798   1.604   1.551  1.00 14.27           O
ATOM     46  ND2 ASN A   6       5.195   3.747   1.679  1.00 10.07           N
ATOM     47  N   TYR A   7       8.292   1.817   6.147  1.00 14.70           N
ATOM     48  CA  TYR A   7       9.159   2.144   7.299  1.00 15.18           C
ATOM     49  C   TYR A   7      10.603   2.331   6.885  1.00 15.91           C
ATOM     50  O   TYR A   7      11.041   1.811   5.855  1.00 15.76           O
ATOM     51  CB  TYR A   7       9.061   1.065   8.369  1.00 15.35           C
ATOM     52  CG  TYR A   7       7.665   0.929   8.902  1.00 14.45           C
ATOM     53  CD1 TYR A   7       6.771   0.021   8.327  1.00 15.68           C
ATOM     54  CD2 TYR A   7       7.210   1.756   9.920  1.00 14.80           C
ATOM     55  CE1 TYR A   7       5.480  -0.094   8.796  1.00 13.46           C
ATOM     56  CE2 TYR A   7       5.904   1.649  10.416  1.00 14.33           C
ATOM     57  CZ  TYR A   7       5.047   0.729   9.831  1.00 15.09           C
ATOM     58  OH  TYR A   7       3.766   0.589  10.291  1.00 14.39           O
ATOM     59  OXT TYR A   7      11.358   2.999   7.612  1.00 17.49           O
TER      60      TYR A   7
HETATM   61  O   HOH A   8      -6.471   5.227   7.124  1.00 22.62           O
HETATM   62  O   HOH A   9      10.431   1.858   3.216  1.00 19.71           O
HETATM   63  O   HOH A  10     -11.286   1.756  -1.468  1.00 17.08           O
HETATM   64  O   HOH A  11      11.808   4.179   9.970  1.00 23.99           O
HETATM   65  O   HOH A  12      13.605   1.327   9.198  1.00 26.17           O
HETATM   66  O   HOH A  13      -2.749   3.429  10.024  1.00 39.15           O
HETATM   67  O   HOH A  14      -1.500   0.682  10.967  1.00 43.49           O
MASTER      238    0    0    0    0    0    0    6   66    1    0    1
END
'''

    dm = DataManager(['model'])
    dm.process_model_str('test', model_str)
    assert ('test' in dm.get_model_names())

    test_filename = 'test_model_datatype.cif'
    dm.write_model_file(test_filename,
                        dm.get_model().model_as_mmcif(),
                        overwrite=True)
    dm.process_model_file(test_filename)
    os.remove(test_filename)
    assert (test_filename in dm.get_model_names())

    extract = mmtbx.model.manager.get_default_pdb_interpretation_params()
    extract.pdb_interpretation.use_neutron_distances = True
    dm.update_pdb_interpretation_for_model('test', extract)
    assert (dm.get_model().restraints_manager is None)
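
# Sketch (not part of the original test): the round-trip above reduced to its
# essential DataManager calls.  The output file name is a placeholder.
def roundtrip_model_as_mmcif(model_str, filename='roundtrip.cif'):
    from iotbx.data_manager import DataManager
    dm = DataManager(['model'])
    dm.process_model_str('in_memory', model_str)          # register a string
    dm.write_model_file(filename,
                        dm.get_model().model_as_mmcif(),  # write mmCIF text
                        overwrite=True)
    dm.process_model_file(filename)                       # read it back
    return dm.get_model(filename)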
Example 11
def test_miller_array_datatype():

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_mtz = os.path.join(data_dir, 'data',
                            'insulin_unmerged_cutted_from_ccp4.mtz')

    dm = DataManager(['miller_array', 'phil'])
    dm.process_miller_array_file(data_mtz)

    # test labels
    labels = [
        'M_ISYM', 'BATCH', 'I,SIGI,merged', 'IPR,SIGIPR,merged',
        'FRACTIONCALC', 'XDET', 'YDET', 'ROT', 'WIDTH', 'LP', 'MPART', 'FLAG',
        'BGPKRATIOS'
    ]
    for label in dm.get_miller_array_labels():
        assert (label in labels)

    assert (len(dm.get_miller_arrays()) == len(dm.get_miller_array_labels()))

    # test access by label
    label = dm.get_miller_array_labels()[3]
    new_label = dm.get_miller_arrays(labels=[label])[0].info().label_string()
    assert (label == new_label)

    # test custom PHIL
    dm.write_phil_file('test.phil',
                       dm.export_phil_scope().as_str(),
                       overwrite=True)
    loaded_phil = libtbx.phil.parse(file_name='test.phil')
    new_dm = DataManager(['miller_array', 'phil'])
    new_dm.load_phil_scope(loaded_phil)
    assert (data_mtz == new_dm.get_default_miller_array_name())
    for label in new_dm.get_miller_array_labels():
        assert (label in labels)

    os.remove('test.phil')

    # test file server
    fs1 = dm.get_reflection_file_server()
    fs2 = dm.get_reflection_file_server([data_mtz, data_mtz])
    assert (2 * len(fs1.miller_arrays) == len(fs2.miller_arrays))
    cs = crystal.symmetry(
        unit_cell=dm.get_miller_arrays()[0].crystal_symmetry().unit_cell(),
        space_group_symbol='P1')
    fs = dm.get_reflection_file_server(crystal_symmetry=cs)
    assert (fs.crystal_symmetry.is_similar_symmetry(cs))
    assert (not fs.crystal_symmetry.is_similar_symmetry(
        dm.get_miller_arrays()[0].crystal_symmetry()))
    fs = dm.get_reflection_file_server(labels=['I,SIGI,merged'])
    assert (len(fs.get_miller_arrays(None)) == 1)
    miller_array = fs.get_amplitudes(None, None, True, None, None)
    assert (
        miller_array.info().label_string() == 'I,as_amplitude_array,merged')

    master_phil_str = '''
include scope iotbx.data_manager.miller_array.miller_array_phil_str
  '''
    master_phil = libtbx.phil.parse(master_phil_str, process_includes=True)
    master_extract = master_phil.extract()
    master_extract.data[0].file_name = data_mtz
    master_extract.data[0].labels = 'IPR,SIGIPR,merged'
    fs = get_reflection_file_server(dm, master_extract)
    assert (len(fs.get_miller_arrays(None)) == 1)
    master_extract.data[0].type = 'neutron'
    fs = get_reflection_file_server(dm, master_extract)
    assert (fs is None)
    fs = get_reflection_file_server(dm, master_extract, datatype='neutron')
    assert (len(fs.get_miller_arrays(None)) == 1)
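
# Sketch (not part of the original test): listing the array labels the
# DataManager picked up from an MTZ file, using only calls shown above.
def show_miller_array_labels(mtz_file):
    from iotbx.data_manager import DataManager
    dm = DataManager(['miller_array'])
    dm.process_miller_array_file(mtz_file)
    for array in dm.get_miller_arrays():
        print(array.info().label_string())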
Example 12
def test_01():

    # Source data

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_ccp4 = os.path.join(data_dir, 'data', 'non_zero_origin_map.ccp4')
    data_pdb = os.path.join(data_dir, 'data', 'non_zero_origin_model.pdb')
    data_ncs_spec = os.path.join(data_dir, 'data',
                                 'non_zero_origin_ncs_spec.ncs_spec')

    # DataManager

    dm = DataManager(['ncs_spec', 'model', 'real_map', 'phil'])
    dm.set_overwrite(True)

    # Read in map and model and ncs

    map_file = data_ccp4
    dm.process_real_map_file(map_file)
    mm = dm.get_real_map(map_file)

    model_file = data_pdb
    dm.process_model_file(model_file)
    model = dm.get_model(model_file)

    ncs_file = data_ncs_spec
    dm.process_ncs_spec_file(ncs_file)
    ncs = dm.get_ncs_spec(ncs_file)

    ncs_dc = ncs.deep_copy()

    mmmn = match_map_model_ncs()
    mmmn.add_map_manager(mm)
    mmmn.add_model(model)
    mmmn.add_ncs_object(ncs)

    # Save it
    mmmn_dc = mmmn.deep_copy()

    # Test creating mmm from model:
    mmm_from_model = model.as_map_model_manager(create_model_map=False)
    mmm_from_model = model.as_map_model_manager(create_model_map=True,
                                                resolution=5)
    assert mmm_from_model.map_manager() is not None

    # Make sure we can add an ncs object that is either shifted or not
    mmmn_dcdc = mmmn.deep_copy()
    new_mmmn = match_map_model_ncs()
    new_mmmn.add_map_manager(mmmn_dcdc.map_manager())
    new_mmmn.add_model(mmmn_dcdc.model())
    new_mmmn.add_ncs_object(mmmn_dcdc.ncs_object())
    assert new_mmmn.ncs_object().shift_cart() == new_mmmn.map_manager(
    ).shift_cart()

    mmmn_dcdc = mmmn.deep_copy()
    new_mmmn = match_map_model_ncs()
    new_mmmn.add_map_manager(mmmn_dcdc.map_manager())
    new_mmmn.add_model(mmmn_dcdc.model())
    new_mmmn.add_ncs_object(ncs_dc)
    assert new_mmmn.ncs_object().shift_cart() == new_mmmn.map_manager(
    ).shift_cart()

    original_ncs = mmmn.ncs_object()
    assert approx_equal(
        (24.0528, 11.5833, 20.0004),
        tuple(original_ncs.ncs_groups()[0].translations_orth()[-1]),
        eps=0.1)

    assert tuple(mmmn._map_manager.origin_shift_grid_units) == (0, 0, 0)

    # Shift origin to (0,0,0)
    mmmn = mmmn_dc.deep_copy()  # fresh version of match_map_model_ncs
    mmmn.shift_origin()
    new_ncs = mmmn.ncs_object()
    assert tuple(mmmn._map_manager.origin_shift_grid_units) == (100, 100, 100)

    mmmn.write_model('s.pdb')
    mmmn.write_map('s.mrc')

    shifted_ncs = mmmn.ncs_object()
    assert approx_equal(
        (-153.758, -74.044, -127.487),
        tuple(shifted_ncs.ncs_groups()[0].translations_orth()[-1]),
        eps=0.1)

    # Shift a model and shift it back

    mmmn = mmmn_dc.deep_copy()  # fresh version of match_map_model_ncs
    model = mmmn.model()
    shifted_model = mmmn.shift_model_to_match_working_map(model=model)
    model_in_original_position = mmmn.shift_model_to_match_original_map(
        model=shifted_model)
    assert (approx_equal(
        model.get_sites_cart(),  # not a copy
        shifted_model.get_sites_cart()))
    assert approx_equal(model.get_sites_cart(),
                        model_in_original_position.get_sites_cart())

    # test data_manager map_model_manager
    generated_mmm = dm.get_map_model_manager()
    print(generated_mmm)
    assert (isinstance(generated_mmm, map_model_manager))

    # Generate a map and model

    import sys
    mmm = map_model_manager(log=sys.stdout)
    mmm.generate_map()
    model = mmm.model()
    mm = mmm.map_manager()
    assert approx_equal(model.get_sites_cart()[0], (14.476, 10.57, 8.34),
                        eps=0.01)
    assert approx_equal(mm.map_data()[10, 10, 10], -0.0506, eps=0.001)
    # Save it
    mmm_dc = mmm.deep_copy()

    # Create model from sites
    mmm_sites = mmm_dc.deep_copy()
    from scitbx.array_family import flex
    sites_cart = flex.vec3_double()
    sites_cart.append((3, 4, 5))
    mmm_sites.model_from_sites_cart(sites_cart=sites_cart,
                                    model_id='new_model')
    assert mmm_sites.get_model_by_id('new_model').get_sites_cart()[0] == (3, 4,
                                                                          5)
    ph_sites = mmm_sites.get_model_by_id('new_model').get_hierarchy()
    text_sites = mmm_sites.get_model_by_id('new_model').model_as_pdb()

    # Create model from hierarchy
    mmm_sites = mmm_dc.deep_copy()
    mmm_sites.model_from_hierarchy(hierarchy=ph_sites, model_id='new_model')
    assert mmm_sites.get_model_by_id('new_model').get_sites_cart()[0] == (3, 4,
                                                                          5)

    # Create model from text
    mmm_sites = mmm_dc.deep_copy()
    mmm_sites.model_from_text(text=text_sites, model_id='new_model')
    assert mmm_sites.get_model_by_id('new_model').get_sites_cart()[0] == (3, 4,
                                                                          5)

    # Set crystal_symmetry and unit_cell_crystal_symmetry and shift_cart
    # Box and shift the map_model_manager so we have new coordinate system
    mmm_sites.box_all_maps_around_model_and_shift_origin(box_cushion=4.5)
    new_model = mmm_sites.get_model_by_id('new_model')
    assert approx_equal(
        (3., 4., 5.0),
        mmm_sites.get_model_by_id('new_model').get_sites_cart()[0])

    # arbitrarily set unit_cell crystal symmetry of model to
    #  match crystal_symmetry. First have to set shift_cart to None
    new_model.set_shift_cart(shift_cart=None)
    new_model.set_unit_cell_crystal_symmetry_and_shift_cart()
    assert new_model.crystal_symmetry() != mmm_sites.crystal_symmetry()

    # now set crystal symmetries and shift cart of model to match the manager
    mmm_sites.set_model_symmetries_and_shift_cart_to_match_map(new_model)
    assert new_model.crystal_symmetry().is_similar_symmetry(
        mmm_sites.crystal_symmetry())
    assert new_model.unit_cell_crystal_symmetry().is_similar_symmetry(
        mmm_sites.unit_cell_crystal_symmetry())
    assert new_model.shift_cart() == mmm_sites.shift_cart()

    # Import hierarchy into a model and set symmetries and shift to match
    mmm_sites.model_from_hierarchy(hierarchy=mmm_sites.model().get_hierarchy(),
                                   model_id='model_from_hierarchy')
    assert mmm_sites.get_model_by_id('model_from_hierarchy').model_as_pdb() \
       == mmm_sites.get_model_by_id('model').model_as_pdb()

    # Check on wrapping
    # this one should not wrap because it is zero at edges
    assert not mm.wrapping()

    # Make a new one with no buffer so it is not zero at edges
    mmm = map_model_manager()
    mmm.generate_map(box_cushion=0)
    mm = mmm.map_manager()
    # check its compatibility with wrapping
    assert mm.is_consistent_with_wrapping()
    mmm.show_summary()

    # now box it
    sel = mmm.model().selection("resseq 221:221")
    new_model = mmm.model().deep_copy().select(sel)
    new_mmm = map_model_manager(model=new_model, map_manager=mm.deep_copy())
    new_mmm.box_all_maps_around_model_and_shift_origin()
    new_mm = new_mmm.map_manager()

    assert not new_mm.wrapping()
    assert not new_mm.is_consistent_with_wrapping()

    # now box it with selection
    new_mmm_1 = map_model_manager(model=mmm.model().deep_copy(),
                                  map_manager=mm.deep_copy())
    new_mmm_1.box_all_maps_around_model_and_shift_origin(
        selection_string="resseq 221:221")
    new_mm_1 = new_mmm_1.map_manager()

    assert not new_mm_1.wrapping()
    assert not new_mm_1.is_consistent_with_wrapping()
    assert new_mm_1.map_data().all() == new_mm.map_data().all()

    # create map_model_manager with just half-maps
    mm1 = mm.deep_copy()
    mm2 = mm.deep_copy()
    map_data = mm2.map_data()
    map_data += 1.
    new_mmm = map_model_manager(model=mmm.model().deep_copy(),
                                map_manager_1=mm1,
                                map_manager_2=mm2)
    assert new_mmm._map_dict.get(
        'map_manager') is None  # should not be any yet
    assert approx_equal(new_mmm.map_manager().map_data()[232],
                        mm.deep_copy().map_data()[232] + 0.5)
    assert new_mmm._map_dict.get(
        'map_manager') is not None  # now should be there

    # generate map data from a model
    mm1 = mm.deep_copy()
    mm2 = mm.deep_copy()
    new_mmm = map_model_manager(model=mmm.model().deep_copy(), map_manager=mm1)
    mmm.generate_map(model=mmm.model())
    mm = mmm.map_manager()
    mmm.show_summary()

    # check get_map_model_manager function
    dm = DataManager(['model'])
    assert not hasattr(dm, 'get_map_model_manager')
    dm = DataManager(['real_map'])
    assert not hasattr(dm, 'get_map_model_manager')
    dm = DataManager(['sequence'])
    assert not hasattr(dm, 'get_map_model_manager')
    dm = DataManager(['model', 'real_map'])
    assert hasattr(dm, 'get_map_model_manager')

    # usage
    dm.get_map_model_manager(model_file=data_pdb, map_files=data_ccp4)
    dm.get_map_model_manager(model_file=data_pdb, map_files=[data_ccp4])
    dm.get_map_model_manager(model_file=data_pdb,
                             map_files=[data_ccp4, data_ccp4, data_ccp4])
    dm.get_map_model_manager(model_file=data_pdb,
                             map_files=data_ccp4,
                             ignore_symmetry_conflicts=True)

    # errors
    try:
        dm.get_map_model_manager(model_file=data_pdb,
                                 map_files=data_ccp4,
                                 from_phil=True)
    except Sorry as e:
        assert 'from_phil is set to True' in str(e)
    try:
        dm.get_map_model_manager(model_file=data_pdb,
                                 map_files=data_ccp4,
                                 abc=123)
    except TypeError as e:
        assert 'unexpected keyword argument' in str(e)
    try:
        dm.get_map_model_manager(model_file=data_pdb,
                                 map_files=[data_ccp4, data_ccp4])
    except Sorry as e:
        assert '1 full map and 2 half maps' in str(e)

    # PHIL
    class test_program(ProgramTemplate):
        master_phil_str = '''
include scope iotbx.map_model_manager.map_model_phil_str
'''

    working_phil_str = '''
  map_model {
    full_map = %s
    half_map = %s
    half_map = s.mrc
    model = %s
  }
''' % (data_ccp4, data_ccp4, data_pdb)

    master_phil = parse(test_program.master_phil_str, process_includes=True)
    working_phil = master_phil.fetch(parse(working_phil_str))
    tp = test_program(dm, working_phil.extract())

    try:
        dm.get_map_model_manager(from_phil=True)
    except Exception as e:
        assert 'ignore_symmetry_conflicts' in str(e)
    try:
        dm.get_map_model_manager(from_phil=True,
                                 ignore_symmetry_conflicts=True)
    except AssertionError:
        pass
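
# Sketch (not part of the original test): the usual way of combining a model
# and a map through the DataManager, as exercised above.  Paths are
# placeholders.
def load_map_and_model(model_file, map_file):
    from iotbx.data_manager import DataManager
    dm = DataManager(['model', 'real_map'])
    mmm = dm.get_map_model_manager(model_file=model_file, map_files=map_file)
    mmm.box_all_maps_around_model_and_shift_origin()
    return mmm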
Example 13
def RunProbeTests(inFileName):

  #========================================================================
  # Call the test functions for the libraries we test.

  ret = probeext.DotSpheres_test()
  assert len(ret) == 0, "DotSpheres_test() failed: " + ret

  ret = probeext.SpatialQuery_test()
  assert len(ret) == 0, "SpatialQuery_test() failed: " + ret

  ret = probeext.Scoring_test()
  assert len(ret) == 0, "Scoring_test() failed: " + ret

  AtomTypes.Test()
  Helpers.Test()

  #========================================================================
  # Now exercise the C++-wrapped classes directly to make sure that the
  # wrapping code and parameters have not changed.

  #========================================================================
  # Make sure we can get at the DotSphere objects and their methods
  cache = probeext.DotSphereCache(10)
  sphere1 = cache.get_sphere(1)
  dots = sphere1.dots()

  #========================================================================
  # Make sure we can fill in an ExtraAtomInfoList and pass it to scoring
  # Generate an example data model with a small molecule in it
  if inFileName is not None and len(inFileName) > 0:
    # Read a model from a file using the DataManager
    dm = DataManager()
    dm.process_model_file(inFileName)
    model = dm.get_model(inFileName)
  else:
    # Generate a small-molecule model using the map model manager
    mmm=map_model_manager()         #   get an initialized instance of the map_model_manager
    mmm.generate_map()              #   get a model from a generated small library model and calculate a map for it
    model = mmm.model()             #   get the model

  # Fix up bogus unit cell when it occurs by checking crystal symmetry.
  cs =model.crystal_symmetry()
  if (cs is None) or (cs.unit_cell() is None):
    model = shift_and_box_model(model = model)

  # Get the list of all atoms in the model
  atoms = model.get_atoms()

  # Get the bonding information we'll need to exclude our bonded neighbors.
  try:
    p = mmtbx.model.manager.get_default_pdb_interpretation_params()
    model.process(make_restraints=True, pdb_interpretation_params=p) # make restraints
    geometry = model.get_restraints_manager().geometry
    sites_cart = model.get_sites_cart() # cartesian coordinates
    bond_proxies_simple, asu = \
        geometry.get_all_bond_proxies(sites_cart = sites_cart)
  except Exception as e:
    raise Exception("Could not get bonding information for input file: " + str(e))
  bondedNeighbors = Helpers.getBondedNeighborLists(atoms, bond_proxies_simple)

  # Traverse the hierarchy and look up the extra data to be filled in.
  ret = Helpers.getExtraAtomInfo(model,bondedNeighbors)
  extra = ret.extraAtomInfo

  # Construct a SpatialQuery and fill in the atoms.  Ensure that we can make a
  # query within 1000 Angstroms of the origin.
  sq = probeext.SpatialQuery(atoms)
  nb = sq.neighbors((0,0,0), 0, 1000)

  # Construct a DotScorer object.
  # Find the radius of each atom in the structure and construct dot spheres for
  # them. Find the atoms that are bonded to them and add them to an excluded list.
  # Then compute the score for each of them and report the summed score over the
  # whole molecule the way that Reduce will.
  ds = probeext.DotScorer(extra)
  total = 0
  badBumpTotal = 0
  for a in atoms:
    rad = extra.getMappingFor(a).vdwRadius
    assert rad > 0, "Invalid radius for atom look-up: "+a.name+" rad = "+str(rad)
    sphere = cache.get_sphere(rad)

    # Exclude atoms that are bonded to me or to one of my neighbors.
    # It has the side effect of excluding myself if I have any neighbors.
    # Construct as a set to avoid duplicates.
    exclude = set()
    for n in bondedNeighbors[a]:
      exclude.add(n)
      for n2 in bondedNeighbors[n]:
        exclude.add(n2)
    exclude = list(exclude)

    dots = sphere.dots()
    res = ds.score_dots(a, 1.0, sq, rad*3, 0.25, exclude, sphere.dots(), sphere.density(), False)
    total += res.totalScore()
    if res.hasBadBump:
      badBumpTotal += 1

  # Test calling the single-dot checking code as will be used by Probe to make sure
  # all of the Python linkage is working
  dotOffset = [1, 0, 0]
  check = ds.check_dot(atoms[0], dotOffset, 1, atoms, [atoms[0]])
  overlapType = check.overlapType

  # Test calling the interaction_type method to be sure Python linkage is working
  interactionType = ds.interaction_type(check.overlapType, check.gap)
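
# Usage sketch (not part of the original test): RunProbeTests() falls back to
# a generated small-molecule model when no file name is given, so it can be
# run with or without an argument.
if __name__ == "__main__":
  import sys
  RunProbeTests(sys.argv[1] if len(sys.argv) > 1 else None)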
Example 14
def test_miller_array_datatype():

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_mtz = os.path.join(data_dir, 'data',
                            'insulin_unmerged_cutted_from_ccp4.mtz')

    dm = DataManager(['miller_array', 'phil'])
    dm.process_miller_array_file(data_mtz)

    # test labels
    labels = [
        'M_ISYM', 'BATCH', 'I,SIGI,merged', 'IPR,SIGIPR,merged',
        'FRACTIONCALC', 'XDET', 'YDET', 'ROT', 'WIDTH', 'LP', 'MPART', 'FLAG',
        'BGPKRATIOS'
    ]
    for label in dm.get_miller_array_labels():
        assert (label in labels)

    assert (len(dm.get_miller_arrays()) == len(dm.get_miller_array_labels()))

    # test access by label
    label = dm.get_miller_array_labels()[3]
    new_label = dm.get_miller_arrays(labels=[label])[0].info().label_string()
    assert (label == new_label)

    # test custom PHIL
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test.phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test.phil')
    new_dm = DataManager(['miller_array', 'phil'])
    new_dm.load_phil_scope(loaded_phil)
    assert (data_mtz == new_dm.get_default_miller_array_name())
    for label in new_dm.get_miller_array_labels():
        assert (label in labels)

    os.remove('test.phil')

    # test type
    assert (dm.get_miller_array_type() == 'x_ray')
    label = labels[3]
    dm.set_miller_array_type(data_mtz, label, 'electron')
    assert (dm.get_miller_array_type(label=label) == 'electron')
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test_phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test_phil')
    new_dm.load_phil_scope(loaded_phil)
    assert (new_dm.get_miller_array_type(label=label) == 'electron')
    new_dm = DataManager(['miller_array'])
    try:
        new_dm.set_default_miller_array_type('q')
    except Sorry:
        pass
    new_dm.set_default_miller_array_type('neutron')
    new_dm.process_miller_array_file(data_mtz)
    assert (new_dm.get_miller_array_type(label=label) == 'neutron')

    os.remove('test_phil')

    # test writing file
    arrays = dm.get_miller_arrays()
    dataset = arrays[2].as_mtz_dataset(column_root_label='label1')
    dataset.add_miller_array(miller_array=arrays[3],
                             column_root_label='label2')
    mtz_object = dataset.mtz_object()
    dm.write_miller_array_file(mtz_object, filename='test.mtz', overwrite=True)
    dm.process_miller_array_file('test.mtz')
    new_labels = dm.get_miller_array_labels('test.mtz')
    assert ('label1,SIGlabel1' in new_labels)
    assert ('label2,SIGlabel2' in new_labels)

    os.remove('test.mtz')

    # test file server
    fs1 = dm.get_reflection_file_server()
    fs2 = dm.get_reflection_file_server([data_mtz, data_mtz])
    assert (2 * len(fs1.miller_arrays) == len(fs2.miller_arrays))
    cs = crystal.symmetry(
        unit_cell=dm.get_miller_arrays()[0].crystal_symmetry().unit_cell(),
        space_group_symbol='P1')
    fs = dm.get_reflection_file_server(crystal_symmetry=cs)
    assert (fs.crystal_symmetry.is_similar_symmetry(cs))
    assert (not fs.crystal_symmetry.is_similar_symmetry(
        dm.get_miller_arrays()[0].crystal_symmetry()))
    fs = dm.get_reflection_file_server(labels=['I,SIGI,merged'])
    assert (len(fs.get_miller_arrays(None)) == 1)
    miller_array = fs.get_amplitudes(None, None, True, None, None)
    assert (
        miller_array.info().label_string() == 'I,as_amplitude_array,merged')

    for label in dm.get_miller_array_labels():
        dm.set_miller_array_type(label=label, array_type='electron')
    fs = dm.get_reflection_file_server(array_type='x_ray')
    assert (len(fs.get_miller_arrays(None)) == 0)
    fs = dm.get_reflection_file_server(array_type='electron')
    assert (len(fs.get_miller_arrays(None)) == 13)
    fs = dm.get_reflection_file_server(
        filenames=[data_mtz],
        labels=[['I,SIGI,merged', 'IPR,SIGIPR,merged']],
        array_type='neutron')
    assert (len(fs.get_miller_arrays(None)) == 0)
    for label in ['I,SIGI,merged', 'IPR,SIGIPR,merged']:
        dm.set_miller_array_type(label=label, array_type='x_ray')
    fs = dm.get_reflection_file_server(
        filenames=[data_mtz],
        labels=[['I,SIGI,merged', 'IPR,SIGIPR,merged']],
        array_type='x_ray')
    assert (len(fs.get_miller_arrays(data_mtz)) == 2)
    fs = dm.get_reflection_file_server(filenames=[data_mtz],
                                       array_type='x_ray')
    assert (len(fs.get_miller_arrays(data_mtz)) == 2)
    fs = dm.get_reflection_file_server(filenames=[data_mtz],
                                       array_type='electron')
    assert (len(fs.get_miller_arrays(data_mtz)) == 11)

    # test subset of labels
    label_subset = labels[3:8]
    dm = DataManager(['miller_array', 'phil'])
    dm.process_miller_array_file(data_mtz)
    dm._miller_array_labels[data_mtz] = label_subset
    dm.set_miller_array_type(label=label_subset[2], array_type='electron')
    assert (dm.get_miller_array_type(label=label_subset[2]) == 'electron')
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test.phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test.phil')
    new_dm = DataManager(['miller_array', 'phil'])
    new_dm.load_phil_scope(loaded_phil)
    assert (new_dm.get_miller_array_type(label=label_subset[2]) == 'electron')
    fs = new_dm.get_reflection_file_server(array_type='x_ray')
    assert (len(fs.get_miller_arrays(None)) == 4)
    fs = new_dm.get_reflection_file_server(array_type='electron')
    assert (len(fs.get_miller_arrays(None)) == 1)
    os.remove('test.phil')

    label_subset = list()
    dm = DataManager(['miller_array', 'phil'])
    dm.process_miller_array_file(data_mtz)
    dm._miller_array_labels[data_mtz] = label_subset
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test.phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test.phil')
    new_dm = DataManager(['miller_array', 'phil'])
    new_dm.load_phil_scope(loaded_phil)
    fs = new_dm.get_reflection_file_server(array_type='x_ray')
    assert (len(fs.get_miller_arrays(None)) == 13)
    fs = new_dm.get_reflection_file_server(array_type='electron')
    assert (len(fs.get_miller_arrays(None)) == 0)
    os.remove('test.phil')
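
# Sketch (not part of the original test): tagging every array in a file with
# one array type and then asking the reflection file server for that type
# only, as exercised above.
def arrays_of_type(mtz_file, array_type='electron'):
    from iotbx.data_manager import DataManager
    dm = DataManager(['miller_array'])
    dm.process_miller_array_file(mtz_file)
    for label in dm.get_miller_array_labels():
        dm.set_miller_array_type(label=label, array_type=array_type)
    fs = dm.get_reflection_file_server(array_type=array_type)
    return fs.get_miller_arrays(None)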
Example 15
def main():
  dm = DataManager()
  dm.process_model_str('testing', model_1yjp)
  model = dm.get_model()
  rc = model.restraints_as_geo(force=True)
  rc = check_geo(rc)
  assert rc == count_1yjp, check_diff(rc, count_1yjp)

  dm = DataManager()
  dm.process_model_str('testing', model_1yjp_with_waters)
  model = dm.get_model()
  rc = model.restraints_as_geo(force=True)
  rc = check_geo(rc)
  assert rc == count_1yjp_with_waters, rc

  params = model.get_default_pdb_interpretation_params()
  edits_1yjp = params.geometry_restraints.edits

  edits_1yjp.bond[0].action='add'
  edits_1yjp.bond[0].atom_selection_1='resname HOH and resid 10 and name O'
  edits_1yjp.bond[0].atom_selection_2='resname ASN and resid 2 and name ND2'
  edits_1yjp.bond[0].distance_ideal=2.1
  edits_1yjp.bond[0].sigma=0.1
  model.process(pdb_interpretation_params=params,
                make_restraints=True)
  rc = model.restraints_as_geo(force=True)
  rc = check_geo(rc)
  current = count_1yjp_with_waters.copy()
  current['User supplied restraints'] = 1
  current['Nonbonded interactions']   = 1176
  assert rc == current, check_diff(rc, current)

  edits_1yjp.angle[0].action='add'
  edits_1yjp.angle[0].atom_selection_1='resname HOH and resid 10 and name O'
  edits_1yjp.angle[0].atom_selection_2='resname ASN and resid 2 and name ND2'
  edits_1yjp.angle[0].atom_selection_3='resname ASN and resid 2 and name CG'
  edits_1yjp.angle[0].angle_ideal=21.9
  edits_1yjp.angle[0].sigma=1.1
  model.process(pdb_interpretation_params=params,
                make_restraints=True)
  rc = model.restraints_as_geo(force=True)
  rc = check_geo(rc)
  current = count_1yjp_with_waters.copy()
  current['User supplied restraints'] = 1
  current['User supplied angle restraints'] = 1
  current['Nonbonded interactions']   = 1176
  assert rc == current, check_diff(rc, current)

  edits_1yjp.dihedral[0].action='add'
  edits_1yjp.dihedral[0].atom_selection_1='resname HOH and resid 10 and name O'
  edits_1yjp.dihedral[0].atom_selection_2='resname ASN and resid 2 and name ND2'
  edits_1yjp.dihedral[0].atom_selection_3='resname ASN and resid 2 and name CG'
  edits_1yjp.dihedral[0].atom_selection_4='resname ASN and resid 2 and name CB'
  edits_1yjp.dihedral[0].angle_ideal=121.9
  edits_1yjp.dihedral[0].sigma=1.12
  edits_1yjp.dihedral[0].periodicity=10
  model.process(pdb_interpretation_params=params,
                make_restraints=True)
  rc = model.restraints_as_geo(force=True)
  rc = check_geo(rc)
  current = count_1yjp_with_waters.copy()
  current['User supplied restraints'] = 1
  current['User supplied angle restraints'] = 1
  current['User supplied torsion angle restraints'] = 1
  #current['  sinusoidal'] = 16
  current['Nonbonded interactions']   = 1176
  assert rc == current, check_diff(rc, current)
  print('OK')
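
# Sketch (not part of the original test): the minimal sequence for adding one
# user-supplied bond restraint through geometry_restraints.edits, as above.
def add_bond_edit(model, sel_1, sel_2, distance_ideal, sigma=0.1):
  params = model.get_default_pdb_interpretation_params()
  bond = params.geometry_restraints.edits.bond[0]
  bond.action = 'add'
  bond.atom_selection_1 = sel_1
  bond.atom_selection_2 = sel_2
  bond.distance_ideal = distance_ideal
  bond.sigma = sigma
  model.process(pdb_interpretation_params=params, make_restraints=True)
  return model.restraints_as_geo(force=True)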
Example 16
def exercise(out=sys.stdout):

    # test shift_aware_rt

    mmm1, mmm2 = get_map_model_managers()
    initial_shift_aware_rt_info = mmm1.shift_aware_rt_to_superpose_other(mmm2)
    initial_rt_info = initial_shift_aware_rt_info.working_rt_info(
        from_obj=mmm2, to_obj=mmm1)

    model_2 = mmm2.model().apply_selection_string("resseq 222:235")
    mmm2.set_model(model_2)
    shift_aware_rt_info = mmm1.shift_aware_rt_to_superpose_other(mmm2)
    rt_info = shift_aware_rt_info.working_rt_info(from_obj=mmm2, to_obj=mmm1)
    assert shift_aware_rt_info.is_similar(initial_shift_aware_rt_info,
                                          tol=0.002)

    shift_aware_rt = mmm1.shift_aware_rt(working_rt_info=rt_info,
                                         from_obj=mmm2,
                                         to_obj=mmm1)

    shift_aware_rt = mmm1.map_manager().shift_aware_rt(working_rt_info=rt_info,
                                                       from_obj=mmm2,
                                                       to_obj=mmm1)
    print(mmm1, mmm2)
    sites_cart_2 = mmm2.model().get_sites_cart()
    mapped_sites_cart = shift_aware_rt.apply_rt(sites_cart=sites_cart_2,
                                                from_obj=mmm2,
                                                to_obj=mmm1)
    assert approx_equal(
        mapped_sites_cart,
        mmm1.model().apply_selection_string("resseq 222:235").get_sites_cart(),
        eps=0.01)
    working_rt_info = shift_aware_rt.working_rt_info(from_obj=mmm2,
                                                     to_obj=mmm1)
    mapped_sites_cart = (working_rt_info.r.elems *
                         mmm2.model().get_sites_cart() +
                         working_rt_info.t.elems)
    assert approx_equal(
        mapped_sites_cart,
        mmm1.model().apply_selection_string("resseq 222:235").get_sites_cart(),
        eps=0.01)

    inverse_shift_aware_rt = shift_aware_rt.inverse()
    mapped_sites_cart = inverse_shift_aware_rt.apply_rt(
        sites_cart=mmm1.model().apply_selection_string(
            "resseq 222:235").get_sites_cart(),
        from_obj=mmm1,
        to_obj=mmm2)
    assert approx_equal(mapped_sites_cart,
                        mmm2.model().get_sites_cart(),
                        eps=0.01)

    mmm1, mmm2 = get_map_model_managers()

    # get r,t to map mmm2 model on mmm1 model
    shift_aware_rt_info = mmm1.shift_aware_rt_to_superpose_other(mmm2)
    rt_info = shift_aware_rt_info.working_rt_info(from_obj=mmm2, to_obj=mmm1)
    print(rt_info)

    # get mmm2 map superimposed on mmm1 map (in region where it is defined, zero
    #   outside that region)

    new_mm = mmm1.superposed_map_manager_from_other(other=mmm2)
    new_mm.write_map('super.ccp4')
    mmm1.write_map('orig.ccp4')
    mmm1.write_model('orig.pdb')

    new_mm = mmm1.superposed_map_manager_from_other(
        other=mmm2, selection_string="resseq 221:225")
    assert approx_equal(new_mm.map_map_cc(mmm1.map_manager()),
                        0.994645868918,
                        eps=0.01)
    new_mm.write_map('super_221-225.ccp4')

    new_mm = mmm1.superposed_map_manager_from_other(other=mmm2,
                                                    working_rt_info=rt_info)
    assert approx_equal(new_mm.map_map_cc(mmm1.map_manager()),
                        0.994645868918,
                        eps=0.01)
    new_mm.write_map('super_221-225.ccp4')

    # get a local resolution map (this one should look pretty constant!)
    mmm1.set_resolution(3)
    mmma = mmm1.deep_copy()
    model = mmm1.model()
    mmma.remove_model_by_id('model')
    mmmb = mmma.deep_copy()

    mmma.map_manager().randomize(random_seed=23412,
                                 d_min=3,
                                 high_resolution_fourier_noise_fraction=10,
                                 low_resolution_noise_cutoff=5)
    mmmb.map_manager().randomize(random_seed=887241,
                                 d_min=3,
                                 high_resolution_fourier_noise_fraction=10,
                                 low_resolution_noise_cutoff=5)

    #  Read these maps back in so that the test works with constant values
    from iotbx.data_manager import DataManager
    dm = DataManager()
    mmma_map = dm.get_real_map(random_map_a)
    mmmb_map = dm.get_real_map(random_map_b)
    mmma_map.shift_origin()
    mmmb_map.shift_origin()
    mmma.add_map_manager_by_id(map_id="map_manager", map_manager=mmma_map)
    mmmb.add_map_manager_by_id(map_id="map_manager", map_manager=mmmb_map)

    assert approx_equal(mmma.map_manager().map_map_cc(mmmb.map_manager()),
                        0.16, 0.10)
    from iotbx.map_model_manager import map_model_manager
    model.set_b_iso(flex.double(model.get_sites_cart().size(), 0))
    local_mmm = map_model_manager(map_manager_1=mmma.map_manager(),
                                  map_manager_2=mmmb.map_manager(),
                                  model=model)
    local_mmm.set_resolution(3)
    local_mmm.local_fsc()

    from iotbx.data_manager import DataManager
    dm = DataManager()
    dm.set_overwrite(True)

    cc_before = local_mmm.map_model_cc()
    print("Working with randomized maps cc = ", cc_before)
    dc = local_mmm.deep_copy()
    dc.set_log(sys.stdout)
    cc_before = dc.map_model_cc()
    dc.half_map_sharpen(n_bins=15)
    cc_after = dc.map_model_cc(map_id='map_manager_scaled')
    print("CC before, after half map sharpen: ", cc_before, cc_after)
    assert approx_equal((cc_before, cc_after), (0.80, 0.80), eps=0.10)

    dc = local_mmm.deep_copy()
    dc.set_log(sys.stdout)
    cc_before = dc.map_model_cc()
    dc.model_sharpen(n_bins=15,
                     local_sharpen=False,
                     anisotropic_sharpen=False,
                     optimize_b_eff=False)
    cc_after = dc.map_model_cc(map_id='map_manager_scaled')
    print("CC before, after std model sharpen: ", cc_before, cc_after)
    assert approx_equal((cc_before, cc_after), (0.80, 0.90), eps=0.10)
    model_sharpened_mm = dc.get_map_manager_by_id(map_id='map_manager_scaled')

    dc = local_mmm.deep_copy()
    dc.set_log(sys.stdout)
    cc_before = dc.map_model_cc()
    dc.model_sharpen(local_sharpen=True, n_boxes=1, n_bins=15)
    cc_after = dc.map_model_cc(map_id='map_manager_scaled')
    print("CC before, after local model sharpen n_boxes=1: ", cc_before,
          cc_after)
    assert approx_equal((cc_before, cc_after), (0.80, 0.90), eps=0.10)
    model_sharpened_mm = dc.get_map_manager_by_id(map_id='map_manager_scaled')

    dc = local_mmm.deep_copy()
    dc.set_log(sys.stdout)
    dc.add_map_manager_by_id(model_sharpened_mm, 'external_map')
    cc_before = dc.map_map_cc(map_id='map_manager',
                              other_map_id='external_map')
    dc.external_sharpen(n_bins=15, map_id_external_map='external_map')
    print(dc)
    cc_after = dc.map_map_cc(map_id='map_manager_scaled',
                             other_map_id='external_map')
    print("CC before, after external sharpen n_boxes=1: ", cc_before, cc_after)
    assert approx_equal((cc_before, cc_after), (0.7, 0.95), eps=0.10)

    dc = local_mmm.deep_copy()
    dc.set_log(sys.stdout)
    dc.add_map_manager_by_id(model_sharpened_mm, 'external_map')
    cc_before = dc.map_map_cc(map_id='map_manager',
                              other_map_id='external_map')
    dc.external_sharpen(local_sharpen=True,
                        n_boxes=1,
                        n_bins=15,
                        map_id_external_map='external_map')
    cc_after = dc.map_map_cc(map_id='map_manager_scaled',
                             other_map_id='external_map')
    print("CC before, after external sharpen local n_boxes=1: ", cc_before,
          cc_after)
    assert approx_equal((cc_before, cc_after), (0.70, 0.95), eps=0.10)

    dc = local_mmm.deep_copy()
    dc.set_log(sys.stdout)
    dc._local_sharpen(map_id_scaled_list=['map_manager_scaled'],
                      map_id_to_be_scaled_list=['map_manager'],
                      n_bins=15,
                      n_boxes=1)
    cc = dc.map_model_cc()
    assert approx_equal(cc, 0.80, eps=0.1)

    # create a mask around density
    dc.create_mask_around_density(soft_mask=False)
    count = dc.get_map_manager_by_id('mask').map_data().count(1)
    print(count)
    assert 8000 < count < 14000
    dc.expand_mask(buffer_radius=2)
    count = dc.get_map_manager_by_id('mask').map_data().count(1)
    print(count)
    assert count == 1

    # Test mask and map info functions
    mmm1, mmm2 = get_map_model_managers()
    mmm1.create_mask_around_density(soft_mask=False)
    mask_info = mmm1.mask_info()
    map_info = mmm1.map_info()
    mask_info_by_id = mmm1.mask_info(mask_id='mask')
    map_info_by_id = mmm1.map_info(map_id='map_manager')
    assert mask_info() == mask_info_by_id()
    assert map_info() == map_info_by_id()
    assert (approx_equal(mask_info.fraction_marked, 0.207070707071)
            or approx_equal(mask_info.fraction_marked, 0.210091991342))

    assert approx_equal(map_info.fraction_above_sigma_cutoff, 0.0577876984127)

    # create a spherical mask around a point
    print("Spherical masks", )
    dc = mmm1.deep_copy()
    dc.mask_info()
    print(dc.mask_info().marked_points)
    assert dc.mask_info().marked_points in [9184, 9318]
    dc.create_spherical_mask()
    dc.mask_info()
    print(dc.mask_info().marked_points)
    assert dc.mask_info().marked_points in [1311, 1286]
    dc.create_spherical_mask(soft_mask_radius=1)
    dc.mask_info()
    print(dc.mask_info().marked_points)
    assert dc.mask_info().marked_points in [
        8990,
    ]
    dc.create_spherical_mask(soft_mask=False)
    dc.mask_info()
    print(dc.mask_info().marked_points)
    assert dc.mask_info().marked_points in [1566, 1458]
    dc.create_spherical_mask(mask_radius=4)
    dc.mask_info()
    print(dc.mask_info().marked_points)
    assert dc.mask_info().marked_points in [886, 914]
    dc.create_spherical_mask(soft_mask=False, mask_radius=4)
    dc.mask_info()
    print(dc.mask_info().marked_points)
    assert dc.mask_info().marked_points == 654
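
# Sketch (not part of the original test): superposing one manager's map onto
# another, as exercised above; mmm1 and mmm2 stand for any two
# map_model_managers whose models can be superposed.
def superpose_map(mmm1, mmm2, out_file='superposed.ccp4'):
    rt_info = mmm1.shift_aware_rt_to_superpose_other(mmm2).working_rt_info(
        from_obj=mmm2, to_obj=mmm1)
    new_mm = mmm1.superposed_map_manager_from_other(other=mmm2,
                                                    working_rt_info=rt_info)
    new_mm.write_map(out_file)
    return new_mm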
Example 17
def exercise_around_model():

    from cctbx.maptbx.box import make_list_symmetric
    a = [3, 4, 5, 3, 9, 1, 6, 3, 2, 5, 6, 6]
    new_a = make_list_symmetric(a)
    from scitbx.array_family import flex
    aa = flex.double(a)
    new_aa = flex.double(new_a)
    assert (aa.size(), new_aa.size()) == (12, 12)
    assert aa.min_max_mean().mean == new_aa.min_max_mean().mean
    print(a, new_a)

    a = [3, 4, 5, 3, 8, 1, 6, 7, 3, 2, 5, 6, 6]
    new_a = make_list_symmetric(a)
    from scitbx.array_family import flex
    aa = flex.double(a)
    new_aa = flex.double(new_a)
    print(a, new_a)
    assert (aa.size(), new_aa.size()) == (13, 13)
    assert aa.min_max_mean().mean == new_aa.min_max_mean().mean

    mam = get_random_structure_and_map(use_static_structure=True)

    map_data_orig = mam.mm.map_data().deep_copy()
    sites_frac_orig = mam.model.get_sites_frac().deep_copy()
    sites_cart_orig = mam.model.get_sites_cart().deep_copy()
    cs_orig = mam.model.crystal_symmetry()

    box = cctbx.maptbx.box.around_model(map_manager=mam.mm,
                                        model=mam.model.deep_copy(),
                                        box_cushion=10,
                                        wrapping=True)
    new_mm1 = box.map_manager()
    new_mm2 = box.apply_to_map(map_manager=mam.mm.deep_copy())
    assert approx_equal(new_mm1.map_data(), new_mm2.map_data())

    new_model1 = box.model()
    new_model2 = box.apply_to_model(model=mam.model.deep_copy())
    assert new_model1.crystal_symmetry().is_similar_symmetry(
        new_model2.crystal_symmetry())
    assert new_model1.crystal_symmetry().is_similar_symmetry(
        box.crystal_symmetry)

    assert approx_equal(new_model1.get_sites_cart()[0],
                        (19.705233333333336, 15.631525, 13.5040625))
    # make sure things did change
    assert new_mm2.map_data().size() != map_data_orig.size()

    # make sure things are changed in-place and are therefore different from start
    assert box.map_manager().map_data().size() != map_data_orig.size()
    assert box.model().get_sites_frac() != sites_frac_orig
    assert box.model().get_sites_cart() != sites_cart_orig
    assert (not cs_orig.is_similar_symmetry(box.model().crystal_symmetry()))

    # make sure box, model and map_manager remember original crystal symmetry
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())

    assert approx_equal(
        box.model().shift_cart(),
        [5.229233333333334, 5.061524999999999, 5.162062499999999])

    assert box.model().unit_cell_crystal_symmetry().is_similar_symmetry(
        cs_orig)
    assert (not box.model().crystal_symmetry().is_similar_symmetry(cs_orig))

    assert approx_equal(
        box.model()._figure_out_hierarchy_to_output(
            do_not_shift_back=False).atoms().extract_xyz()[0],
        (14.476, 10.57, 8.342))

    # make sure we can stack shifts
    sel = box.model().selection("resseq 219:219")
    m_small = box.model().select(selection=sel)

    assert approx_equal(box.model().shift_cart(), m_small.shift_cart())

    # Now box again:
    small_box = cctbx.maptbx.box.around_model(map_manager=mam.mm,
                                              model=mam.model.deep_copy(),
                                              box_cushion=5,
                                              wrapping=True)

    # Make sure nothing was zeroed out in this map (wrapping = True)
    assert new_mm1.map_data().as_1d().count(0) == 0

    # Now without wrapping...
    box = cctbx.maptbx.box.around_model(map_manager=mam.mm,
                                        model=mam.model.deep_copy(),
                                        box_cushion=10,
                                        wrapping=False)

    # make sure things are changed in-place and are therefore different from start
    assert box.map_manager().map_data().size() != map_data_orig.size()
    assert box.model().get_sites_frac() != sites_frac_orig
    assert box.model().get_sites_cart() != sites_cart_orig
    assert (not cs_orig.is_similar_symmetry(box.model().crystal_symmetry()))

    # make sure box, model and map_manager remember original crystal symmetry
    assert cs_orig.is_similar_symmetry(
        box.model().unit_cell_crystal_symmetry())
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())

    assert box.map_manager().map_data().as_1d().count(0) == 81264

    # Now specify bounds directly
    new_box = cctbx.maptbx.box.with_bounds(map_manager=mam.mm.deep_copy(),
                                           lower_bounds=(-7, -7, -7),
                                           upper_bounds=(37, 47, 39),
                                           wrapping=False)

    new_model = new_box.apply_to_model(mam.model.deep_copy())
    # make sure things are changed in-place and are therefore different from start
    assert new_box.map_manager().map_data().size() != map_data_orig.size()
    assert new_model.get_sites_frac() != sites_frac_orig
    assert new_model.get_sites_cart() != sites_cart_orig
    assert (not cs_orig.is_similar_symmetry(new_model.crystal_symmetry()))

    # make sure box, model and map_manager remember original crystal symmetry
    assert cs_orig.is_similar_symmetry(
        box.model().unit_cell_crystal_symmetry())
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())

    assert box.map_manager().map_data().as_1d().count(0) == 81264

    # Now specify bounds directly and init with model
    box = cctbx.maptbx.box.with_bounds(map_manager=mam.mm.deep_copy(),
                                       lower_bounds=(-7, -7, -7),
                                       upper_bounds=(37, 47, 39),
                                       wrapping=False,
                                       model=mam.model.deep_copy())

    new_model = box.model()
    # make sure things are changed in-place and are therefore different from start
    assert box.map_manager().map_data().size() != map_data_orig.size()
    assert new_model.get_sites_frac() != sites_frac_orig
    assert new_model.get_sites_cart() != sites_cart_orig
    assert (not cs_orig.is_similar_symmetry(new_model.crystal_symmetry()))

    # make sure box, model and map_manager remember original crystal symmetry
    assert cs_orig.is_similar_symmetry(
        box.model().unit_cell_crystal_symmetry())
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())

    assert box.map_manager().map_data().as_1d().count(0) == 81264

    # Extract using around_unique

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_ccp4 = os.path.join(data_dir, 'data', 'D7.ccp4')
    data_ncs = os.path.join(data_dir, 'data', 'D7.ncs_spec')
    data_seq = os.path.join(data_dir, 'data', 'D7.seq')

    dm = DataManager(['real_map', 'phil', 'ncs_spec', 'sequence'])
    dm.process_real_map_file(data_ccp4)
    mm = dm.get_real_map(data_ccp4)

    dm.process_ncs_spec_file(data_ncs)
    ncs_obj = dm.get_ncs_spec(data_ncs)

    dm.process_sequence_file(data_seq)
    sequence = dm.get_sequence(data_seq)
    sequence_as_text = sequence[0].sequence

    map_model_mgr = map_model_manager(map_manager=mm, ncs_object=ncs_obj)
    mm = map_model_mgr.map_manager()
    mm.show_summary()

    box = cctbx.maptbx.box.around_unique(
        map_manager=mm.deep_copy(),
        resolution=3,
        box_cushion=1,
        sequence=sequence_as_text,
        soft_mask=True,
        wrapping=False,
    )

    box.map_manager().write_map('new_box.ccp4')

    # run again via the map_model_manager

    map_model_mgr.box_all_maps_around_unique_and_shift_origin(
        resolution=3,
        box_cushion=1,
        sequence=sequence_as_text,
        soft_mask=True,
    )

    # Get bounds around density
    box = cctbx.maptbx.box.around_density(map_manager=mam.mm.deep_copy(),
                                          wrapping=False)

    # Create a mask

    mm = mam.mm.deep_copy()

    mm.create_mask_around_density(
        resolution=3,
        molecular_mass=2100,
        sequence="GAVAGA",
        solvent_content=0.5,
    )
    mask_mm = mm.get_mask_as_map_manager()
    assert approx_equal(
        (mask_mm.map_data().count(0), mask_mm.map_data().count(1),
         mask_mm.map_data().size()), (19184, 19216, 38400))

    # Box around the mask
    box = cctbx.maptbx.box.around_mask(
        map_manager=mam.mm.deep_copy(),
        mask_as_map_manager=mask_mm,
        wrapping=False,
    )

    assert (box.gridding_first, box.gridding_last) == ([0, 0, 0], [29, 39, 31])

    #
    # IF you are about to change this - THINK TWICE!
    #
    import inspect
    r = inspect.getargspec(cctbx.maptbx.box.around_model.__init__)
    assert r.args == [
        'self', 'map_manager', 'model', 'box_cushion', 'wrapping',
        'model_can_be_outside_bounds', 'log'
    ], r.args
    r = inspect.getargspec(cctbx.maptbx.box.with_bounds.__init__)
    assert r.args == [
        'self', 'map_manager', 'lower_bounds', 'upper_bounds', 'model',
        'wrapping', 'model_can_be_outside_bounds', 'log'
    ], r.args

    print("OK")
Example 18
def exercise(file_name=None,
             pdb_file_name=None,
             map_file_name=None,
             split_pdb_file_name=None,
             ncs_pdb_file_name=None,
             out=sys.stdout):
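    '''Exercise map_manager and map_model_manager: reading maps and models,
       boxing, masking, splitting into boxes/chains/segments, merging, and
       writing files in shifted and original positions.'''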

    # Set up source data

    if not os.path.isfile(file_name):
        raise Sorry("Missing the file: %s" % (file_name) + "\n")

    print("Reading from %s" % (file_name))
    from iotbx.map_manager import map_manager
    m = map_manager(file_name)

    print("Header information from %s:" % (file_name))
    m.show_summary(out=out)

    map_data = m.map_data().deep_copy()
    crystal_symmetry = m.crystal_symmetry()
    unit_cell_parameters = m.crystal_symmetry().unit_cell().parameters()

    print("\nMap origin: %s Extent %s" % (map_data.origin(), map_data.all()))
    print("Original unit cell, not just unit cell of part in this file): %s" %
          (str(unit_cell_parameters)))

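    # Look up the map value at a single grid point, shifting the point by the
    #   origin if the map origin is not at (0, 0, 0)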
    grid_point = (1, 2, 3)
    if map_data.origin() != (0, 0, 0):  # make sure it is inside
        from scitbx.matrix import col
        grid_point = tuple(col(grid_point) + col(map_data.origin()))
    print("\nValue of map_data at grid point %s: %.3f" %
          (str(grid_point), map_data[grid_point]))
    print("Map data is %s" % (type(map_data)))

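    # Interpolate the map at an arbitrary Cartesian position by converting to
    #   fractional coordinates first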
    random_position = (10, 5, 7.9)
    point_frac = crystal_symmetry.unit_cell().fractionalize(random_position)
    value_at_point_frac = map_data.eight_point_interpolation(point_frac)
    print("Value of map_data at coordinates %s: %.3f" %
          (str(random_position), value_at_point_frac))

    map_data_as_float = map_data.as_float()
    print("Map data as float is %s" % (type(map_data_as_float)))

    # make a little model
    sites_cart = flex.vec3_double(((8, 10, 12), (14, 15, 16)))
    model = model_manager.from_sites_cart(atom_name=' CA ',
                                          resname='ALA',
                                          chain_id='A',
                                          b_iso=30.,
                                          occ=1.,
                                          scatterer='C',
                                          sites_cart=sites_cart,
                                          crystal_symmetry=crystal_symmetry)

    # Move map and a model to place origin at (0, 0, 0)
    # map data is new copy but model is shifted in place.

    from iotbx.map_model_manager import map_model_manager
    mam = map_model_manager(
        map_manager=m,
        model=model.deep_copy(),
    )

    # Read in map and model and split up
    dm = DataManager()
    aa = dm.get_map_model_manager(model_file=pdb_file_name,
                                  map_files=map_file_name)
    cc = dm.get_map_model_manager(model_file=ncs_pdb_file_name,
                                  map_files=map_file_name)
    bb = dm.get_map_model_manager(model_file=split_pdb_file_name,
                                  map_files=map_file_name)

    # Merge by models
    a = aa.deep_copy()
    n_starting_models = len(list(a.model().get_hierarchy().models()))
    box_info = a.split_up_map_and_model_by_boxes()
    # Change the hierarchy in a box
    small_hierarchy = box_info.mmm_list[0].model().get_hierarchy()
    # delete an atom
    for m in small_hierarchy.models():
        for chain in m.chains()[:1]:
            chain.remove_residue_group(i=0)
    box_info.mmm_list[0].model().reset_after_changing_hierarchy()  # REQUIRED
    # Put everything back together
    a.merge_split_maps_and_models(box_info=box_info,
                                  allow_changes_in_hierarchy=True)
    n_merged_models = len(list(a.model().get_hierarchy().models()))
    assert n_starting_models == 1
    assert n_merged_models == 7

    # Merge in various ways
    for selection_method in [
            'by_ncs_groups', 'by_chain', 'by_segment', 'supplied_selections',
            'boxes'
    ]:
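        # Only 'boxes' loops over both values of the boxing options;
        # only 'by_chain' loops over masking on/off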
        if selection_method == 'boxes':
            choices = [True, False]
        else:
            choices = [True]
        if selection_method == 'by_chain':
            mask_choices = [True, False]
        else:
            mask_choices = [False]
        for select_final_boxes_based_on_model in choices:
            for skip_empty_boxes in choices:
                for mask_choice in mask_choices:
                    if selection_method == 'by_ncs_groups':
                        a = cc.deep_copy()
                    if mask_choice:  # use split model
                        a = bb.deep_copy()
                    else:  # usual
                        a = aa.deep_copy()
                    print("\nRunning split_up_map_and_model with \n" +
                          "select_final_boxes_based_on_model=" +
                          "%s   skip_empty_boxes=%s selection_method=%s" %
                          (select_final_boxes_based_on_model, skip_empty_boxes,
                           selection_method))

                    if selection_method == 'by_chain':
                        print("Mask around unused atoms: %s" % (mask_choice))
                        box_info = a.split_up_map_and_model_by_chain(
                            mask_around_unselected_atoms=mask_choice)
                    elif selection_method == 'by_segment':
                        box_info = a.split_up_map_and_model_by_segment()
                    elif selection_method == 'supplied_selections':
                        selection = a.model().selection('all')
                        box_info = a.split_up_map_and_model_by_supplied_selections(
                            selection_list=[selection])
                    elif selection_method == 'by_ncs_groups':
                        box_info = a.split_up_map_and_model_by_ncs_groups()
                    elif selection_method == 'boxes':
                        box_info = a.split_up_map_and_model_by_boxes(
                            skip_empty_boxes=skip_empty_boxes,
                            select_final_boxes_based_on_model=
                            select_final_boxes_based_on_model)
                    assert box_info is not None
                    print(selection_method, skip_empty_boxes,
                          len(box_info.selection_list),
                          box_info.selection_list[0].count(True))
                    assert (selection_method, skip_empty_boxes,
                            len(box_info.selection_list),
                            box_info.selection_list[0].count(True)) in [
                                ('by_chain', True, 3, 19),
                                (
                                    "by_chain",
                                    True,
                                    1,
                                    86,
                                ),
                                (
                                    "by_segment",
                                    True,
                                    1,
                                    86,
                                ),
                                (
                                    "supplied_selections",
                                    True,
                                    1,
                                    86,
                                ),
                                ("by_ncs_groups", True, 1, 86),
                                ("boxes", True, 7, 9),
                                (
                                    "boxes",
                                    False,
                                    12,
                                    0,
                                ),
                                (
                                    "boxes",
                                    True,
                                    13,
                                    1,
                                ),
                                (
                                    "boxes",
                                    False,
                                    36,
                                    0,
                                ),
                            ], 'failed to find %s %s %s %s' % (
                                selection_method, skip_empty_boxes,
                                len(box_info.selection_list),
                                box_info.selection_list[0].count(True))

                    # Change the coordinates in one box
                    small_model = box_info.mmm_list[0].model()
                    small_sites_cart = small_model.get_sites_cart()
                    from scitbx.matrix import col
                    small_sites_cart += col((1, 0, 0))
                    small_model.set_crystal_symmetry_and_sites_cart(
                        sites_cart=small_sites_cart,
                        crystal_symmetry=small_model.crystal_symmetry())
                    # Put everything back together
                    a.merge_split_maps_and_models(box_info=box_info)

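    # Box the maps and model around the model and shift the origin to (0, 0, 0)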
    mam.box_all_maps_around_model_and_shift_origin()

    shifted_crystal_symmetry = mam.model().crystal_symmetry()
    shifted_model = mam.model()
    shifted_map_data = mam.map_data()

    print("\nOriginal map origin (grid units):", map_data.origin())
    print("Original model:\n", model.model_as_pdb())

    print("Shifted map origin:", shifted_map_data.origin())
    print("Shifted model:\n", shifted_model.model_as_pdb())

    # Save the map_model manager
    mam_dc = mam.deep_copy()
    print("dc", mam)
    print("dc mam_dc", mam_dc)

    # Mask map around atoms
    mam = mam_dc.deep_copy()
    print("dc mam_dc dc", mam_dc)
    print(mam)
    mam.mask_all_maps_around_atoms(mask_atoms_atom_radius=3,
                                   set_outside_to_mean_inside=True,
                                   soft_mask=False)
    print("Mean before masking", mam.map_data().as_1d().min_max_mean().mean)
    assert approx_equal(mam.map_data().as_1d().min_max_mean().mean,
                        -0.0585683621466)
    print("Max before masking", mam.map_data().as_1d().min_max_mean().max)
    assert approx_equal(mam.map_data().as_1d().min_max_mean().max,
                        -0.0585683621466)

    # Mask map around atoms, with soft mask
    mam = mam_dc.deep_copy()
    mam.mask_all_maps_around_atoms(mask_atoms_atom_radius=3,
                                   soft_mask=True,
                                   soft_mask_radius=5,
                                   set_outside_to_mean_inside=True)
    print("Mean after first masking",
          mam.map_data().as_1d().min_max_mean().mean)
    assert approx_equal(mam.map_data().as_1d().min_max_mean().mean,
                        0,
                        eps=0.05)
    print("Max after first masking", mam.map_data().as_1d().min_max_mean().max)
    assert approx_equal(mam.map_data().as_1d().min_max_mean().max,
                        0.10,
                        eps=0.05)

    # Mask map around atoms again
    mam.mask_all_maps_around_atoms(mask_atoms_atom_radius=3,
                                   set_outside_to_mean_inside=True,
                                   soft_mask=False)
    print("Mean after second masking",
          mam.map_data().as_1d().min_max_mean().mean)
    assert approx_equal(mam.map_data().as_1d().min_max_mean().mean, 0, eps=0.1)
    print("Max after second masking",
          mam.map_data().as_1d().min_max_mean().max)
    assert approx_equal(mam.map_data().as_1d().min_max_mean().max, 0, eps=0.1)

    # Mask around edges
    mam = mam_dc.deep_copy()
    mam.mask_all_maps_around_edges(soft_mask_radius=3)
    print("Mean after masking edges",
          mam.map_data().as_1d().min_max_mean().mean)
    assert approx_equal(mam.map_data().as_1d().min_max_mean().mean,
                        0,
                        eps=0.05)
    print("Max after masking edges", mam.map_data().as_1d().min_max_mean().max)
    assert approx_equal(mam.map_data().as_1d().min_max_mean().max,
                        0.20,
                        eps=0.05)

    print(
        "\nWriting map_data and model in shifted position (origin at 0, 0, 0)")

    output_file_name = 'shifted_map.ccp4'
    print("Writing to %s" % (output_file_name))
    mrcfile.write_ccp4_map(
        file_name=output_file_name,
        crystal_symmetry=shifted_crystal_symmetry,
        map_data=shifted_map_data,
    )

    output_file_name = 'shifted_model.pdb'
    f = open(output_file_name, 'w')
    print(shifted_model.model_as_pdb(), file=f)
    f.close()

    print("\nWriting map_data and model in original position (origin at %s)" %
          (str(mam.map_manager().origin_shift_grid_units)))

    output_file_name = 'new_map_original_position.ccp4'
    print("Writing to %s" % (output_file_name))
    mrcfile.write_ccp4_map(
        file_name=output_file_name,
        crystal_symmetry=shifted_crystal_symmetry,
        map_data=shifted_map_data,
        origin_shift_grid_units=mam.map_manager().origin_shift_grid_units)
    print(shifted_model.model_as_pdb())
    output_pdb_file_name = 'new_model_original_position.pdb'
    f = open(output_pdb_file_name, 'w')
    print(shifted_model.model_as_pdb(), file=f)
    f.close()

    # Write as mmcif
    output_cif_file_name = 'new_model_original_position.cif'
    f = open(output_cif_file_name, 'w')
    print(shifted_model.model_as_mmcif(), file=f)
    f.close()

    # Read the new map and model
    import iotbx.pdb
    new_model = model_manager(model_input=iotbx.pdb.input(
        source_info=None,
        lines=flex.split_lines(open(output_pdb_file_name).read())),
                              crystal_symmetry=crystal_symmetry)
    assert new_model.model_as_pdb() == model.model_as_pdb()

    new_model_from_cif = model_manager(model_input=iotbx.pdb.input(
        source_info=None,
        lines=flex.split_lines(open(output_cif_file_name).read())),
                                       crystal_symmetry=crystal_symmetry)
    assert new_model_from_cif.model_as_pdb() == model.model_as_pdb()

    # Read and box the original file again in case we modified m in any
    #   previous tests
    m = map_manager(file_name)
    mam = map_model_manager(model=model.deep_copy(), map_manager=m)
    mam.box_all_maps_around_model_and_shift_origin()

    file_name = output_file_name
    print("Reading from %s" % (file_name))
    new_map = iotbx.mrcfile.map_reader(file_name=file_name, verbose=False)
    new_map.data = new_map.data.shift_origin()
    print("Header information from %s:" % (file_name))
    new_map.show_summary(out=out)
    assert new_map.map_data().origin() == mam.map_manager().map_data().origin()
    assert new_map.crystal_symmetry().is_similar_symmetry(
        mam.map_manager().crystal_symmetry())

    # make a map_model_manager with lots of maps and model and ncs
    from mmtbx.ncs.ncs import ncs
    ncs_object = ncs()
    ncs_object.set_unit_ncs()
    mam = map_model_manager(
        map_manager=m,
        ncs_object=ncs_object,
        map_manager_1=m.deep_copy(),
        map_manager_2=m.deep_copy(),
        extra_model_list=[model.deep_copy(),
                          model.deep_copy()],
        extra_model_id_list=["model_1", "model_2"],
        extra_map_manager_list=[m.deep_copy(), m.deep_copy()],
        extra_map_manager_id_list=["extra_1", "extra_2"],
        model=model.deep_copy(),
    )

    # make a map_model_manager with lots of maps and model and ncs and run
    # with wrapping and ignore_symmetry_conflicts on
    from mmtbx.ncs.ncs import ncs
    ncs_object = ncs()
    ncs_object.set_unit_ncs()
    m.set_ncs_object(ncs_object.deep_copy())
    mam2 = map_model_manager(
        map_manager=m.deep_copy(),
        ncs_object=ncs_object.deep_copy(),
        map_manager_1=m.deep_copy(),
        map_manager_2=m.deep_copy(),
        extra_model_list=[model.deep_copy(),
                          model.deep_copy()],
        extra_model_id_list=["model_1", "model_2"],
        extra_map_manager_list=[m.deep_copy(), m.deep_copy()],
        extra_map_manager_id_list=["extra_1", "extra_2"],
        model=model.deep_copy(),
        ignore_symmetry_conflicts=True,
        wrapping=m.wrapping(),
    )
    assert mam.map_manager().is_similar(mam2.map_manager())
    assert mam.map_manager().is_similar(mam2.map_manager_1())
    for m in mam2.map_managers():
        assert mam.map_manager().is_similar(m)
    assert mam.model().shift_cart() == mam2.model().shift_cart()
    assert mam.model().shift_cart() == mam2.get_model_by_id(
        'model_2').shift_cart()

    print("OK")


def tst_01(log = sys.stdout):
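  '''Exercise process_predicted_model: conversions between rmsd, lddt and
     B values, removal of low-confidence residues, and splitting models
     into compact domains.'''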


  # Check calculations of conversion between rmsd, lddt, and B values
  print("\nChecking conversions between rmsd, lddt and B-values", file = log)
  for maximum_rmsd, minimum_lddt, target_b in [
       (1.5, None, 59.2175263686),
       (None,0.7,59.2175263686),
       (1.5,0.7,59.2175263686),
       (1.0, None, 26.3189006083),
       (None,0.5,293.306328196),]:
    print()
    cutoff_b = get_cutoff_b_value(
      maximum_rmsd,
      minimum_lddt,
      log = log)
    print("maximum_rmsd: %s min lddt %s Cutoff B:  %.2f" %(
     maximum_rmsd, minimum_lddt,
     cutoff_b), file = log)
    assert approx_equal(cutoff_b, target_b)


  # Read in alphafold model and get LDDT from B-value field
  print("\nReading in alphafold model with lddt values in B-value field",
    file = log)

  dm = DataManager()
  dm.set_overwrite(True)
  m = dm.get_model(model_file)
  pae_m = dm.get_model(pae_model_file)
  pae_matrix = parse_pae_file(pae_file)

  lddt_values = m.get_hierarchy().atoms().extract_b().deep_copy()
  print("\nLDDT mean:",lddt_values.min_max_mean().mean)
  assert approx_equal(lddt_values.min_max_mean().mean, 82.5931111111)

  # Multiply lddt_values by 0.01 (fractional)
  fractional_lddt = lddt_values * 0.01

  #  Convert lddt to b
  b_values = get_b_values_from_lddt(lddt_values)
  print("B-value mean:",b_values.min_max_mean().mean)
  assert approx_equal(b_values.min_max_mean().mean, 24.7254093338)

  # Convert  lddt to rmsd
  rmsd_values = get_rmsd_from_lddt(lddt_values)
  print("RMSD mean:",rmsd_values.min_max_mean().mean)
  assert approx_equal(rmsd_values.min_max_mean().mean, 0.93559254135)

  # use process_predicted_model to convert lddt or rmsd to B return with
  #  mark_atoms_to_ignore_with_occ_zero

  print("\nConverting lddt to B values and using mark_atoms_to_ignore_with_occ_zero", file = log)
  params.process_predicted_model.maximum_fraction_close = 0.5
  params.process_predicted_model.b_value_field_is = 'lddt'
  params.process_predicted_model.remove_low_confidence_residues = True
  params.process_predicted_model.maximum_rmsd = 1.5
  params.process_predicted_model.split_model_by_compact_regions = True
  params.process_predicted_model.maximum_domains = 3

  model_info = process_predicted_model(m, params, mark_atoms_to_keep_with_occ_one= True)
  models = model_info.model_list
  for mm,n1,n2 in zip(models,[84,88],[88,84]):
    model_occ_values = mm.get_hierarchy().atoms().extract_occ()
    assert model_occ_values.count(1) == n1
    assert model_occ_values.count(0) == n2

  # use process_predicted_model to convert lddt or rmsd to B

  print("\nConverting lddt to B values", file = log)
  params.process_predicted_model.maximum_fraction_close = 0.5
  params.process_predicted_model.b_value_field_is = 'lddt'
  params.process_predicted_model.remove_low_confidence_residues = False
  params.process_predicted_model.split_model_by_compact_regions = False
  params.process_predicted_model.input_lddt_is_fractional = None

  model_info = process_predicted_model(m, params)
  model = model_info.model
  model_b_values = model.get_hierarchy().atoms().extract_b()
  assert approx_equal(b_values, model_b_values, eps = 0.02) # come back rounded


  print("\nConverting fractional lddt to B values", file = log)
  ph = model.get_hierarchy().deep_copy()
  ph.atoms().set_b(fractional_lddt)
  test_model = model.as_map_model_manager().model_from_hierarchy(ph,
     return_as_model = True)
  params.process_predicted_model.maximum_fraction_close = 0.5
  params.process_predicted_model.b_value_field_is = 'lddt'
  params.process_predicted_model.remove_low_confidence_residues = False
  params.process_predicted_model.split_model_by_compact_regions = False
  params.process_predicted_model.input_lddt_is_fractional = None
  model_info = process_predicted_model(test_model, params)
  model = model_info.model
  model_b_values = model.get_hierarchy().atoms().extract_b()
  assert approx_equal(b_values, model_b_values, eps = 3) # come back very rounded

  ph = model.get_hierarchy().deep_copy()
  ph.atoms().set_b(rmsd_values)
  test_model = model.as_map_model_manager().model_from_hierarchy(ph,
     return_as_model = True)

  print("\nConverting rmsd to B values", file = log)
  params.process_predicted_model.maximum_fraction_close = 0.5
  params.process_predicted_model.b_value_field_is = 'rmsd'
  params.process_predicted_model.remove_low_confidence_residues = False
  params.process_predicted_model.split_model_by_compact_regions = False
  params.process_predicted_model.input_lddt_is_fractional = None
  model_info = process_predicted_model(test_model, params)
  model = model_info.model
  model_b_values = model.get_hierarchy().atoms().extract_b()
  assert approx_equal(b_values, model_b_values, eps = 0.5) # come back rounded

  print("B-values > 59: %s of %s" %(
     (model_b_values > 59).count(True), model_b_values.size()), file = log)

  print("\nConverting rmsd to B values and selecting rmsd < 1.5", file = log)
  params.process_predicted_model.maximum_fraction_close = 0.5
  params.process_predicted_model.b_value_field_is = 'rmsd'
  params.process_predicted_model.remove_low_confidence_residues = True
  params.process_predicted_model.maximum_rmsd = 1.5
  params.process_predicted_model.split_model_by_compact_regions = False
  params.process_predicted_model.input_lddt_is_fractional = None

  model_info = process_predicted_model(test_model, params)
  model = model_info.model
  print("Residues before: %s   After: %s " %(
    test_model.get_hierarchy().overall_counts().n_residues,
    model.get_hierarchy().overall_counts().n_residues,), file = log)

  # Check splitting model into domains
  print("\nSplitting model into domains", file = log)
  model_info = split_model_into_compact_units(model,
      maximum_fraction_close = 0.5, log = log)

  chainid_list = model_info.chainid_list
  print("Segments found: %s" %(" ".join(chainid_list)), file = log)
  assert len(chainid_list) == 2

  # Check processing and splitting model into domains
  print("\nProcessing and splitting model into domains", file = log)

  params.process_predicted_model.maximum_fraction_close = 0.5
  params.process_predicted_model.b_value_field_is = 'lddt'
  params.process_predicted_model.remove_low_confidence_residues = True
  params.process_predicted_model.maximum_rmsd = 1.5
  params.process_predicted_model.split_model_by_compact_regions = True
  params.process_predicted_model.maximum_domains = 3
  model_info = process_predicted_model(m,  params, log = log)

  chainid_list = model_info.chainid_list
  print("Segments found: %s" %(" ".join(chainid_list)), file = log)
  assert len(chainid_list) == 2


  mmm = model_info.model.as_map_model_manager()
  mmm.write_model('model_with_groupings.pdb')
  residue_count = []
  expected_residue_count = [84, 88]
  for chainid in chainid_list:
    selection_string = "chain %s" %(chainid)
    ph = model_info.model.get_hierarchy()
    asc1 = ph.atom_selection_cache()
    sel = asc1.selection(selection_string)
    m1 = model_info.model.select(sel)
    n = m1.get_hierarchy().overall_counts().n_residues
    print("Residues in %s: %s" %(
      selection_string, n),
       file = log)
    residue_count.append(n)
  assert expected_residue_count == residue_count

  # Now process and use pae model and pae model file
  print("\nProcessing and splitting model into domains with pae", file = log)


  params.process_predicted_model.maximum_fraction_close = 0.5
  params.process_predicted_model.b_value_field_is = 'lddt'
  params.process_predicted_model.remove_low_confidence_residues = True
  params.process_predicted_model.maximum_rmsd = 0.7
  params.process_predicted_model.split_model_by_compact_regions = True
  params.process_predicted_model.maximum_domains = 3
  params.process_predicted_model.pae_power= 2
  model_info = process_predicted_model(pae_m,  params, pae_matrix = pae_matrix,
     log = log)
Example 20
    def __init__(self,
                 program_class,
                 custom_process_arguments=None,
                 logger=None,
                 *args,
                 **kwargs):
        '''
        Set up an argument parser for a CCTBX program class: determine the
        program name, prepare the PHIL output file names, create the
        DataManager, and build the master and working PHIL scopes.
        '''
        # program name
        # Order of precedence:
        # 1) ProgramTemplate.program_name
        # 2) LIBTBX_DISPATCHER_NAME
        # 3) Calling command
        if sys.argv:
            self.prog = os.getenv('LIBTBX_DISPATCHER_NAME', sys.argv[0])
        else:
            self.prog = 'unknown.unknown'
        if program_class.program_name is not None:
            self.prog = program_class.program_name
        self.prefix = self.prog.split('.')[-1]

        # PHIL filenames
        self.data_filename = self.prefix + '_data.eff'
        self.modified_filename = self.prefix + '_modified.eff'
        self.all_filename = self.prefix + '_all.eff'

        # terminal width
        self.text_width = 79

        # print header
        border = '-' * self.text_width
        description = border + program_class.description + border
        epilog = border + program_class.epilog
        super(CCTBXParser, self).__init__(
            prog=self.prog,
            description=description,
            epilog=epilog,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            *args,
            **kwargs)

        # default values
        self.program_class = program_class
        self.custom_process_arguments = custom_process_arguments
        self.logger = logger
        if (self.logger is None):
            self.logger = logging.getLogger('main')
        self.data_manager = DataManager(datatypes=program_class.datatypes,
                                        logger=self.logger)

        # add PHIL converters if available
        if (len(program_class.phil_converters) > 0):
            iotbx.phil.default_converter_registry = \
              libtbx.phil.extended_converter_registry(
                additional_converters=program_class.phil_converters,
                base_registry=iotbx.phil.default_converter_registry)

        # set up master and working PHIL scopes
        self.master_phil = iotbx.phil.parse(program_class.master_phil_str,
                                            process_includes=True)
        required_output_phil = iotbx.phil.parse(
            ProgramTemplate.output_phil_str)
        self.master_phil.adopt_scope(required_output_phil)
        self.working_phil = None

        self.add_default_options()
Example 21
def test_01():
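    '''Exercise map_manager: origin shifts, reading and writing, copies,
       filtering, map symmetry, resolution estimation and round trips
       through Fourier coefficients.'''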

    # Source data (map and model)

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_ccp4 = os.path.join(data_dir, 'data', 'non_zero_origin_map.ccp4')
    data_pdb = os.path.join(data_dir, 'data', 'non_zero_origin_model.pdb')

    # Read in map data with data_manager
    dm = DataManager(['real_map'])
    dm.set_overwrite(True)

    # Next step uses map_manager to do the actual reading
    dm.process_real_map_file(data_ccp4)
    mm = dm.get_real_map()

    # Shift the origin of the map; starts at (100,100,100)
    print(mm.map_data().origin())
    assert mm.map_data().origin() == (100, 100, 100)
    assert mm.origin_shift_grid_units == (0, 0, 0)
    mm.shift_origin()
    assert mm.map_data().origin() == (0, 0, 0)
    assert mm.origin_shift_grid_units == (100, 100, 100)
    mm.show_summary()

    # test cc_to_other_map
    assert mm.cc_to_other_map_manager(mm) == 1

    # test writing and reading file
    dm.write_real_map_file(mm,
                           filename='test_map_manager.ccp4',
                           overwrite=True)
    dm.process_real_map_file('test_map_manager.ccp4')
    new_mm = dm.get_real_map('test_map_manager.ccp4')
    os.remove('test_map_manager.ccp4')
    new_mm.shift_origin()
    # Check whether gridding and crystal_symmetry are similar for mm, new_mm
    assert new_mm.is_similar(mm)
    assert approx_equal(new_mm.map_data()[3125], mm.map_data()[3125])

    # test writing and reading file without shifting origin
    dm = DataManager(['real_map'])
    dm.set_overwrite(True)
    dm.process_real_map_file(data_ccp4)
    mm = dm.get_real_map()
    mm.show_summary()
    dm.write_real_map_file(mm,
                           filename='test_map_manager.ccp4',
                           overwrite=True)
    new_mm = map_manager('test_map_manager.ccp4')
    assert (new_mm.is_similar(mm))
    new_mm.shift_origin()
    assert (not new_mm.is_similar(mm))

    # get map_data
    dm = DataManager(['real_map'])
    dm.set_overwrite(True)
    dm.process_real_map_file(data_ccp4)
    mm = dm.get_real_map()
    mm.shift_origin()
    map_data = mm.map_data()
    assert approx_equal(map_data[15, 10, 19], 0.38, eps=0.01)

    # get crystal_symmetry
    cs = mm.crystal_symmetry()
    assert approx_equal(cs.unit_cell().parameters()[0], 22.41, eps=0.01)

    # and full cell symmetry
    full_cs = mm.unit_cell_crystal_symmetry()
    assert approx_equal(full_cs.unit_cell().parameters()[0],
                        149.4066,
                        eps=0.01)

    # write map directly:
    mm.write_map('test_direct.ccp4')

    # read back directly
    new_mm = map_manager('test_direct.ccp4')
    assert (not new_mm.is_similar(mm))

    new_mm.shift_origin()
    assert mm.is_similar(new_mm)
    assert approx_equal(new_mm.map_data()[3125], mm.map_data()[3125])

    # deep_copy
    new_mm = mm.deep_copy()
    assert new_mm.is_similar(mm)
    assert approx_equal(new_mm.map_data()[3125], mm.map_data()[3125])

    # deep_copy a map without shifting origin
    # Make a DataManager that can write a map coeffs file too
    dm = DataManager(['miller_array', 'real_map'])
    dm.set_overwrite(True)
    dm.process_real_map_file(data_ccp4)
    omm = dm.get_real_map()
    omm.show_summary()
    new_omm = omm.deep_copy()
    assert new_omm.is_similar(omm)
    assert (not new_omm.is_similar(mm))

    # customized_copy
    new_mm = mm.customized_copy(map_data=mm.map_data().deep_copy())
    assert new_mm.is_similar(mm)

    # Initialize with parameters
    mm_para = map_manager(
        unit_cell_grid=mm.unit_cell_grid,
        unit_cell_crystal_symmetry=mm.unit_cell_crystal_symmetry(),
        origin_shift_grid_units=mm.origin_shift_grid_units,
        map_data=mm.map_data(),
        wrapping=False)
    assert mm_para.is_similar(mm)

    # Adjust origin and gridding:
    mm_read = map_manager(data_ccp4)
    mm_read.shift_origin()
    mm.show_summary()
    mm_read.show_summary()
    mm_read.set_original_origin_and_gridding((10, 10, 10),
                                             gridding=(100, 100, 100))
    mm_read.show_summary()
    assert (not mm_read.is_similar(mm))
    assert (mm_read.origin_is_zero())

    # Set program name
    mm_read.set_program_name('test program')
    assert mm_read.program_name == 'test program'

    # Set limitation
    mm_read.add_limitation('map_is_sharpened')
    assert mm_read.limitations == ['map_is_sharpened']

    # Add a label
    mm_read.add_label('TEST LABEL')
    assert mm_read.labels[0] == 'TEST LABEL'
    mm_read.write_map('map_with_labels.mrc')
    new_mm = map_manager('map_with_labels.mrc')
    assert 'TEST LABEL' in new_mm.labels
    assert new_mm.is_in_limitations('map_is_sharpened')
    assert new_mm.labels[0].find('test program') > -1

    # change the cell dimensions
    mm_read = map_manager(data_ccp4)
    mm_read.shift_origin()
    assert mm_read.is_similar(mm)
    assert approx_equal(mm_read.pixel_sizes(), (0.7470, 0.7231, 0.7374),
                        eps=0.001)
    from cctbx import crystal
    new_uc_params = list(
        mm_read.unit_cell_crystal_symmetry().unit_cell().parameters())
    new_uc_params[0] += 10
    new_cs = crystal.symmetry(new_uc_params, 1)
    mm_read.set_unit_cell_crystal_symmetry(new_cs)
    assert not mm_read.crystal_symmetry().is_similar_symmetry(
        mm.crystal_symmetry())
    assert not mm_read.is_similar(mm)
    mm_read.show_summary()
    assert approx_equal(mm_read.pixel_sizes(), (0.7970, 0.7231, 0.7374),
                        eps=0.001)

    # Read a map directly
    mm_read = map_manager(data_ccp4)
    mm_read.shift_origin()
    assert mm_read.is_similar(mm)

    # Set log
    import sys
    mm.set_log(sys.stdout)

    # Add map_data
    new_mm = mm_read.customized_copy(map_data=mm.map_data().deep_copy())
    assert new_mm.is_similar(mm)

    # replace data
    new_mm.set_map_data(map_data=mm.map_data().deep_copy())
    assert new_mm.is_similar(mm)

    # create a full-sized map from this one
    mm_full_size = mm_read.deep_copy().as_full_size_map()
    assert not mm_full_size.is_similar(mm_read)
    print(mm_full_size.map_data().origin(), mm_read.map_data().origin())
    print(mm_full_size.map_data().all(), mm_read.map_data().all())

    # Apply a mask to edges of a map
    assert approx_equal(new_mm.map_data().as_1d().min_max_mean().max,
                        mm.map_data().as_1d().min_max_mean().max)
    assert approx_equal((new_mm.map_data()[0], mm.map_data()[0]), (0.0, 0.0))
    new_mm.create_mask_around_edges(soft_mask_radius=3)
    new_mm.soft_mask(soft_mask_radius=3)
    assert approx_equal(new_mm.map_data().as_1d().min_max_mean().max,
                        mm.map_data().as_1d().min_max_mean().max)
    new_mm.apply_mask(set_outside_to_mean_inside=True)
    assert approx_equal((new_mm.map_data()[0], mm.map_data()[0]),
                        (0.0116267086024, 0.0))

    dm.process_real_map_file('test_map_manager.ccp4')
    new_mm = dm.get_real_map('test_map_manager.ccp4')
    new_mm.show_summary()
    assert (not new_mm.is_similar(mm))
    new_mm.shift_origin()
    new_mm.show_summary()
    assert new_mm.is_similar(mm)
    os.remove('test_map_manager.ccp4')

    # Check origin_shifts
    print(new_mm.origin_shift_grid_units)
    print(new_mm.shift_cart())
    assert approx_equal(new_mm.origin_shift_grid_units, (100, 100, 100))
    assert approx_equal(
        new_mm.shift_cart(),
        (-74.70333099365234, -72.30750274658205, -73.7437515258789))

    # Convert to map coeffs, write out, read back, convert back to map

    map_coeffs = mm.map_as_fourier_coefficients(d_min=3)
    mtz_dataset = map_coeffs.as_mtz_dataset(column_root_label='F')
    mtz_object = mtz_dataset.mtz_object()
    dm.write_miller_array_file(mtz_object, filename="map_coeffs.mtz")
    # Note these Fourier coeffs correspond to working map (not original position)

    array_labels = dm.get_miller_array_labels("map_coeffs.mtz")
    labels = array_labels[0]
    dm.get_reflection_file_server(filenames=["map_coeffs.mtz"],
                                  labels=[labels])
    miller_arrays = dm.get_miller_arrays()
    new_map_coeffs = miller_arrays[0]
    mm_from_map_coeffs = mm.fourier_coefficients_as_map_manager(
        map_coeffs=new_map_coeffs)

    assert mm_from_map_coeffs.is_similar(mm)

    # Find map symmetry in a map
    data_d7 = os.path.join(data_dir, 'data', 'D7.ccp4')
    dm = DataManager(['real_map', 'model'])
    dm.process_real_map_file(data_d7)
    dm.process_model_file(data_pdb)
    mm = dm.get_real_map(data_d7)
    model = dm.get_model(data_pdb)
    mm.shift_origin()
    mm.set_original_origin_and_gridding(original_origin=(0, 0, 0))

    # Box it so it is not so easy to find symmetry
    from cctbx.maptbx.box import with_bounds
    box = with_bounds(mm, lower_bounds=(2, 2, 2), upper_bounds=(43, 43, 43))
    new_mm = box.map_manager()
    new_mm.find_map_symmetry(symmetry='d7',
                             min_ncs_cc=0.8,
                             include_helical_symmetry=False)
    ncs_obj = new_mm.ncs_object()
    assert ncs_obj is not None
    print("NCS: ", new_mm.ncs_object().as_ncs_spec_string())
    another_mm = map_manager(
        unit_cell_grid=new_mm.unit_cell_grid,
        unit_cell_crystal_symmetry=new_mm.unit_cell_crystal_symmetry(),
        origin_shift_grid_units=new_mm.origin_shift_grid_units,
        map_data=new_mm.map_data(),
        ncs_object=ncs_obj,
        wrapping=False)
    assert another_mm.is_similar(new_mm)
    assert ncs_obj.is_similar_ncs_object(another_mm.ncs_object())
    assert new_mm.is_similar(another_mm)

    # Get resolution
    assert approx_equal(new_mm.resolution(force=True, method='d99'),
                        3.4663129793)
    assert approx_equal(new_mm.resolution(force=True, method='d_min'),
                        0.888888888889)
    assert approx_equal(new_mm.resolution(force=True, method='d9'),
                        0.888888888889)
    assert approx_equal(new_mm.resolution(force=True, method='d99'),
                        3.4663129793)
    assert approx_equal(new_mm.resolution(), 3.4663129793)

    # Adjust model and ncs symmetry to match this map
    assert model.shift_cart() is None
    new_mm.set_model_symmetries_and_shift_cart_to_match_map(model)
    assert approx_equal(
        model.shift_cart(),
        (-0.888888888888889, -0.8888888888888891, -0.888888888888889))

    assert new_mm.is_compatible_ncs_object(ncs_obj)
    ncs_obj.set_shift_cart((0, 0, 0))
    assert not new_mm.is_compatible_ncs_object(ncs_obj)

    new_mm.set_ncs_object_shift_cart_to_match_map(ncs_obj)
    new_mm.set_ncs_object(ncs_obj)
    assert new_mm.is_compatible_ncs_object(new_mm.ncs_object())
    new_mm.show_summary()

    new_mm.shift_origin(desired_origin=(11, 1, 1))
    print(new_mm.shift_cart(), new_mm.ncs_object().shift_cart())
    assert new_mm.is_compatible_ncs_object(new_mm.ncs_object())
    new_mm.shift_origin()
    assert new_mm.is_compatible_ncs_object(new_mm.ncs_object())

    # filter a map
    dm = DataManager()
    mm = dm.get_real_map(data_d7)

    low_pass_filtered = mm.deep_copy()
    low_pass_filtered.resolution_filter(d_min=2.5)

    high_pass_filtered = mm.deep_copy()
    high_pass_filtered.resolution_filter(d_max=2.5)

    gaussian = mm.deep_copy()
    gaussian.gaussian_filter(smoothing_radius=1)

    binary = mm.deep_copy()
    binary.binary_filter(threshold=0.5)

    assert approx_equal(
        (mm.map_data().as_1d()[1073],
         low_pass_filtered.map_data().as_1d()[1073],
         high_pass_filtered.map_data().as_1d()[1073],
         gaussian.map_data().as_1d()[1073], binary.map_data().as_1d()[1073]),
        (0.0171344596893, 0.0227163900537, -0.0072717454565, 0.0149086679298,
         0.0))

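    # Sample map values along a line between two points in the map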
    info = mm.get_density_along_line((5, 5, 5), (10, 10, 10))
    assert approx_equal([info.along_density_values[4]] +
                        list(info.along_sites[4]),
                        [-0.562231123447, 8.0, 8.0, 8.0])
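    # Hold the filtered maps alongside the original in a map_model_manager and
    #   compare each to the original with map_map_cc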
    from iotbx.map_model_manager import map_model_manager
    extra_map_manager_id_list = [
        "low_pass_filtered", "high_pass_filtered", "gaussian", "binary"
    ]

    expected_cc = [
        0.999920243317, 0.0129365545729, 0.971491994253, 0.733986499746
    ]
    mam = map_model_manager(
        map_manager=mm,
        extra_map_manager_list=[
            low_pass_filtered, high_pass_filtered, gaussian, binary
        ],
        extra_map_manager_id_list=extra_map_manager_id_list,
    )
    for other_id, cc in zip(extra_map_manager_id_list, expected_cc):
        assert approx_equal(
            cc, mam.map_map_cc(map_id='map_manager', other_map_id=other_id))


def test_data_manager():
    a = DataManager(['model'])

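    # add_model stores an arbitrary object under a label; plain strings stand
    #   in for model objects here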
    a.add_model('a', 'b')
    a.add_model('c', 'd')
    assert (a.get_model() == 'b')
    assert (a.get_model('a') == 'b')
    assert (a.get_model('c') == 'd')
    assert (a.get_model_names() == ['a', 'c'])

    assert (a.has_models())
    assert (a.has_models(exact_count=True, expected_n=2))
    assert (not a.has_models(expected_n=3, raise_sorry=False))

    # exporting phil
    working_phil = a.export_phil_scope()
    assert (len(working_phil.extract().data_manager.model) == 2)

    # data tracking
    try:
        a.has_models(expected_n=3, raise_sorry=True)
    except Sorry:
        pass

    try:
        a.has_models(exact_count=True, raise_sorry=True)
    except Sorry:
        pass

    a.set_default_model('c')
    assert (a.get_model() == 'd')

    assert ((a.get_model_names() == ['a', 'c'])
            or (a.get_model_names() == ['c', 'a']))

    a.remove_model('c')
    try:
        a.get_model()
    except Sorry:
        pass
    try:
        a.get_model('missing')
    except Sorry:
        pass
    try:
        a.set_default_model('missing')
    except Sorry:
        pass

    a = DataManager(datatypes=['sequence', 'phil'])
    assert (a.get_sequence_names() == [])
    assert (not hasattr(a, 'get_model'))

    # phil functions
    test_phil_str = '''
data_manager {
  phil_files = data_manager_test.eff
}
'''
    with open('data_manager_test.eff', 'w') as f:
        f.write(test_phil_str)

    # loading file with get function
    assert (len(a.get_phil_names()) == 0)
    p = a.get_phil('data_manager_test.eff')
    assert (type(p) == libtbx.phil.scope)
    assert ('data_manager_test.eff' in a.get_phil_names())

    # loading file with phil
    a = DataManager(datatypes=['phil'])
    test_phil = iotbx.phil.parse(test_phil_str)
    a.load_phil_scope(test_phil)

    assert ('data_manager_test.eff' in a.get_phil_names())
    assert (a.get_default_phil_name() == 'data_manager_test.eff')

    os.remove('data_manager_test.eff')

    # writing
    a = DataManager(datatypes=['model', 'phil', 'sequence'])
    a.add_model('a', 'b')
    a.add_phil('c', 'd')
    a.add_sequence('e', 'f')

    a.write_model_file(a.get_model(), filename='a.dat', overwrite=True)
    a.write_phil_file(a.get_phil(), filename='c.dat', overwrite=True)
    a.write_sequence_file(a.get_sequence(), filename='e.dat', overwrite=True)

    with open('a.dat', 'r') as f:
        lines = f.readlines()
    assert (lines[0] == 'b')

    os.remove('a.dat')
    os.remove('c.dat')
    os.remove('e.dat')
Example 23
def RunProbeTests(inFileName):
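    '''Run the wrapped Probe C++ self-tests, exercise the Python bindings
       (DotSphere, SpatialQuery, DotScorer), and regression-test an
       mmtbx.probe2 run against stored kinemage output.'''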

    #========================================================================
    # Call the test functions for the libraries we test.

    ret = probeext.DotSpheres_test()
    assert len(ret) == 0, "DotSpheres_test() failed: " + ret

    ret = probeext.SpatialQuery_test()
    assert len(ret) == 0, "SpatialQuery_test() failed: " + ret

    ret = probeext.Scoring_test()
    assert len(ret) == 0, "Scoring_test() failed: " + ret

    AtomTypes.Test()
    Helpers.Test()

    #========================================================================
    # Now ensure that we can use the C++-wrapped classes as intended to make sure
    # that the wrapping code or parameters have not changed.

    #========================================================================
    # Make sure we can get at the DotSphere objects and their methods
    cache = probeext.DotSphereCache(10)
    sphere1 = cache.get_sphere(1)
    dots = sphere1.dots()

    #========================================================================
    # Make sure we can fill in an ExtraAtomInfoList and pass it to scoring
    # Generate an example data model with a small molecule in it unless we've
    # been given a file name to open.
    if inFileName is not None and len(inFileName) > 0:
        # Read a model from a file using the DataManager
        dm = DataManager()
        dm.process_model_file(inFileName)
        model = dm.get_model(inFileName)
    else:
        # Generate a small-molecule model using the map model manager
        mmm = map_model_manager()  # get an initialized instance of the map_model_manager
        mmm.generate_map()  # get a model from a generated small library model and calculate a map for it
        model = mmm.model()  # get the model

    # Fix up bogus unit cell when it occurs by checking crystal symmetry.
    cs = model.crystal_symmetry()
    if (cs is None) or (cs.unit_cell() is None):
        model = shift_and_box_model(model=model)

    # Get the list of all atoms in the model
    atoms = model.get_atoms()

    # Get the bonding information we'll need to exclude our bonded neighbors.
    try:
        p = mmtbx.model.manager.get_default_pdb_interpretation_params()
        model.process(make_restraints=True,
                      pdb_interpretation_params=p)  # make restraints
        geometry = model.get_restraints_manager().geometry
        sites_cart = model.get_sites_cart()  # cartesian coordinates
        bond_proxies_simple, asu = \
            geometry.get_all_bond_proxies(sites_cart = sites_cart)
    except Exception as e:
        raise Exception("Could not get bonding information for input file: " +
                        str(e))
    bondedNeighbors = Helpers.getBondedNeighborLists(atoms,
                                                     bond_proxies_simple)

    # Traverse the hierarchy and look up the extra data to be filled in.
    class philLike:
        def __init__(self, useImplicitHydrogenDistances=False):
            self.implicit_hydrogens = useImplicitHydrogenDistances
            self.set_polar_hydrogen_radius = True

    ret = Helpers.getExtraAtomInfo(model,
                                   bondedNeighbors,
                                   useNeutronDistances=False,
                                   probePhil=philLike(False))
    extra = ret.extraAtomInfo

    # Construct a SpatialQuery and fill in the atoms.  Ensure that we can make a
    # query within 1000 Angstroms of the origin.
    sq = probeext.SpatialQuery(atoms)
    nb = sq.neighbors((0, 0, 0), 0, 1000)

    # Construct a DotScorer object.
    # Find the radius of each atom in the structure and construct dot spheres for
    # them. Find the atoms that are bonded to them and add them to an excluded list.
    # Then compute the score for each of them and report the summed score over the
    # whole molecule the way that Reduce will.
    ds = probeext.DotScorer(extra)
    total = 0
    badBumpTotal = 0
    for a in atoms:
        rad = extra.getMappingFor(a).vdwRadius
        assert rad > 0, "Invalid radius for atom look-up: " + a.name + " rad = " + str(
            rad)
        sphere = cache.get_sphere(rad)

        # Exclude atoms that are bonded to me or to one of my neighbors.
        # This has the side effect of excluding myself if I have any neighbors.
        # Construct as a set to avoid duplicates.
        exclude = set()
        for n in bondedNeighbors[a]:
            exclude.add(n)
            for n2 in bondedNeighbors[n]:
                exclude.add(n2)
        exclude = list(exclude)

        dots = sphere.dots()
        res = ds.score_dots(a, 1.0, sq, rad * 3, 0.25, exclude, sphere.dots(),
                            sphere.density(), False)
        total += res.totalScore()
        if res.hasBadBump:
            badBumpTotal += 1

    # Test calling the single-dot checking code as will be used by Probe to make sure
    # all of the Python linkage is working
    dotOffset = [1, 0, 0]
    check = ds.check_dot(atoms[0], dotOffset, 1, atoms, [atoms[0]])
    overlapType = check.overlapType

    # Test calling the interaction_type method to be sure Python linkage is working
    interactionType = ds.interaction_type(check.overlapType, check.gap)

    #========================================================================
    # Regression test a Probe2 run against a snippet of a file, comparing the output
    # to the output generated by a previous version of the program.  If there are
    # differences, report that this is the case and recommend verifying that the
    # differences are intentional and replacing the stored output.
    data_dir = libtbx.env.under_dist(module_name="mmtbx",
                                     path=os.path.join("regression", "pdbs"),
                                     test=os.path.isdir)
    model_file = os.path.join(data_dir, 'Fe_1brf_snip_reduced.pdb')
    kin_dir = libtbx.env.under_dist(module_name="mmtbx",
                                    path=os.path.join("regression", "kins"),
                                    test=os.path.isdir)
    kin_file = os.path.join(kin_dir, 'Fe_1brf_snip_reduced.kin')
    temp_file = os.path.join(tempfile._get_default_tempdir(),
                             next(tempfile._get_candidate_names()) + ".kin")
    try:
        my_env = os.environ
        exe_name = 'mmtbx.probe2'
        if platform.system() == 'Windows':
            exe_name += '.bat'
        if subprocess.check_call([
                exe_name, 'source_selection="all"', 'approach=self',
                'output.separate_worse_clashes=True', 'output.file_name=' +
                temp_file, 'include_mainchain_mainchain=True',
                'output.add_kinemage_keyword=True', model_file
        ],
                                 env=my_env,
                                 stdout=subprocess.DEVNULL,
                                 stderr=subprocess.DEVNULL):
            raise Exception(
                "Call to subprocess to regression test had nonzero return")
    except Exception as e:
        raise Exception("Could not call subprocess to do regression test: " +
                        str(e))
    with open(temp_file) as ft:
        ft_text = ft.readlines()
    with open(kin_file) as fk:
        fk_text = fk.readlines()
    instructions = (
        "  Use KiNG or another program to see what changed and then determine if the "
        + "differences are expected.  If so, replace " + kin_file +
        " with the new file.")
    if len(ft_text) != len(fk_text):
        raise Exception("Different number of lines in " + temp_file + " and " +
                        kin_file + "." + instructions)
    for i in range(3, len(ft_text)):
        if ft_text[i] != fk_text[i]:
            print('Line', i, 'from each file:')
            print(ft_text[i])
            print(fk_text[i])
            raise Exception("Line " + str(i) + " in " + temp_file + " and " +
                            kin_file + "differ." + instructions)