Example #1
def loadModel(filename):
    dm = DataManager()  #   Initialize the DataManager and call it dm
    dm.set_overwrite(
        True)  #   tell the DataManager to overwrite files with the same name
    #print("Reading file")
    manager = dm.get_model(filename)
    return manager
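# A minimal usage sketch for the helper above. The file name "model.pdb" is a
# placeholder; the size check only confirms that a model object was returned.
model = loadModel("model.pdb")
print(model.get_sites_cart().size())  # number of atoms read from the file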
Example #2
    def __init__(self,
                 program_class,
                 custom_process_arguments=None,
                 logger=None,
                 *args,
                 **kwargs):
        '''
        '''
        # program name
        self.prog = os.getenv('LIBTBX_DISPATCHER_NAME')
        if (self.prog is None):
            self.prog = sys.argv[0]
        self.prefix = self.prog.split('.')[-1]

        # PHIL filenames
        self.data_filename = self.prefix + '_data.eff'
        self.modified_filename = self.prefix + '_modified.eff'
        self.all_filename = self.prefix + '_all.eff'

        # terminal width
        self.text_width = 79

        # print header
        border = '-' * self.text_width
        description = border + program_class.description + border
        epilog = border + program_class.epilog
        super(CCTBXParser, self).__init__(
            prog=self.prog,
            description=description,
            epilog=epilog,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            *args,
            **kwargs)

        # default values
        self.program_class = program_class
        self.custom_process_arguments = custom_process_arguments
        self.logger = logger
        if (self.logger is None):
            self.logger = logging.getLogger('main')
        self.data_manager = DataManager(datatypes=program_class.datatypes,
                                        logger=self.logger)

        # add PHIL converters if available
        if (len(program_class.phil_converters) > 0):
            iotbx.phil.default_converter_registry = \
              libtbx.phil.extended_converter_registry(
                additional_converters=program_class.phil_converters,
                base_registry=iotbx.phil.default_converter_registry)

        # set up master and working PHIL scopes
        self.master_phil = iotbx.phil.parse(program_class.master_phil_str,
                                            process_includes=True)
        required_output_phil = iotbx.phil.parse(
            ProgramTemplate.output_phil_str)
        self.master_phil.adopt_scope(required_output_phil)
        self.working_phil = None

        self.add_default_options()
Example #3
def test_02():

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_d7 = os.path.join(data_dir, 'data', 'D7.ccp4')

    # find separated atoms in a map
    dm = DataManager()
    mm = dm.get_real_map(data_d7)
    sites_cart = mm.trace_atoms_in_map(dist_min=1, n_atoms=10)
    assert sites_cart.size() == 10  # Note: zero if not available
Example #4
def loadModel(filename):
    dm = DataManager()  #   Initialize the DataManager and call it dm
    dm.set_overwrite(
        True)  #   tell the DataManager to overwrite files with the same name
    #print("Reading file")
    model = dm.get_model(filename)
    #print("Processing model")
    #model.process_input_model(make_restraints=True)
    # removed because Restraints Manager will not operate
    # on unfamiliar residues  KPB 6/10/2021
    return model
Example #5
def run(args, log=sys.stdout):
  print("-"*79, file=log)
  print(legend, file=log)
  print("-"*79, file=log)
  inputs = mmtbx.utils.process_command_line_args(args = args,
    master_params = master_params(),
    suppress_symmetry_related_errors = True)
  params = inputs.params.extract()
  # model
  broadcast(m="Input PDB:", log=log)
  file_names = inputs.pdb_file_names
  if(len(file_names) != 1): raise Sorry("PDB file has to be given.")
  from iotbx.data_manager import DataManager
  dm = DataManager()
  dm.set_overwrite(True)
  model = dm.get_model(file_names[0])

  # map
  broadcast(m="Input map:", log=log)
  if(inputs.ccp4_map is None): raise Sorry("Map file has to be given.")

  from iotbx.map_model_manager import map_model_manager
  mam = map_model_manager(model = model, map_manager = inputs.ccp4_map,
     wrapping = params.wrapping,
     ignore_symmetry_conflicts = params.ignore_symmetry_conflicts)

  mam.model().setup_scattering_dictionaries(
     scattering_table=params.scattering_table)
  mam.model().get_xray_structure().show_summary(f=log, prefix="  ")
  inputs.ccp4_map.show_summary(prefix="  ")

  # estimate resolution
  d_min = params.resolution
  if(d_min is None):
    raise Sorry("Map resolution must be given.")
  print("  d_min: %6.4f"%d_min, file=log)
  #
  result_obj = compdiff(
    map_data_obs = mam.map_manager().map_data(), # NOTE this will always wrap map
    xrs          = mam.model().get_xray_structure(),
    d_min        = d_min,
    vector_map   = False)

  output_map_manager=mam.map_manager().customized_copy(
      map_data=result_obj.map_result)
  dm.write_real_map_file(output_map_manager, "map_model_difference_1.ccp4")

  #
  result_obj = compdiff(
    map_data_obs = mam.map_manager().map_data(),
    xrs          = mam.model().get_xray_structure(),
    d_min        = d_min,
    vector_map   = True)

  output_map_manager=mam.map_manager().customized_copy(
      map_data=result_obj.map_result)
  dm.write_real_map_file(output_map_manager, "map_model_difference_2.ccp4")
Example #6
def read_map_and_model(file_name_1, file_name_2):
    '''
    Identify which file is map and which is model, read in and
    create map_model_manager
    '''

    map_file_name = None
    model_file_name = None
    for f in [file_name_1, file_name_2]:
        for ending in ['.ccp4', '.mrc', '.map']:
            if f.endswith(ending):
                map_file_name = f
        for ending in ['.pdb', '.cif']:
            if f.endswith(ending):
                model_file_name = f
    if not map_file_name or not model_file_name:
        raise Sorry("Unable to identify map and model from %s and %s" %
                    (file_name_1, file_name_2))

    from iotbx.data_manager import DataManager
    from iotbx.map_model_manager import map_model_manager
    dm = DataManager()
    dm.process_real_map_file(map_file_name)
    mm = dm.get_real_map(map_file_name)

    dm.process_model_file(model_file_name)
    model = dm.get_model(model_file_name)
    mam = map_model_manager(model=model, map_manager=mm)
    return mam
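# Usage sketch for read_map_and_model with placeholder file names; the helper
# decides by file extension which argument is the map and which is the model.
mam = read_map_and_model('emd_1234.map', 'model.pdb')
print(mam.map_manager().map_data().all())   # map gridding
print(mam.model().get_sites_cart().size())  # number of atoms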
def test_default_filenames():
    datatypes = [
        'model', 'ncs_spec', 'phil', 'real_map', 'restraint', 'sequence'
    ]
    extensions = ['cif', 'ncs_spec', 'eff', 'mrc', 'cif', 'seq']
    dm = DataManager(datatypes)
    for datatype, extension in zip(datatypes, extensions):
        filename = getattr(
            dm, 'get_default_output_{datatype}_filename'.format(
                datatype=datatype))()
        assert filename == 'cctbx_program.' + extension

    filename = dm.get_default_output_model_filename(extension='.abc')
    assert filename == 'cctbx_program.abc'
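# Sketch of the default-output-filename behaviour exercised above: without an
# explicit filename, the DataManager falls back to the program name
# ('cctbx_program') plus the datatype's default extension.
dm = DataManager(['model'])
print(dm.get_default_output_model_filename())                  # cctbx_program.cif
print(dm.get_default_output_model_filename(extension='.pdb'))  # cctbx_program.pdb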
Example #8
def exercise_ss_creation_crash():
  pdb_str = """
CRYST1  145.350  135.090  157.320  90.00  90.00  90.00 P 1
SCALE1      0.006880  0.000000  0.000000        0.00000
SCALE2      0.000000  0.007402  0.000000        0.00000
SCALE3      0.000000  0.000000  0.006356        0.00000
ATOM      1  N   ASN A   1      47.095 160.279  31.220  1.00 30.00           N
ATOM      2  CA  ASN A   1      65.985 120.233  34.727  1.00 30.00           C
ATOM      3  C   ASN A   1      56.657 138.700  33.374  1.00 30.00           C
ATOM      4  O   ASN A   1      56.353 138.977  34.561  1.00 30.00           O
ATOM      5  CB  ASN A   1      65.238 120.133  36.068  1.00 30.00           C
ATOM      6  CG  ASN A   1      66.087 119.360  37.057  1.00 30.00           C
ATOM      7  OD1 ASN A   1      65.746 118.217  37.441  1.00 30.00           O
ATOM      8  ND2 ASN A   1      67.240 119.920  37.395  1.00 30.00           N
ATOM      9  N   ASN A   2      56.939 137.441  33.021  1.00 30.00           N
ATOM     10  CA  ASN A   2      67.135 117.384  35.354  1.00 30.00           C
ATOM     11  C   ASN A   2      74.935 104.398  35.546  1.00 30.00           C
ATOM     12  O   ASN A   2      74.423 104.166  34.444  1.00 30.00           O
ATOM     13  CB  ASN A   2      65.828 116.703  35.809  1.00 30.00           C
ATOM     14  CG  ASN A   2      66.092 115.518  36.718  1.00 30.00           C
ATOM     15  OD1 ASN A   2      66.641 114.515  36.266  1.00 30.00           O
ATOM     16  ND2 ASN A   2      65.744 115.556  38.000  1.00 30.00           N
ATOM     17  N   ASN A   3      76.102 103.886  35.920  1.00 30.00           N
ATOM     18  CA  ASN A   3      68.960 115.076  35.163  1.00 30.00           C
ATOM     19  C   ASN A   3      86.047  90.376  35.591  1.00 30.00           C
ATOM     20  O   ASN A   3      87.134  90.903  35.535  1.00 30.00           O
ATOM     21  CB  ASN A   3      70.251 115.882  34.903  1.00 30.00           C
ATOM     22  CG  ASN A   3      71.023 116.208  36.192  1.00 30.00           C
ATOM     23  OD1 ASN A   3      70.637 117.096  36.957  1.00 30.00           O
ATOM     24  ND2 ASN A   3      72.106 115.481  36.436  1.00 30.00           N
ATOM     25  OXT ASN A   3      85.912  89.104  36.045  1.00 30.00           O
TER
END


"""
  with open("exercise_ss_creation_crash_model.pdb","w") as fo:
    fo.write(pdb_str)
  from iotbx.data_manager import DataManager
  dm=DataManager()
  params = mmtbx.model.manager.get_default_pdb_interpretation_params()
  params.pdb_interpretation.secondary_structure.enabled=True
  model = dm.get_model('exercise_ss_creation_crash_model.pdb')
  model.set_pdb_interpretation_params(params)
  model.process_input_model(make_restraints=True)
Example #9
def run(prefix="tst_00_mmtbx_building_ligands"):
  # Ligand file
  with open("%s.pdb"%prefix,"w") as fo:
    fo.write(pdb_str)
  # Read map and model
  from iotbx.data_manager import DataManager
  dm = DataManager()
  map_file = libtbx.env.find_in_repositories(
    relative_path="mmtbx/building/ligands/tst_00_mmtbx_building_ligands.map",
    test=os.path.isfile)
  mm = dm.get_real_map(map_file)
  model = dm.get_model("%s.pdb"%prefix)
  model.set_crystal_symmetry(mm.crystal_symmetry())
  model.process(make_restraints=True)
  # Create map_model_manager
  mmm = iotbx.map_model_manager.map_model_manager(map_manager=mm, model=model)
  # Build ligand
  o = ligands.lifi.run(map_model_manager = mmm, d_min = 2.5)
Example #10
def read_model(filename):
    from iotbx.data_manager import DataManager
    dm = DataManager()
    dm.set_overwrite(True)
    dm.process_model_file(filename)
    model = dm.get_model(filename)
    model.add_crystal_symmetry_if_necessary()
    return model
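# Hypothetical invocation of read_model; add_crystal_symmetry_if_necessary()
# above means the input file does not strictly need a CRYST1 record.
model = read_model('fragment.pdb')
print(model.crystal_symmetry())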
Example #11
def read_map_and_model(file_name_1,
                       file_name_2,
                       regression_directory=None,
                       prefix=None):
    '''
    Identify which file is map and which is model, read in and
    create map_model_manager
    If regression_directory is specified, look there for these files, assuming
    prefix of $PHENIX/modules/phenix_regression/
    '''

    if regression_directory and not prefix:
        import libtbx.load_env
        prefix = libtbx.env.under_dist(module_name="phenix_regression",
                                       path=regression_directory,
                                       test=os.path.isdir)

    if prefix:
        file_name_1 = os.path.join(prefix, file_name_1)
        file_name_2 = os.path.join(prefix, file_name_2)

    map_file_name = None
    model_file_name = None
    for f in [file_name_1, file_name_2]:
        for ending in ['.ccp4', '.mrc', '.map']:
            if f.endswith(ending):
                map_file_name = f
        for ending in ['.pdb', '.cif']:
            if f.endswith(ending):
                model_file_name = f
    if not map_file_name or not model_file_name:
        raise Sorry("Unable to guess map and model from %s and %s" %
                    (file_name_1, file_name_2))

    from iotbx.data_manager import DataManager
    from iotbx.map_model_manager import map_model_manager
    dm = DataManager()
    dm.process_real_map_file(map_file_name)
    mm = dm.get_real_map(map_file_name)

    dm.process_model_file(model_file_name)
    model = dm.get_model(model_file_name)
    mam = map_model_manager(model=model, map_manager=mm)
    return mam
Example #12
def run(args):
    show_citation()
    if (len(args) != 1):
        raise Sorry("Need to provide CCP4 formatted map file.")
    # map
    dm = DataManager()
    dm.set_overwrite(True)
    map_manager = dm.get_real_map(args[0])
    map_manager.shift_origin()

    cs = map_manager.crystal_symmetry()
    m = map_manager.map_data().as_double()
    # show general statistics
    show_overall_statistics(m=m, header="Map basic info (%s):" % args[0])
    # HE
    m_he = maptbx.volume_scale(map=m, n_bins=10000).map_data()
    show_overall_statistics(m=m_he, header="Rank-scaled (HE) map info:")
    #
    file_name = args[0] + "_rank_scaled.ccp4"
    he_map_manager = map_manager.customized_copy(map_data=m_he)
    he_map_manager.add_label("Histogram-equalized map")
    dm.write_real_map_file(he_map_manager, file_name)
Example #13
def test_sequence_datatype():

    # 1sar.fa
    seq_filename = 'test_seq.fa'
    seq_str = '''>1SAR A
DVSGTVCLSALPPEATDTLNLIASDGPFPYSQDGVVFQNRESVLPTQSYGYYHEYTVITPGARTRGTRRIICGEATQEDY
YTGDHYATFSLIDQTC
'''

    with open(seq_filename, 'w') as f:
        f.write(seq_str)

    dm = DataManager(['sequence'])
    dm.process_sequence_file(seq_filename)
    assert seq_filename in dm.get_sequence_names()

    seq = dm.get_sequence()
    new_str = dm.get_sequence_as_string(seq_filename)
    for a, b in zip(new_str, seq_str):
        assert a == b

    os.remove(seq_filename)
Example #14
def test_model_and_restraint():

    # from 3tpj
    model_str = '''
CRYST1  104.428  128.690   76.662  90.00  90.00  90.00 C 2 2 21
ATOM   5877  O   URE A 403     -37.796 -38.296   5.693  1.00 15.43           O
ATOM   5878  C   URE A 403     -36.624 -38.509   5.800  1.00 20.53           C
ATOM   5879  N2  URE A 403     -36.191 -39.836   6.120  1.00 27.82           N
ATOM   5880  N1  URE A 403     -35.679 -37.450   5.644  1.00 21.36           N
ATOM   5881 HN11 URE A 403     -34.792 -37.617   5.732  1.00 25.63           H
ATOM   5882 HN12 URE A 403     -35.965 -36.613   5.445  1.00 25.63           H
ATOM   5883 HN21 URE A 403     -35.307 -40.015   6.211  1.00 33.38           H
ATOM   5884 HN22 URE A 403     -36.801 -40.499   6.221  1.00 33.38           H
'''

    restraint_str = '''
#
data_comp_list
loop_
_chem_comp.id
_chem_comp.three_letter_code
_chem_comp.name
_chem_comp.group
_chem_comp.number_atoms_all
_chem_comp.number_atoms_nh
_chem_comp.desc_level
URE URE Unknown                   ligand 8 4 .
#
data_comp_URE
#
loop_
_chem_comp_atom.comp_id
_chem_comp_atom.atom_id
_chem_comp_atom.type_symbol
_chem_comp_atom.type_energy
_chem_comp_atom.partial_charge
_chem_comp_atom.x
_chem_comp_atom.y
_chem_comp_atom.z
URE        C       C   C     .          0.4968   -0.0000   -0.0000
URE        O       O   O     .          1.7184   -0.0000   -0.0000
URE        N1      N   NH2   .         -0.2180   -0.0000    1.2381
URE        N2      N   NH2   .         -0.2180    0.0000   -1.2381
URE        HN11    H   HNH2  .          0.2355   -0.0000    2.0237
URE        HN12    H   HNH2  .         -1.1251    0.0000    1.2382
URE        HN21    H   HNH2  .          0.2355    0.0000   -2.0237
URE        HN22    H   HNH2  .         -1.1251   -0.0000   -1.2382
#
loop_
_chem_comp_bond.comp_id
_chem_comp_bond.atom_id_1
_chem_comp_bond.atom_id_2
_chem_comp_bond.type
_chem_comp_bond.value_dist
_chem_comp_bond.value_dist_esd
URE  C       O      double        1.222 0.020
URE  C       N1     single        1.430 0.020
URE  C       N2     single        1.430 0.020
URE  N1      HN11   single        0.907 0.020
URE  N1      HN12   single        0.907 0.020
URE  N2      HN21   single        0.907 0.020
URE  N2      HN22   single        0.907 0.020
#
loop_
_chem_comp_angle.comp_id
_chem_comp_angle.atom_id_1
_chem_comp_angle.atom_id_2
_chem_comp_angle.atom_id_3
_chem_comp_angle.value_angle
_chem_comp_angle.value_angle_esd
URE  N2      C       N1           120.00 3.000
URE  N2      C       O            120.00 3.000
URE  N1      C       O            120.00 3.000
URE  HN12    N1      HN11         120.00 3.000
URE  HN12    N1      C            120.00 3.000
URE  HN11    N1      C            120.00 3.000
URE  HN22    N2      HN21         120.00 3.000
URE  HN22    N2      C            120.00 3.000
URE  HN21    N2      C            120.00 3.000
#
loop_
_chem_comp_tor.comp_id
_chem_comp_tor.id
_chem_comp_tor.atom_id_1
_chem_comp_tor.atom_id_2
_chem_comp_tor.atom_id_3
_chem_comp_tor.atom_id_4
_chem_comp_tor.value_angle
_chem_comp_tor.value_angle_esd
_chem_comp_tor.period
URE CONST_01      HN11    N1      C       O              0.00   0.0 0
URE CONST_02      HN12    N1      C       O            180.00   0.0 0
URE CONST_03      HN21    N2      C       O             -0.00   0.0 0
URE CONST_04      HN22    N2      C       O            180.00   0.0 0
URE CONST_05      HN21    N2      C       N1           180.00   0.0 0
URE CONST_06      HN22    N2      C       N1            -0.00   0.0 0
URE CONST_07      HN11    N1      C       N2          -180.00   0.0 0
URE CONST_08      HN12    N1      C       N2            -0.00   0.0 0
#
loop_
_chem_comp_plane_atom.comp_id
_chem_comp_plane_atom.plane_id
_chem_comp_plane_atom.atom_id
_chem_comp_plane_atom.dist_esd
URE plan-1  C      0.020
URE plan-1  O      0.020
URE plan-1  N1     0.020
URE plan-1  N2     0.020
URE plan-1  HN11   0.020
URE plan-1  HN12   0.020
URE plan-1  HN21   0.020
URE plan-1  HN22   0.020
'''

    model_filename = 'ure.pdb'
    restraint_filename = 'ure.cif'

    dm = DataManager(['model', 'restraint'])
    dm.write_model_file(model_str, filename=model_filename, overwrite=True)
    dm.write_restraint_file(restraint_str,
                            filename=restraint_filename,
                            overwrite=True)

    # fails because no restraints are loaded
    dm.process_model_file(model_filename)
    model = dm.get_model()
    try:
        model.get_restraints_manager()
    except Sorry:
        pass

    # automatically add restraints
    dm.process_restraint_file(restraint_filename)
    model = dm.get_model()
    model.get_restraints_manager()

    os.remove(model_filename)
    os.remove(restraint_filename)
Example #15
def test_01():

  data_dir = os.path.dirname(os.path.abspath(__file__))
  data_ccp4 = os.path.join(data_dir, 'data',
                          'non_zero_origin_map.ccp4')
  data_pdb = os.path.join(data_dir, 'data',
                          'non_zero_origin_model.pdb')

  dm = DataManager(['miller_array','real_map', 'phil'])
  dm.set_overwrite(True)
  dm.process_real_map_file(data_ccp4)

  # test writing and reading file
  mm = dm.get_real_map()
  mm.shift_origin()
  mm.show_summary()
  dm.write_map_with_map_manager(mm, filename='test_map_manager.ccp4', overwrite=True)

  # get map_data
  map_data=mm.map_data()
  assert approx_equal(map_data[15,10,19], 0.38,eps=0.01)

  # get crystal_symmetry
  cs=mm.crystal_symmetry()
  assert approx_equal(cs.unit_cell().parameters()[0] ,22.41,eps=0.01)

  # and full cell symmetry
  full_cs=mm.unit_cell_crystal_symmetry()
  assert approx_equal(full_cs.unit_cell().parameters()[0] ,149.4066,eps=0.01)

  # write map directly:
  mm.write_map('test_direct.ccp4')

  # read back directly
  new_mm=map_manager('test_direct.ccp4')
  assert (not new_mm.is_similar(mm))

  new_mm.shift_origin()
  assert mm.is_similar(new_mm)

  # deep_copy
  new_mm=mm.deep_copy()
  assert new_mm.is_similar(mm)

  # customized_copy
  new_mm=mm.customized_copy(map_data=mm.map_data().deep_copy())
  assert new_mm.is_similar(mm)


  # Initialize with parameters
  mm_para=map_manager(
     unit_cell_grid= mm.unit_cell_grid,
     unit_cell_crystal_symmetry= mm.unit_cell_crystal_symmetry(),
     origin_shift_grid_units= mm.origin_shift_grid_units,
     map_data=mm.map_data())
  assert mm_para.is_similar(mm)

  # Adjust origin and gridding:
  mm_read=map_manager(data_ccp4)
  mm_read.set_origin_and_gridding((10,10,10),gridding=(100,100,100))
  assert (not mm_read.is_similar(mm))
  assert (not mm_read.already_shifted())

  # Adjust origin and gridding should fail if origin already shifted:
  mm_read=map_manager(data_ccp4)
  mm_read.shift_origin()
  mm_read.set_origin_and_gridding((10,10,10),gridding=(100,100,100))
  assert (mm_read.is_similar(mm))  # not shifted as it failed
  assert (mm_read.already_shifted())

  # Set input_file name
  mm_read.set_input_file_name('test input_file')
  assert mm_read.input_file_name=='test input_file'

  # Set program name
  mm_read.set_program_name('test program')
  assert mm_read.program_name=='test program'

  # Set limitation
  mm_read.add_limitation('map_is_sharpened')
  assert mm_read.limitations==['map_is_sharpened']

  # Add a label
  mm_read.add_label('TEST LABEL')
  assert mm_read.labels[0]=='TEST LABEL'
  mm_read.write_map('map_with_labels.mrc')
  new_mm=map_manager('map_with_labels.mrc')
  assert 'TEST LABEL' in new_mm.labels
  assert new_mm.is_in_limitations('map_is_sharpened')
  assert new_mm.labels[0].find('test program')>-1

  # Read a map directly
  mm_read=map_manager(data_ccp4)
  mm_read.shift_origin()
  assert mm_read.is_similar(mm)

  # Set log
  import sys
  mm.set_log(sys.stdout)

  # Add map_data
  mm_read.replace_map_data(map_data=mm.map_data().deep_copy())
  assert mm_read.is_similar(mm)



  dm.process_real_map_file('test_map_manager.ccp4')
  new_mm=dm.get_real_map('test_map_manager.ccp4')
  new_mm.show_summary()
  assert (not new_mm.is_similar(mm))
  new_mm.shift_origin()
  new_mm.show_summary()
  assert new_mm.is_similar(mm)
  os.remove('test_map_manager.ccp4')

  # Convert to map coeffs, write out, read back, convert back to map

  map_coeffs = mm.map_as_fourier_coefficients(high_resolution = 3)
  mtz_dataset = map_coeffs.as_mtz_dataset(column_root_label='F')
  mtz_object=mtz_dataset.mtz_object()
  dm.write_miller_array_file(mtz_object, filename="map_coeffs.mtz")
  # Note these Fourier coeffs correspond to working map (not original position)

  array_labels=dm.get_miller_array_labels("map_coeffs.mtz")
  labels=array_labels[0]
  dm.get_reflection_file_server(filenames=["map_coeffs.mtz"],labels=[labels])
  miller_arrays=dm.get_miller_arrays()
  new_map_coeffs=miller_arrays[0]
  map_data_from_map_coeffs=mm.fourier_coefficients_as_map(
      map_coeffs=new_map_coeffs)

  mm_from_map_coeffs=mm.customized_copy(map_data=map_data_from_map_coeffs)
  assert mm_from_map_coeffs.is_similar(mm)
Example #16
def test_model_datatype():
    import mmtbx.monomer_library.server
    try:
        mon_lib_srv = mmtbx.monomer_library.server.server()
    except mmtbx.monomer_library.server.MonomerLibraryServerError:
        print(
            "Can not initialize monomer_library, skipping test_model_datatype."
        )
        return

    # 1yjp
    model_str = '''
CRYST1   21.937    4.866   23.477  90.00 107.08  90.00 P 1 21 1      2
ORIGX1      1.000000  0.000000  0.000000        0.00000
ORIGX2      0.000000  1.000000  0.000000        0.00000
ORIGX3      0.000000  0.000000  1.000000        0.00000
SCALE1      0.045585  0.000000  0.014006        0.00000
SCALE2      0.000000  0.205508  0.000000        0.00000
SCALE3      0.000000  0.000000  0.044560        0.00000
ATOM      1  N   GLY A   1      -9.009   4.612   6.102  1.00 16.77           N
ATOM      2  CA  GLY A   1      -9.052   4.207   4.651  1.00 16.57           C
ATOM      3  C   GLY A   1      -8.015   3.140   4.419  1.00 16.16           C
ATOM      4  O   GLY A   1      -7.523   2.521   5.381  1.00 16.78           O
ATOM      5  N   ASN A   2      -7.656   2.923   3.155  1.00 15.02           N
ATOM      6  CA  ASN A   2      -6.522   2.038   2.831  1.00 14.10           C
ATOM      7  C   ASN A   2      -5.241   2.537   3.427  1.00 13.13           C
ATOM      8  O   ASN A   2      -4.978   3.742   3.426  1.00 11.91           O
ATOM      9  CB  ASN A   2      -6.346   1.881   1.341  1.00 15.38           C
ATOM     10  CG  ASN A   2      -7.584   1.342   0.692  1.00 14.08           C
ATOM     11  OD1 ASN A   2      -8.025   0.227   1.016  1.00 17.46           O
ATOM     12  ND2 ASN A   2      -8.204   2.155  -0.169  1.00 11.72           N
ATOM     13  N   ASN A   3      -4.438   1.590   3.905  1.00 12.26           N
ATOM     14  CA  ASN A   3      -3.193   1.904   4.589  1.00 11.74           C
ATOM     15  C   ASN A   3      -1.955   1.332   3.895  1.00 11.10           C
ATOM     16  O   ASN A   3      -1.872   0.119   3.648  1.00 10.42           O
ATOM     17  CB  ASN A   3      -3.259   1.378   6.042  1.00 12.15           C
ATOM     18  CG  ASN A   3      -2.006   1.739   6.861  1.00 12.82           C
ATOM     19  OD1 ASN A   3      -1.702   2.925   7.072  1.00 15.05           O
ATOM     20  ND2 ASN A   3      -1.271   0.715   7.306  1.00 13.48           N
ATOM     21  N   GLN A   4      -1.005   2.228   3.598  1.00 10.29           N
ATOM     22  CA  GLN A   4       0.384   1.888   3.199  1.00 10.53           C
ATOM     23  C   GLN A   4       1.435   2.606   4.088  1.00 10.24           C
ATOM     24  O   GLN A   4       1.547   3.843   4.115  1.00  8.86           O
ATOM     25  CB  GLN A   4       0.656   2.148   1.711  1.00  9.80           C
ATOM     26  CG  GLN A   4       1.944   1.458   1.213  1.00 10.25           C
ATOM     27  CD  GLN A   4       2.504   2.044  -0.089  1.00 12.43           C
ATOM     28  OE1 GLN A   4       2.744   3.268  -0.190  1.00 14.62           O
ATOM     29  NE2 GLN A   4       2.750   1.161  -1.091  1.00  9.05           N
ATOM     30  N   GLN A   5       2.154   1.821   4.871  1.00 10.38           N
ATOM     31  CA  GLN A   5       3.270   2.361   5.640  1.00 11.39           C
ATOM     32  C   GLN A   5       4.594   1.768   5.172  1.00 11.52           C
ATOM     33  O   GLN A   5       4.768   0.546   5.054  1.00 12.05           O
ATOM     34  CB  GLN A   5       3.056   2.183   7.147  1.00 11.96           C
ATOM     35  CG  GLN A   5       1.829   2.950   7.647  1.00 10.81           C
ATOM     36  CD  GLN A   5       1.344   2.414   8.954  1.00 13.10           C
ATOM     37  OE1 GLN A   5       0.774   1.325   9.002  1.00 10.65           O
ATOM     38  NE2 GLN A   5       1.549   3.187  10.039  1.00 12.30           N
ATOM     39  N   ASN A   6       5.514   2.664   4.856  1.00 11.99           N
ATOM     40  CA  ASN A   6       6.831   2.310   4.318  1.00 12.30           C
ATOM     41  C   ASN A   6       7.854   2.761   5.324  1.00 13.40           C
ATOM     42  O   ASN A   6       8.219   3.943   5.374  1.00 13.92           O
ATOM     43  CB  ASN A   6       7.065   3.016   2.993  1.00 12.13           C
ATOM     44  CG  ASN A   6       5.961   2.735   2.003  1.00 12.77           C
ATOM     45  OD1 ASN A   6       5.798   1.604   1.551  1.00 14.27           O
ATOM     46  ND2 ASN A   6       5.195   3.747   1.679  1.00 10.07           N
ATOM     47  N   TYR A   7       8.292   1.817   6.147  1.00 14.70           N
ATOM     48  CA  TYR A   7       9.159   2.144   7.299  1.00 15.18           C
ATOM     49  C   TYR A   7      10.603   2.331   6.885  1.00 15.91           C
ATOM     50  O   TYR A   7      11.041   1.811   5.855  1.00 15.76           O
ATOM     51  CB  TYR A   7       9.061   1.065   8.369  1.00 15.35           C
ATOM     52  CG  TYR A   7       7.665   0.929   8.902  1.00 14.45           C
ATOM     53  CD1 TYR A   7       6.771   0.021   8.327  1.00 15.68           C
ATOM     54  CD2 TYR A   7       7.210   1.756   9.920  1.00 14.80           C
ATOM     55  CE1 TYR A   7       5.480  -0.094   8.796  1.00 13.46           C
ATOM     56  CE2 TYR A   7       5.904   1.649  10.416  1.00 14.33           C
ATOM     57  CZ  TYR A   7       5.047   0.729   9.831  1.00 15.09           C
ATOM     58  OH  TYR A   7       3.766   0.589  10.291  1.00 14.39           O
ATOM     59  OXT TYR A   7      11.358   2.999   7.612  1.00 17.49           O
TER      60      TYR A   7
HETATM   61  O   HOH A   8      -6.471   5.227   7.124  1.00 22.62           O
HETATM   62  O   HOH A   9      10.431   1.858   3.216  1.00 19.71           O
HETATM   63  O   HOH A  10     -11.286   1.756  -1.468  1.00 17.08           O
HETATM   64  O   HOH A  11      11.808   4.179   9.970  1.00 23.99           O
HETATM   65  O   HOH A  12      13.605   1.327   9.198  1.00 26.17           O
HETATM   66  O   HOH A  13      -2.749   3.429  10.024  1.00 39.15           O
HETATM   67  O   HOH A  14      -1.500   0.682  10.967  1.00 43.49           O
MASTER      238    0    0    0    0    0    0    6   66    1    0    1
END
'''

    # test reading/writing PDB
    test_filename = 'test_model.pdb'
    test_output_filename = 'test_model_output.pdb'
    test_eff = 'model.eff'
    dm = DataManager(['model'])
    dm.process_model_str(test_filename, model_str)
    dm.write_model_file(model_str,
                        filename=test_output_filename,
                        overwrite=True)
    m = dm.get_model(test_output_filename)
    assert test_output_filename in dm.get_model_names()
    dm.write_model_file(m, overwrite=True)
    pdb_filename = 'cctbx_program.pdb'
    assert os.path.exists(pdb_filename)
    dm.process_model_file(pdb_filename)
    assert not dm.get_model(pdb_filename).input_model_format_cif()
    dm.write_model_file(m, test_filename, overwrite=True)

    # test reading PDB writing CIF
    test_filename = 'test_model.pdb'
    test_output_filename = 'test_model.cif'
    dm = DataManager(['model'])
    dm.process_model_str(test_filename, model_str)
    m = dm.get_model(test_filename)
    dm.write_model_file(m,
                        filename=test_output_filename,
                        format='cif',
                        overwrite=True)
    m = dm.get_model(test_output_filename)
    assert test_output_filename in dm.get_model_names()
    dm.write_model_file(m, overwrite=True)
    cif_filename = 'cctbx_program.cif'
    assert os.path.exists(cif_filename)
    dm.process_model_file(cif_filename)
    assert dm.get_model(cif_filename).input_model_format_cif()

    # test type
    assert dm.get_model_type() == 'x_ray'
    dm.set_model_type(test_filename, 'neutron')
    assert dm.get_model_type() == 'neutron'
    phil_scope = dm.export_phil_scope()
    extract = phil_scope.extract()
    assert extract.data_manager.model[0].type == 'neutron'
    with open(test_eff, 'w') as f:
        f.write(phil_scope.as_str())
    new_phil_scope = iotbx.phil.parse(file_name=test_eff)
    new_dm = DataManager(['model'])
    new_dm.load_phil_scope(new_phil_scope)
    assert new_dm.get_model_type(test_filename) == 'neutron'
    new_dm = DataManager(['model'])
    try:
        new_dm.set_default_model_type('nonsense')
    except Sorry:
        pass
    new_dm.set_default_model_type('electron')
    new_dm.process_model_file(test_filename)
    assert new_dm.get_model_type() == 'electron'
    assert len(new_dm.get_model_names()) == 1
    assert len(new_dm.get_model_names(model_type='electron')) == 1
    assert len(new_dm.get_model_names(model_type='neutron')) == 0

    os.remove(test_eff)
    os.remove(test_filename)

    # test reading/writing CIF
    test_filename = 'test_model_datatype.cif'
    dm.write_model_file(dm.get_model().model_as_mmcif(),
                        filename=test_filename,
                        overwrite=True)
    dm.process_model_file(test_filename)
    os.remove(test_filename)
    assert test_filename in dm.get_model_names()
    m = dm.get_model(test_filename)
    dm.write_model_file(m, overwrite=True)
    cif_filename = 'cctbx_program.cif'
    assert os.path.exists(cif_filename)
    dm.process_model_file(cif_filename)
    assert dm.get_model(cif_filename).input_model_format_cif()
    os.remove(pdb_filename)
    os.remove(cif_filename)

    # test pdb_interpretation
    extract = mmtbx.model.manager.get_default_pdb_interpretation_params()
    extract.pdb_interpretation.use_neutron_distances = True
    dm.update_pdb_interpretation_for_model(test_filename, extract)
    assert dm.get_model(test_filename).restraints_manager is None
Example #17
def test_data_manager():
    a = DataManager(['model'])

    a.add_model('a', 'b')
    a.add_model('c', 'd')
    assert a.get_model() == 'b'
    assert a.get_model('a') == 'b'
    assert a.get_model('c') == 'd'
    assert a.get_model_names() == ['a', 'c']

    assert a.has_models()
    assert a.has_models(exact_count=True, expected_n=2)
    assert not a.has_models(expected_n=3, raise_sorry=False)

    # exporting phil
    working_phil = a.export_phil_scope()
    assert len(working_phil.extract().data_manager.model) == 2

    # data tracking
    try:
        a.has_models(expected_n=3, raise_sorry=True)
    except Sorry:
        pass

    try:
        a.has_models(exact_count=True, raise_sorry=True)
    except Sorry:
        pass

    a.set_default_model('c')
    assert a.get_model() == 'd'

    assert (a.get_model_names() == ['a', 'c']
            or a.get_model_names() == ['c', 'a'])

    a.remove_model('c')
    try:
        a.get_model()
    except Sorry:
        pass
    try:
        a.get_model('missing')
    except Sorry:
        pass
    try:
        a.set_default_model('missing')
    except Sorry:
        pass

    a = DataManager(datatypes=['sequence', 'phil'])
    assert a.get_sequence_names() == []
    assert not hasattr(a, 'get_model')

    # phil functions
    test_phil_str = '''
data_manager {
  phil_files = data_manager_test.eff
}
'''
    with open('data_manager_test.eff', 'w') as f:
        f.write(test_phil_str)

    # loading file with get function
    assert len(a.get_phil_names()) == 0
    p = a.get_phil('data_manager_test.eff')
    assert type(p) == libtbx.phil.scope
    assert 'data_manager_test.eff' in a.get_phil_names()

    # loading file with phil
    a = DataManager(datatypes=['phil'])
    test_phil = iotbx.phil.parse(test_phil_str)
    a.load_phil_scope(test_phil)

    assert 'data_manager_test.eff' in a.get_phil_names()
    assert a.get_default_phil_name() == 'data_manager_test.eff'

    os.remove('data_manager_test.eff')

    # writing
    a = DataManager(datatypes=['model', 'phil', 'sequence'])
    a.add_model('a', 'b')
    a.add_phil('c', 'd')
    a.add_sequence('e', 'f')

    a.write_model_file(a.get_model(), filename='a.dat', overwrite=True)
    a.write_phil_file(a.get_phil(), filename='c.dat', overwrite=True)
    a.write_sequence_file(a.get_sequence(), filename='e.dat', overwrite=True)

    with open('a.dat', 'r') as f:
        lines = f.readlines()
    assert lines[0] == 'b'

    os.remove('a.dat')
    os.remove('c.dat')
    os.remove('e.dat')
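# Minimal sketch of the label-based add/get pattern used in the test above;
# 'x.pdb' and the accompanying text are placeholders, not real model data.
dm = DataManager(['model'])
dm.add_model('x.pdb', 'REMARK placeholder')
assert dm.get_model('x.pdb') == 'REMARK placeholder'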
def exercise(file_name=None, pdb_file_name = None, map_file_name = None ,
    split_pdb_file_name = None,
    out = sys.stdout):

  # Set up source data

  if not os.path.isfile(file_name):
    raise Sorry("Missing the file: %s" %(file_name)+"\n")

  print ("Reading from %s" %(file_name))
  from iotbx.map_manager import map_manager
  m = map_manager(file_name)

  print ("Header information from %s:" %(file_name))
  m.show_summary(out = out)

  map_data = m.map_data().deep_copy()
  crystal_symmetry = m.crystal_symmetry()
  unit_cell_parameters = m.crystal_symmetry().unit_cell().parameters()

  print ("\nMap origin: %s Extent %s"  %( map_data.origin(), map_data.all()))
  print ("Original unit cell, not just unit cell of part in this file): %s" %(
     str(unit_cell_parameters)))

  grid_point = (1, 2, 3)
  if map_data.origin() !=  (0, 0, 0): # make sure it is inside
    from scitbx.matrix import col
    grid_point = tuple (col(grid_point)+col(map_data.origin()))
  print ("\nValue of map_data at grid point %s: %.3f" %(str(grid_point),
    map_data[grid_point]))
  print ("Map data is %s" %(type(map_data)))

  random_position = (10, 5, 7.9)
  point_frac = crystal_symmetry.unit_cell().fractionalize(random_position)
  value_at_point_frac = map_data.eight_point_interpolation(point_frac)
  print ("Value of map_data at coordinates %s: %.3f" %(
      str(random_position), value_at_point_frac))

  map_data_as_float = map_data.as_float()
  print ("Map data as float is %s" %(type(map_data_as_float)))


  # make a little model
  sites_cart = flex.vec3_double( ((8, 10, 12), (14, 15, 16)))
  model = model_manager.from_sites_cart(
         atom_name = ' CA ',
         resname = 'ALA',
         chain_id = 'A',
         b_iso = 30.,
         occ = 1.,
         scatterer = 'C',
         sites_cart = sites_cart,
         crystal_symmetry = crystal_symmetry)


  # Move map and a model to place origin at (0, 0, 0)
  # map data is new copy but model is shifted in place.

  from iotbx.map_model_manager import map_model_manager
  mam = map_model_manager(
          map_manager =  m,
          model     = model.deep_copy(),
    )

  # Read in map and model and split up
  dm = DataManager()
  aa = dm.get_map_model_manager(model_file=pdb_file_name,
    map_files=map_file_name)
  bb = dm.get_map_model_manager(model_file=split_pdb_file_name,
    map_files=map_file_name)

  for selection_method in ['by_chain', 'by_segment','supplied_selections',
      'boxes']:
    if selection_method == 'boxes':
      choices = [True, False]
    else:
      choices = [True]
    if selection_method == 'by_chain':
      mask_choices = [True,False]
    else:
      mask_choices = [False]
    for select_final_boxes_based_on_model in choices:
      for skip_empty_boxes in choices:
        for mask_choice in mask_choices:
          if mask_choice: # use split model
            a=bb.deep_copy()
          else: # usual
            a=aa.deep_copy()
          print ("\nRunning split_up_map_and_model with \n"+
            "select_final_boxes_based_on_model="+
           "%s   skip_empty_boxes=%s selection_method=%s" %(
            select_final_boxes_based_on_model,skip_empty_boxes,selection_method))

          if selection_method == 'by_chain':
            print ("Mask around unused atoms: %s" %(mask_choice))
            box_info = a.split_up_map_and_model_by_chain(
              mask_around_unselected_atoms=mask_choice)
          elif selection_method == 'by_segment':
            box_info = a.split_up_map_and_model_by_segment()
          elif selection_method == 'supplied_selections':
            selection = a.model().selection('all')
            box_info = a.split_up_map_and_model_by_supplied_selections(
              selection_list = [selection])
          elif selection_method == 'boxes':
            box_info = a.split_up_map_and_model_by_boxes(
              skip_empty_boxes = skip_empty_boxes,
              select_final_boxes_based_on_model =
                select_final_boxes_based_on_model)
          print (selection_method,skip_empty_boxes,
              len(box_info.selection_list),
              box_info.selection_list[0].count(True))
          assert (selection_method,skip_empty_boxes,
              len(box_info.selection_list),
              box_info.selection_list[0].count(True)) in [
                ('by_chain',True,3,19),
                ("by_chain",True,1,86,),
                ("by_segment",True,1,86,),
                ("supplied_selections",True,1,86,),
                ("boxes",True,13,1,),
                ("boxes",False,36,0,),
                ("boxes",True,13,1,),
                ("boxes",False,36,0,),
                ]




          # Change the coordinates in one box
          small_model = box_info.mmm_list[0].model()
          small_sites_cart = small_model.get_sites_cart()
          from scitbx.matrix import col
          small_sites_cart += col((1,0,0))
          small_model.set_crystal_symmetry_and_sites_cart(
            sites_cart = small_sites_cart,
            crystal_symmetry = small_model.crystal_symmetry())
          # Put everything back together
          a.merge_split_maps_and_models(box_info = box_info)


  mam.box_all_maps_around_model_and_shift_origin()

  shifted_crystal_symmetry = mam.model().crystal_symmetry()
  shifted_model = mam.model()
  shifted_map_data = mam.map_data()

  print ("\nOriginal map origin (grid units):", map_data.origin())
  print ("Original model:\n", model.model_as_pdb())

  print ("Shifted map origin:", shifted_map_data.origin())
  print ("Shifted model:\n", shifted_model.model_as_pdb())


  # Save the map_model manager
  mam_dc=mam.deep_copy()
  print ("dc",mam)
  print ("dc mam_dc",mam_dc)

  # Mask map around atoms
  mam=mam_dc.deep_copy()
  print ("dc mam_dc dc",mam_dc)
  print (mam)
  mam.mask_all_maps_around_atoms(mask_atoms_atom_radius = 3,
     set_outside_to_mean_inside=True, soft_mask=False)
  print ("Mean before masking", mam.map_data().as_1d().min_max_mean().mean)
  assert approx_equal(mam.map_data().as_1d().min_max_mean().mean,
      -0.0585683621466)
  print ("Max before masking", mam.map_data().as_1d().min_max_mean().max)
  assert approx_equal(mam.map_data().as_1d().min_max_mean().max,
      -0.0585683621466)

  # Mask map around atoms, with soft mask
  mam=mam_dc.deep_copy()
  mam.mask_all_maps_around_atoms(mask_atoms_atom_radius = 3, soft_mask = True,
    soft_mask_radius = 5, set_outside_to_mean_inside=True)
  print ("Mean after first masking", mam.map_data().as_1d().min_max_mean().mean)
  assert approx_equal(mam.map_data().as_1d().min_max_mean().mean,
      -0.00177661714805)
  print ("Max after first masking", mam.map_data().as_1d().min_max_mean().max)
  assert approx_equal(mam.map_data().as_1d().min_max_mean().max,
       0.236853733659)

  # Mask map around atoms again
  mam.mask_all_maps_around_atoms(mask_atoms_atom_radius = 3,
     set_outside_to_mean_inside = True, soft_mask=False)
  print ("Mean after second masking", mam.map_data().as_1d().min_max_mean().mean)
  assert approx_equal(mam.map_data().as_1d().min_max_mean().mean,
     -0.0585683621466)
  print ("Max after second masking", mam.map_data().as_1d().min_max_mean().max)
  assert approx_equal(mam.map_data().as_1d().min_max_mean().max,
      -0.0585683621466)

  # Mask around edges
  mam=mam_dc.deep_copy()
  mam.mask_all_maps_around_edges( soft_mask_radius = 3)
  print ("Mean after masking edges", mam.map_data().as_1d().min_max_mean().mean)
  assert approx_equal(mam.map_data().as_1d().min_max_mean().mean,
      0.0155055604192)
  print ("Max after masking edges", mam.map_data().as_1d().min_max_mean().max)
  assert approx_equal(mam.map_data().as_1d().min_max_mean().max,
      0.249827131629)


  print ("\nWriting map_data and model in shifted position (origin at 0, 0, 0)")

  output_file_name = 'shifted_map.ccp4'
  print ("Writing to %s" %(output_file_name))
  mrcfile.write_ccp4_map(
      file_name = output_file_name,
      crystal_symmetry = shifted_crystal_symmetry,
      map_data = shifted_map_data, )

  output_file_name = 'shifted_model.pdb'
  f = open(output_file_name, 'w')
  print (shifted_model.model_as_pdb(), file=f)
  f.close()


  print ("\nWriting map_data and model in original position (origin at %s)" %(
      str(mam.map_manager().origin_shift_grid_units)))

  output_file_name = 'new_map_original_position.ccp4'
  print ("Writing to %s" %(output_file_name))
  mrcfile.write_ccp4_map(
      file_name = output_file_name,
      crystal_symmetry = shifted_crystal_symmetry,
      map_data = shifted_map_data,
      origin_shift_grid_units = mam.map_manager().origin_shift_grid_units)
  print (shifted_model.model_as_pdb())
  output_pdb_file_name = 'new_model_original_position.pdb'
  f = open(output_pdb_file_name, 'w')
  print (shifted_model.model_as_pdb(), file=f)
  f.close()

  # Write as mmcif
  output_cif_file_name = 'new_model_original_position.cif'
  f = open(output_cif_file_name, 'w')
  print (shifted_model.model_as_mmcif(),file = f)
  f.close()


  # Read the new map and model
  import iotbx.pdb
  new_model = model_manager(
     model_input = iotbx.pdb.input(
         source_info = None,
         lines = flex.split_lines(open(output_pdb_file_name).read())),
     crystal_symmetry = crystal_symmetry)
  assert new_model.model_as_pdb() == model.model_as_pdb()

  new_model_from_cif = model_manager(
     model_input = iotbx.pdb.input(
         source_info = None,
         lines = flex.split_lines(open(output_cif_file_name).read())),
     crystal_symmetry = crystal_symmetry)
  assert new_model_from_cif.model_as_pdb() == model.model_as_pdb()

  # Read and box the original file again in case we modified m in any
  #   previous tests
  m = map_manager(file_name)
  mam=map_model_manager(model=model.deep_copy(),map_manager=m)
  mam.box_all_maps_around_model_and_shift_origin()

  file_name = output_file_name
  print ("Reading from %s" %(file_name))
  new_map = iotbx.mrcfile.map_reader(file_name = file_name, verbose = False)
  new_map.data = new_map.data.shift_origin()
  print ("Header information from %s:" %(file_name))
  new_map.show_summary(out = out)
  assert new_map.map_data().origin() == mam.map_manager().map_data().origin()
  assert new_map.crystal_symmetry().is_similar_symmetry(mam.map_manager().crystal_symmetry())

  # make a map_model_manager with lots of maps and model and ncs
  from mmtbx.ncs.ncs import ncs
  ncs_object=ncs()
  ncs_object.set_unit_ncs()
  mam = map_model_manager(
          map_manager =  m,
          ncs_object =  ncs_object,
          map_manager_1 =  m.deep_copy(),
          map_manager_2 =  m.deep_copy(),
          extra_model_list =  [model.deep_copy(),model.deep_copy()],
          extra_model_id_list = ["model_1","model_2"],
          extra_map_manager_list =  [m.deep_copy(),m.deep_copy()],
          extra_map_manager_id_list = ["extra_1","extra_2"],
          model     = model.deep_copy(),
    )


  # make a map_model_manager with lots of maps and model and ncs and run
  # with wrapping and ignore_symmetry_conflicts on
  from mmtbx.ncs.ncs import ncs
  ncs_object=ncs()
  ncs_object.set_unit_ncs()
  m.set_ncs_object(ncs_object.deep_copy())
  mam2 = map_model_manager(
          map_manager =  m.deep_copy(),
          ncs_object =  ncs_object.deep_copy(),
          map_manager_1 =  m.deep_copy(),
          map_manager_2 =  m.deep_copy(),
          extra_model_list =  [model.deep_copy(),model.deep_copy()],
          extra_model_id_list = ["model_1","model_2"],
          extra_map_manager_list =  [m.deep_copy(),m.deep_copy()],
          extra_map_manager_id_list = ["extra_1","extra_2"],
          model     = model.deep_copy(),
          ignore_symmetry_conflicts = True,
          wrapping = m.wrapping(),
    )
  assert mam.map_manager().is_similar(mam2.map_manager())
  assert mam.map_manager().is_similar(mam2.map_manager_1())
  for m in mam2.map_managers():
    assert mam.map_manager().is_similar(m)
  assert mam.model().shift_cart() == mam2.model().shift_cart()
  assert mam.model().shift_cart() == mam2.get_model_by_id('model_2').shift_cart()



  print ("OK")
def exercise(file_name, out=sys.stdout):

    # Set up source data
    if not os.path.isfile(file_name):
        raise Sorry("Missing the file: %s" % (file_name) + "\n")

    print("Reading from %s" % (file_name))
    from iotbx.map_manager import map_manager

    m = map_manager(file_name)

    # make a little model
    sites_cart = flex.vec3_double(((8, 10, 12), (14, 15, 16)))
    model = model_manager.from_sites_cart(
        atom_name=' CA ',
        resname='ALA',
        chain_id='A',
        b_iso=30.,
        occ=1.,
        scatterer='C',
        sites_cart=sites_cart,
        crystal_symmetry=m.crystal_symmetry())

    # make a map_model_manager with lots of maps and model and ncs
    from iotbx.map_model_manager import map_model_manager

    from mmtbx.ncs.ncs import ncs
    ncs_object = ncs()
    ncs_object.set_unit_ncs()
    mask_mm = m.deep_copy()
    mask_mm.set_is_mask(True)
    mam = map_model_manager(
        map_manager=m,
        ncs_object=ncs_object,
        map_manager_1=m.deep_copy(),
        map_manager_2=m.deep_copy(),
        extra_map_manager_list=[m.deep_copy(),
                                m.deep_copy(),
                                m.deep_copy()],
        extra_map_manager_id_list=["extra_1", "extra_2", "map_manager_mask"],
        model=model.deep_copy(),
    )
    print(mam.map_manager())
    print(mam.model())
    print(mam.map_manager_1())
    print(mam.map_manager_2())
    print(mam.map_manager_mask())
    print(mam.map_manager().ncs_object())
    all_map_names = mam.map_id_list()
    for id in all_map_names:
        print("Map_manager %s: %s " % (id, mam.get_map_manager_by_id(id)))

    dm = DataManager(['model', 'miller_array', 'real_map', 'phil', 'ncs_spec'])
    dm.set_overwrite(True)

    # Create a model with ncs
    from iotbx.regression.ncs.tst_ncs import pdb_str_5
    file_name = 'tst_mam.pdb'
    f = open(file_name, 'w')
    print(pdb_str_5, file=f)
    f.close()

    # Generate map data from this model (it has ncs)
    mmm = map_model_manager()
    mmm.generate_map(box_cushion=0, file_name=file_name, n_residues=500)
    ncs_mam = mmm.deep_copy()
    ncs_mam_copy = mmm.deep_copy()

    # Make sure this model has 126 sites (42 sites times 3-fold ncs)
    assert ncs_mam.model().get_sites_cart().size() == 126
    assert approx_equal(ncs_mam.model().get_sites_cart()[0],
                        (23.560999999999996, 8.159, 10.660000000000002))

    # Get just unique part (42 sites)
    unique_mam = ncs_mam.extract_all_maps_around_model(
        select_unique_by_ncs=True)
    assert unique_mam.model().get_sites_cart().size() == 42
    assert approx_equal(unique_mam.model().get_sites_cart()[0],
                        (18.740916666666664, 13.1794, 16.10544))

    # Make sure that the extraction did not change the original but does change
    #   the extracted part
    assert (unique_mam.model().get_sites_cart()[0] !=
            ncs_mam.model().get_sites_cart()[0]
            )  # it was a deep copy so original stays

    # Shift back the extracted part and make sure it matches the original now
    shifted_back_unique_model = mmm.get_model_from_other(
        unique_mam.deep_copy())
    assert approx_equal(shifted_back_unique_model.get_sites_cart()[0],
                        (23.560999999999996, 8.158999999999997, 10.66))

    # Change the extracted model
    sites_cart = unique_mam.model().get_sites_cart()
    sites_cart[0] = (1, 1, 1)
    unique_mam.model().get_hierarchy().atoms().set_xyz(sites_cart)
    # Note: setting xyz in hierarchy does not set xrs by itself; do that now:
    unique_mam.model().set_sites_cart_from_hierarchy(multiply_ncs=False)

    # Make sure we really changed it
    assert approx_equal(unique_mam.model().get_sites_cart()[0], (1, 1, 1))

    # Now propagate all the changes in this unique part to entire original model
    #   using NCS
    ncs_mam.propagate_model_from_other(other=unique_mam,
                                       model_id='model',
                                       other_model_id='model')
    # ...and check that copy 1 and copy 2 both change
    assert approx_equal(
        ncs_mam.model().get_sites_cart()[0],
        (5.820083333333333, -4.020400000000001, -4.445440000000001))
    assert approx_equal(
        ncs_mam.model().get_sites_cart()[42],
        (38.41904613024224, 17.233251085893276, 2.5547442135142524))

    # Find ncs from map or model
    nn = ncs_mam_copy
    nn.write_map('ncs.ccp4')
    nn.write_model('ncs.pdb')
    ncs_object = nn.get_ncs_from_model()
    dm.write_ncs_spec_file(ncs_object, 'ncs.ncs_spec')
    print("NCS from map", ncs_object)
    nn.set_ncs_object(ncs_object)
    print("NCS now: ", nn.ncs_object())
    nn.get_ncs_from_map(ncs_object=ncs_object)
    print("ncs cc:", nn.ncs_cc())
    assert approx_equal(nn.ncs_cc(), 0.961915979834, eps=0.01)

    # Make a deep_copy
    dc = mam.deep_copy()
    new_mam = mam.deep_copy()
    assert mam.map_manager().map_data()[0] == new_mam.map_manager().map_data(
    )[0]

    # Make a customized_copy
    new_mam = mam.customized_copy(model_dict={'model': mam.model()})
    assert new_mam.model() is mam.model()
    assert not new_mam.map_dict() is mam.map_dict()

    new_mam = mam.customized_copy(model_dict={'model': mam.model()},
                                  map_dict=mam.map_dict())
    assert new_mam.model() is mam.model()
    assert new_mam.map_dict() is mam.map_dict()
    print(mam)

    # Add a map
    mam = dc.deep_copy()
    print(mam.map_id_list())
    assert len(mam.map_id_list()) == 6
    mam.add_map_manager_by_id(mam.map_manager().deep_copy(), 'new_map_manager')
    print(mam.map_id_list())
    assert len(mam.map_id_list()) == 7

    # duplicate a map
    mam = dc.deep_copy()
    print(mam.map_id_list())
    assert len(mam.map_id_list()) == 6
    mam.duplicate_map_manager('map_manager', 'new_map_manager')
    print(mam.map_id_list())
    assert len(mam.map_id_list()) == 7

    # resolution_filter a map
    mam = dc.deep_copy()
    print(mam.map_id_list())
    mam.duplicate_map_manager('map_manager', 'new_map_manager')
    mam.resolution_filter(map_id='new_map_manager', d_min=3.5, d_max=6)

    # Add a model
    mam = dc.deep_copy()
    print(mam.model_id_list())
    assert len(mam.model_id_list()) == 1
    mam.add_model_by_id(mam.model().deep_copy(), 'new_model')
    print(mam.model_id_list())
    assert len(mam.model_id_list()) == 2

    # Initialize a map
    mam1 = new_mam.deep_copy()
    mam1.initialize_maps(map_value=6)
    assert mam1.map_manager().map_data()[225] == 6

    # Create mask around density and apply to all maps
    mam1 = new_mam.deep_copy()
    mam1.mask_all_maps_around_density(
        solvent_content=0.5,
        soft_mask=True,
    )
    s = (mam1.get_map_manager_by_id('mask').map_data() > 0.5)
    assert approx_equal((s.count(True), s.size()), (1024, 2048))

    # Create mask around edges and apply to all maps
    mam1 = new_mam.deep_copy()
    mam1.mask_all_maps_around_edges()
    s = (mam1.get_map_manager_by_id('mask').map_data() > 0.5)
    assert approx_equal((s.count(True), s.size()), (1176, 2048))

    # Create a soft mask around model and apply to all maps
    new_mam.mask_all_maps_around_atoms(mask_atoms_atom_radius=8,
                                       soft_mask=True)
    s = (new_mam.get_map_manager_by_id('mask').map_data() > 0.5)
    assert approx_equal((s.count(True), s.size()), (1944, 2048))

    # Create a soft mask around model and do not do anything with it
    new_mam.create_mask_around_atoms(mask_atoms_atom_radius=8, soft_mask=True)
    s = (new_mam.get_map_manager_by_id('mask').map_data() > 0.5)
    assert approx_equal((s.count(True), s.size()), (1944, 2048))

    # Create a soft mask around model and do not do anything with it, wrapping=True
    dummy_mam = new_mam.deep_copy()
    dummy_mam.map_manager().set_wrapping(True)
    dummy_mam.create_mask_around_atoms(mask_atoms_atom_radius=8,
                                       soft_mask=True)
    s = (dummy_mam.get_map_manager_by_id('mask').map_data() > 0.5)
    assert approx_equal((s.count(True), s.size()), (1944, 2048))

    # Create a sharp mask around model and do not do anything with it
    new_mam.create_mask_around_atoms(soft_mask=False, mask_atoms_atom_radius=8)
    s = (new_mam.get_map_manager_by_id('mask').map_data() > 0.5)
    assert approx_equal((s.count(True), s.size()), (138, 2048))

    # Mask around edges and do not do anything with it
    mam = dc.deep_copy()
    mam.create_mask_around_edges()
    s = (mam.get_map_manager_by_id('mask').map_data() > 0.5)
    assert approx_equal((s.count(True), s.size()), (1176, 2048))

    # Mask around density and do not do anything with it
    mam = dc.deep_copy()
    mam.create_mask_around_density(soft_mask=False)
    s = (mam.get_map_manager_by_id('mask').map_data() > 0.5)
    assert approx_equal((s.count(True), s.size()), (1000, 2048))

    # Apply the current mask to one map
    mam.apply_mask_to_map('map_manager')
    s = (mam.map_manager().map_data() > 0.)
    assert approx_equal((s.count(True), s.size()), (640, 2048))
    s = (mam.map_manager().map_data() != 0.)
    assert approx_equal((s.count(True), s.size()), (1000, 2048))
    assert approx_equal((mam.map_manager().map_data()[225]), -0.0418027862906)

    # Apply any mask to one map
    mam.apply_mask_to_map('map_manager', mask_id='mask')
    s = (mam.map_manager().map_data() > 0.)
    assert approx_equal((s.count(True), s.size()), (640, 2048))
    s = (mam.map_manager().map_data() != 0.)
    assert approx_equal((s.count(True), s.size()), (1000, 2048))
    assert approx_equal((mam.map_manager().map_data()[225]), -0.0418027862906)

    # Apply the mask to all maps
    mam.apply_mask_to_maps()
    s = (mam.map_manager().map_data() > 0.)
    assert approx_equal((s.count(True), s.size()), (640, 2048))
    s = (mam.map_manager().map_data() != 0.)
    assert approx_equal((s.count(True), s.size()), (1000, 2048))
    assert approx_equal((mam.map_manager().map_data()[225]), -0.0418027862906)

    # Apply the mask to all maps, setting outside value to mean inside
    mam.apply_mask_to_maps(set_outside_to_mean_inside=True)
    s = (mam.map_manager().map_data() > 0.)
    assert approx_equal((s.count(True), s.size()), (1688, 2048))
    s = (mam.map_manager().map_data() != 0.)
    assert approx_equal((s.count(True), s.size()), (2048, 2048))
    assert approx_equal((mam.map_manager().map_data()[2047]), -0.0759598612785)
    s = (mam.get_map_manager_by_id('mask').map_data() > 0).as_1d()
    inside = mam.map_manager().map_data().as_1d().select(s)
    outside = mam.map_manager().map_data().as_1d().select(~s)
    assert approx_equal(
        (inside.min_max_mean().max, outside.min_max_mean().max),
        (0.335603952408, 0.0239064293122))

    # Make a new map and model, get mam and box with selection
    mmm = map_model_manager()
    mmm.generate_map(box_cushion=0, wrapping=True)
    mam = mmm
    mam_dc = mam.deep_copy()

    new_mm_1 = mam.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((18, 25, 20), (18, 25, 20)))

    # Get local fsc of randomized maps
    dc = mam_dc.deep_copy()
    dc.map_manager().set_wrapping(False)
    map_coeffs = dc.map_manager().map_as_fourier_coefficients(d_min=3)
    from cctbx.development.create_models_or_maps import generate_map
    new_mm_1 = generate_map(map_coeffs=map_coeffs,
                            d_min=3,
                            low_resolution_real_space_noise_fraction=1,
                            high_resolution_real_space_noise_fraction=50,
                            map_manager=dc.map_manager(),
                            random_seed=124321)
    new_mm_2 = generate_map(map_coeffs=map_coeffs,
                            d_min=3,
                            low_resolution_real_space_noise_fraction=1,
                            high_resolution_real_space_noise_fraction=50,
                            map_manager=dc.map_manager(),
                            random_seed=734119)
    dc.add_map_manager_by_id(new_mm_1, 'map_manager_1')
    dc.add_map_manager_by_id(new_mm_2, 'map_manager_2')
    cc = dc.map_map_cc()
    fsc_curve = dc.map_map_fsc()
    dc.set_log(sys.stdout)
    dc.local_fsc(n_boxes=1)

    # Get map-map FSC
    dc = mam_dc.deep_copy()
    dc.duplicate_map_manager(map_id='map_manager', new_map_id='filtered')
    dc.resolution_filter(d_min=3.5, d_max=10, map_id='filtered')
    dc.create_mask_around_atoms()
    fsc_curve = dc.map_map_fsc(map_id_1='map_manager',
                               map_id_2='filtered',
                               mask_id='mask',
                               resolution=3.5,
                               fsc_cutoff=0.97)
    assert approx_equal(fsc_curve.d_min, 3.91175024213, eps=0.01)
    assert approx_equal(fsc_curve.fsc.fsc[-1], 0.695137718033)

    # Get map-map CC
    dc = mam_dc.deep_copy()
    dc.duplicate_map_manager(map_id='map_manager', new_map_id='filtered')
    dc.resolution_filter(d_min=3.5, d_max=6, map_id='filtered')
    cc = dc.map_map_cc('map_manager', 'filtered')
    assert approx_equal(cc, 0.706499206126)

    # Get map-map CC with mask
    dc = mam_dc.deep_copy()
    dc.duplicate_map_manager(map_id='map_manager', new_map_id='filtered')
    dc.create_mask_around_density(mask_id='filtered')
    cc = dc.map_map_cc('map_manager', 'filtered', mask_id='mask')
    assert approx_equal(cc, 0.411247493741)

    # box around model
    mam = mam_dc.deep_copy()
    mam.box_all_maps_around_model_and_shift_origin(
        selection_string="resseq 221:221")
    new_mm_1 = mam.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((18, 25, 20), (24, 20, 20)))

    # extract_around_model (get new mam)
    new_mam_dc = mam_dc.extract_all_maps_around_model(
        selection_string="resseq 221:221")
    new_mm_1a = new_mam_dc.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1a.map_data().all()),
                        ((18, 25, 20), (24, 20, 20)))
    assert approx_equal(new_mm_1.map_data(), new_mm_1a.map_data())

    # box around_density
    mam2 = mam_dc.deep_copy()
    mam2.box_all_maps_around_density_and_shift_origin(box_cushion=0)
    new_mm_2 = mam2.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_2.map_data().all()),
                        ((18, 25, 20), (16, 23, 18)))

    # extract_around_density (get new mam)
    mam2 = mam_dc.deep_copy()
    mam2_b = mam2.extract_all_maps_around_density(box_cushion=0)
    new_mm_2 = mam2_b.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_2.map_data().all()),
                        ((18, 25, 20), (16, 23, 18)))

    # Repeat as map_model_manager:
    mmm = mam_dc.as_map_model_manager().deep_copy()
    mmm.box_all_maps_around_model_and_shift_origin(
        selection_string="resseq 221:221")
    new_mm_1a = mmm.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1a.map_data().all()),
                        ((24, 20, 20), (24, 20, 20)))
    assert approx_equal(new_mm_1.map_data(), new_mm_1a.map_data())

    # box around density
    mam.box_all_maps_around_density_and_shift_origin(box_cushion=0)
    new_mm_1 = mam.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((24, 20, 20), (22, 18, 18)))

    # extract around density (get new mam)
    mam1 = mam_dc.deep_copy()
    mam1.extract_all_maps_around_density(box_cushion=0)
    new_mm_1 = mam1.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((24, 20, 20), (18, 25, 20)))

    # create mask around density, then box around mask (i.e., box around density)
    mam.create_mask_around_density(soft_mask=False)
    mam.box_all_maps_around_mask_and_shift_origin(box_cushion=3)
    new_mm_1 = mam.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((24, 20, 20), (22, 18, 18)))

    # box with bounds
    mam.box_all_maps_with_bounds_and_shift_origin(lower_bounds=(10, 10, 10),
                                                  upper_bounds=(15, 15, 15))
    new_mm_1 = mam.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((24, 20, 20), (6, 6, 6)))

    # extract with bounds
    mam = mam_dc.deep_copy()
    mam_1 = mam.extract_all_maps_with_bounds(lower_bounds=(10, 10, 10),
                                             upper_bounds=(15, 15, 15))
    new_mm_1 = mam_1.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((24, 20, 20), (6, 6, 6)))

    # box with unique
    mam = mam_dc.deep_copy()
    mam.box_all_maps_around_unique_and_shift_origin(molecular_mass=2500,
                                                    resolution=3)
    new_mm_1 = mam.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((24, 20, 20), (18, 25, 20)))

    # extract with unique
    mam = mam_dc.deep_copy()
    mam_1 = mam.extract_all_maps_around_unique(molecular_mass=2500,
                                               resolution=3)
    new_mm_1 = mam_1.map_manager()
    assert approx_equal((mmm.map_data().all(), new_mm_1.map_data().all()),
                        ((24, 20, 20), (18, 25, 20)))

    # extract a box and then restore model into same reference as current mam
    mam = mam_dc.deep_copy()
    mam.box_all_maps_with_bounds_and_shift_origin(lower_bounds=(2, 2, 2),
                                                  upper_bounds=(17, 17, 17))
    print("mam:",
          mam.model().get_sites_cart()[0],
          mam.map_manager().origin_is_zero())
    # extract a box
    box_mam = mam.extract_all_maps_with_bounds(lower_bounds=(10, 10, 10),
                                               upper_bounds=(15, 15, 15))
    box_model = box_mam.model()
    matched_box_model = mam.get_model_from_other(box_mam)
    assert approx_equal(matched_box_model.get_sites_cart()[0],
                        mam.model().get_sites_cart()[0])

    # Convert a map to Fourier coefficients
    mam = mam_dc.deep_copy()
    ma = mam.map_as_fourier_coefficients(d_min=3)
    assert approx_equal(ma.d_min(), 3.01655042414)

    mam.add_map_from_fourier_coefficients(ma, map_id='new_map_manager')
    cc = flex.linear_correlation(
        mam.get_map_manager_by_id('map_manager').map_data().as_1d(),
        mam.get_map_manager_by_id(
            'new_map_manager').map_data().as_1d()).coefficient()
    assert (cc >= 0.99)
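
    # Hedged cross-check (sketch): the manager's own map_map_cc between the
    # original and reconstructed maps should show a similarly high correlation.
    print('map_map_cc after Fourier round trip:',
          mam.map_map_cc('map_manager', 'new_map_manager'))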

    # Get map-model CC
    dc = mam_dc.extract_all_maps_around_model(
        selection_string="(name ca or name cb or name c or name o) " +
        "and resseq 221:221",
        box_cushion=0)
    cc = dc.map_model_cc(resolution=3)
    assert approx_equal(cc, 0.450025539936)

    # Remove model outside map
    dc.remove_model_outside_map(boundary=0)
    assert (mam_dc.model().get_sites_cart().size(),
            dc.model().get_sites_cart().size()) == (86, 4)

    # shift a model to match the map
    dc = mam_dc.extract_all_maps_around_model(
        selection_string="(name ca or name cb or name c or name o) " +
        "and resseq 221:221",
        box_cushion=0)
    actual_model = dc.model().deep_copy()
    working_model = dc.model().deep_copy()
    working_model.set_shift_cart((0, 0, 0))
    working_model.set_sites_cart(working_model.get_sites_cart() -
                                 actual_model.shift_cart())
    dc.shift_any_model_to_match(working_model)
    assert approx_equal(actual_model.get_sites_cart()[0],
                        working_model.get_sites_cart()[0])
예제 #20
0
def test_miller_array_datatype():

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_mtz = os.path.join(data_dir, 'data',
                            'insulin_unmerged_cutted_from_ccp4.mtz')

    dm = DataManager(['miller_array', 'phil'])
    dm.process_miller_array_file(data_mtz)

    # test labels
    labels = [
        'M_ISYM', 'BATCH', 'I,SIGI,merged', 'IPR,SIGIPR,merged',
        'FRACTIONCALC', 'XDET', 'YDET', 'ROT', 'WIDTH', 'LP', 'MPART', 'FLAG',
        'BGPKRATIOS'
    ]
    for label in dm.get_miller_array_labels():
        assert label in labels

    assert len(dm.get_miller_arrays()) == len(dm.get_miller_array_labels())

    # test access by label
    label = dm.get_miller_array_labels()[3]
    new_label = dm.get_miller_arrays(labels=[label])[0].info().label_string()
    assert label == new_label

    # test custom PHIL
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test.phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test.phil')
    new_dm = DataManager(['miller_array', 'phil'])
    new_dm.load_phil_scope(loaded_phil)
    assert data_mtz == new_dm.get_default_miller_array_name()
    for label in new_dm.get_miller_array_labels():
        assert label in labels

    os.remove('test.phil')

    # test type
    assert dm.get_miller_array_type() == 'x_ray'
    label = labels[3]
    dm.set_miller_array_type(data_mtz, label, 'electron')
    assert dm.get_miller_array_type(label=label) == 'electron'
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test_phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test_phil')
    new_dm.load_phil_scope(loaded_phil)
    assert new_dm.get_miller_array_type(label=label) == 'electron'
    new_dm = DataManager(['miller_array'])
    try:
        new_dm.set_default_miller_array_type('q')
    except Sorry:
        pass
    new_dm.set_default_miller_array_type('neutron')
    new_dm.process_miller_array_file(data_mtz)
    assert new_dm.get_miller_array_type(label=label) == 'neutron'

    os.remove('test_phil')

    # test writing file
    arrays = dm.get_miller_arrays()
    dataset = arrays[2].as_mtz_dataset(column_root_label='label1')
    dataset.add_miller_array(miller_array=arrays[3],
                             column_root_label='label2')
    mtz_object = dataset.mtz_object()
    dm.write_miller_array_file(mtz_object, filename='test.mtz', overwrite=True)
    dm.process_miller_array_file('test.mtz')
    new_labels = dm.get_miller_array_labels('test.mtz')
    assert 'label1,SIGlabel1' in new_labels
    assert 'label2,SIGlabel2' in new_labels

    os.remove('test.mtz')

    # test file server
    fs1 = dm.get_reflection_file_server()
    fs2 = dm.get_reflection_file_server([data_mtz, data_mtz])
    assert 2 * len(fs1.miller_arrays) == len(fs2.miller_arrays)
    cs = crystal.symmetry(
        unit_cell=dm.get_miller_arrays()[0].crystal_symmetry().unit_cell(),
        space_group_symbol='P1')
    fs = dm.get_reflection_file_server(crystal_symmetry=cs)
    assert fs.crystal_symmetry.is_similar_symmetry(cs)
    assert not fs.crystal_symmetry.is_similar_symmetry(
        dm.get_miller_arrays()[0].crystal_symmetry())
    fs = dm.get_reflection_file_server(labels=['I,SIGI,merged'])
    assert len(fs.get_miller_arrays(None)) == 1
    miller_array = fs.get_amplitudes(None, None, True, None, None)
    assert miller_array.info().label_string() == 'I,as_amplitude_array,merged'

    for label in dm.get_miller_array_labels():
        dm.set_miller_array_type(label=label, array_type='electron')
    fs = dm.get_reflection_file_server(array_type='x_ray')
    assert len(fs.get_miller_arrays(None)) == 0
    fs = dm.get_reflection_file_server(array_type='electron')
    assert len(fs.get_miller_arrays(None)) == 13
    fs = dm.get_reflection_file_server(
        filenames=[data_mtz],
        labels=[['I,SIGI,merged', 'IPR,SIGIPR,merged']],
        array_type='neutron')
    assert len(fs.get_miller_arrays(None)) == 0
    for label in ['I,SIGI,merged', 'IPR,SIGIPR,merged']:
        dm.set_miller_array_type(label=label, array_type='x_ray')
    fs = dm.get_reflection_file_server(
        filenames=[data_mtz],
        labels=[['I,SIGI,merged', 'IPR,SIGIPR,merged']],
        array_type='x_ray')
    assert len(fs.get_miller_arrays(data_mtz)) == 2
    fs = dm.get_reflection_file_server(filenames=[data_mtz],
                                       array_type='x_ray')
    assert len(fs.get_miller_arrays(data_mtz)) == 2
    fs = dm.get_reflection_file_server(filenames=[data_mtz],
                                       array_type='electron')
    assert len(fs.get_miller_arrays(data_mtz)) == 11

    # test subset of labels
    label_subset = labels[3:8]
    dm = DataManager(['miller_array', 'phil'])
    dm.process_miller_array_file(data_mtz)
    dm._miller_array_labels[data_mtz] = label_subset
    dm.set_miller_array_type(label=label_subset[2], array_type='electron')
    assert dm.get_miller_array_type(label=label_subset[2]) == 'electron'
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test.phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test.phil')
    new_dm = DataManager(['miller_array', 'phil'])
    new_dm.load_phil_scope(loaded_phil)
    assert new_dm.get_miller_array_type(label=label_subset[2]) == 'electron'
    fs = new_dm.get_reflection_file_server(array_type='x_ray')
    assert len(fs.get_miller_arrays(None)) == 4
    fs = new_dm.get_reflection_file_server(array_type='electron')
    assert len(fs.get_miller_arrays(None)) == 1
    os.remove('test.phil')

    label_subset = list()
    dm = DataManager(['miller_array', 'phil'])
    dm.process_miller_array_file(data_mtz)
    dm._miller_array_labels[data_mtz] = label_subset
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test.phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test.phil')
    new_dm = DataManager(['miller_array', 'phil'])
    new_dm.load_phil_scope(loaded_phil)
    fs = new_dm.get_reflection_file_server(array_type='x_ray')
    assert len(fs.get_miller_arrays(None)) == 13
    fs = new_dm.get_reflection_file_server(array_type='electron')
    assert len(fs.get_miller_arrays(None)) == 0
    os.remove('test.phil')
예제 #21
0
def create_model_from_file(path_to_pdb_file):
  from iotbx.data_manager import DataManager    # Load in the DataManager
  dm = DataManager()             # Initialize the DataManager and call it dm
  model = dm.get_model(path_to_pdb_file)
  return model
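
# Minimal usage sketch for create_model_from_file; 'example_model.pdb' is a
# hypothetical file name used only for illustration.
if __name__ == '__main__':
  import os
  if os.path.isfile('example_model.pdb'):  # hypothetical input file
    model = create_model_from_file('example_model.pdb')
    print('Atoms in model:', model.get_hierarchy().atoms().size())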
예제 #22
0
def test_map_mixins():
    regression_dir = libtbx.env.find_in_repositories(
        relative_path='phenix_regression/maps')
    if not regression_dir:
        print('Skipping test, phenix_regression missing')
        return

    dm = DataManager(['real_map'])
    assert not hasattr(dm, 'has_real_maps_or_map_coefficients')
    assert hasattr(dm, 'has_real_maps')
    assert not hasattr(dm, 'has_map_coefficients')

    dm = DataManager(['map_coefficients'])
    assert not hasattr(dm, 'has_real_maps_or_map_coefficients')
    assert not hasattr(dm, 'has_real_maps')
    assert hasattr(dm, 'has_map_coefficients')

    dm = DataManager()
    assert hasattr(dm, 'has_real_maps_or_map_coefficients')
    assert hasattr(dm, 'has_real_maps')
    assert hasattr(dm, 'has_map_coefficients')

    cwd = os.getcwd()
    model_file = os.path.join(regression_dir, 'test_maps4.pdb')
    mtz_file = os.path.join(regression_dir, 'test_maps4.mtz')
    make_map([
        model_file, mtz_file, 'output.directory={cwd}'.format(cwd=cwd),
        'output.prefix=tmm'
    ],
             use_output_directory=False,
             log=null_out())
    real_map_file = 'tmm_2mFo-DFc_map.ccp4'
    map_coefficients_file = 'tmm_map_coeffs.mtz'

    assert not dm.has_real_maps_or_map_coefficients(expected_n=1,
                                                    exact_count=True)
    dm.process_real_map_file(real_map_file)
    assert dm.has_real_maps(expected_n=1, exact_count=True)
    assert dm.has_real_maps_or_map_coefficients(expected_n=1, exact_count=True)
    dm.process_map_coefficients_file(map_coefficients_file)
    assert dm.has_map_coefficients(expected_n=1, exact_count=True)
    assert not dm.has_real_maps_or_map_coefficients(expected_n=1,
                                                    exact_count=True)
    assert dm.has_real_maps_or_map_coefficients(expected_n=1,
                                                exact_count=False)
    assert dm.has_real_maps_or_map_coefficients(expected_n=2, exact_count=True)

    os.remove(real_map_file)
    os.remove(map_coefficients_file)
예제 #23
0
def test_real_map_datatype():

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_ccp4 = os.path.join(data_dir, 'data', 'non_zero_origin_map.ccp4')

    dm = DataManager(['real_map', 'phil'])
    dm.process_real_map_file(data_ccp4)
    assert dm.has_real_maps()

    # test custom PHIL
    dm.write_phil_file(dm.export_phil_scope().as_str(),
                       filename='test.phil',
                       overwrite=True)
    loaded_phil = iotbx.phil.parse(file_name='test.phil')
    new_dm = DataManager(['real_map', 'phil'])
    new_dm.load_phil_scope(loaded_phil)
    assert data_ccp4 == new_dm.get_default_real_map_name()
    os.remove('test.phil')

    # test writing and reading file
    mm = dm.get_real_map()
    mm.shift_origin()
    dm.write_real_map_file(mm, filename='test.ccp4', overwrite=True)
    dm.process_real_map_file('test.ccp4')
    new_mm = dm.get_real_map('test.ccp4')
    assert not new_mm.is_similar(mm)
    new_mm.shift_origin()
    assert new_mm.is_similar(mm)

    os.remove('test.ccp4')
예제 #24
0
def main():
    dm = DataManager()
    dm.process_model_str('testing', model_1yjp)
    model = dm.get_model()
    rc = model.restraints_as_geo(force=True)
    rc = check_geo(rc)
    assert rc == count_1yjp, check_diff(rc, count_1yjp)

    dm = DataManager()
    dm.process_model_str('testing', model_1yjp_with_waters)
    model = dm.get_model()
    rc = model.restraints_as_geo(force=True)
    rc = check_geo(rc)
    assert rc == count_1yjp_with_waters, rc

    params = model.get_default_pdb_interpretation_params()
    edits_1yjp = params.geometry_restraints.edits

    edits_1yjp.bond[0].action = 'add'
    edits_1yjp.bond[0].atom_selection_1 = 'resname HOH and resid 10 and name O'
    edits_1yjp.bond[0].atom_selection_2 = 'resname ASN and resid 2 and name ND2'
    edits_1yjp.bond[0].distance_ideal = 2.1
    edits_1yjp.bond[0].sigma = 0.1
    model.set_pdb_interpretation_params(params)
    rc = model.restraints_as_geo(force=True)
    rc = check_geo(rc)
    current = count_1yjp_with_waters.copy()
    current['User supplied restraints'] = 1
    current['Nonbonded interactions'] = 1176
    assert rc == current, check_diff(rc, current)

    edits_1yjp.angle[0].action = 'add'
    edits_1yjp.angle[0].atom_selection_1 = 'resname HOH and resid 10 and name O'
    edits_1yjp.angle[0].atom_selection_2 = 'resname ASN and resid 2 and name ND2'
    edits_1yjp.angle[0].atom_selection_3 = 'resname ASN and resid 2 and name CG'
    edits_1yjp.angle[0].angle_ideal = 21.9
    edits_1yjp.angle[0].sigma = 1.1
    model.set_pdb_interpretation_params(params)
    rc = model.restraints_as_geo(force=True)
    rc = check_geo(rc)
    current = count_1yjp_with_waters.copy()
    current['User supplied restraints'] = 1
    current['User supplied angle restraints'] = 1
    current['Nonbonded interactions'] = 1176
    assert rc == current, check_diff(rc, current)

    edits_1yjp.dihedral[0].action = 'add'
    edits_1yjp.dihedral[0].atom_selection_1 = 'resname HOH and resid 10 and name O'
    edits_1yjp.dihedral[0].atom_selection_2 = 'resname ASN and resid 2 and name ND2'
    edits_1yjp.dihedral[0].atom_selection_3 = 'resname ASN and resid 2 and name CG'
    edits_1yjp.dihedral[0].atom_selection_4 = 'resname ASN and resid 2 and name CB'
    edits_1yjp.dihedral[0].angle_ideal = 121.9
    edits_1yjp.dihedral[0].sigma = 1.12
    edits_1yjp.dihedral[0].periodicity = 10
    model.set_pdb_interpretation_params(params)
    rc = model.restraints_as_geo(force=True)
    rc = check_geo(rc)
    current = count_1yjp_with_waters.copy()
    current['User supplied restraints'] = 1
    current['User supplied angle restraints'] = 1
    current['User supplied torsion angle restraints'] = 1
    #current['  sinusoidal'] = 16
    current['Nonbonded interactions'] = 1176
    assert rc == current, check_diff(rc, current)
    print('OK')
예제 #25
0
def test_default_filenames():
    datatypes = [
        'model', 'ncs_spec', 'phil', 'real_map', 'restraint', 'sequence'
    ]
    extensions = ['cif', 'ncs_spec', 'eff', 'mrc', 'cif', 'seq']
    dm = DataManager(datatypes)
    for datatype, extension in zip(datatypes, extensions):
        filename = getattr(
            dm, 'get_default_output_{datatype}_filename'.format(
                datatype=datatype))()
        assert filename == 'cctbx_program.' + extension

    filename = dm.get_default_output_model_filename(extension='.abc')
    assert filename == 'cctbx_program.abc'

    class TestProgram(ProgramTemplate):
        master_phil_str = """
output {
  serial = 0
    .type = int
}
"""

    master_phil = iotbx.phil.parse(TestProgram.master_phil_str)
    required_output_phil = iotbx.phil.parse(ProgramTemplate.output_phil_str)
    master_phil.adopt_scope(required_output_phil)
    working_phil = iotbx.phil.parse(ProgramTemplate.master_phil_str)
    params = master_phil.fetch(working_phil).extract()
    p = ProgramTemplate(dm, params, master_phil)
    assert dm.get_default_output_filename() == 'cctbx_program_000'
    dm.set_overwrite(True)
    dm.write_model_file('abc')  # cctbx_program_000.cif
    dm.write_phil_file('123')  # cctbx_program_000.eff
    dm.write_phil_file('456')  # cctbx_program_001.eff
    dm.write_model_file('def')  # cctbx_program_001.cif
    assert dm.get_default_output_filename() == 'cctbx_program_001'
    dm.write_sequence_file('ghi')  # cctbx_program_001.seq
    dm.write_sequence_file('hkl')  # cctbx_program_002.seq
    assert dm.get_default_output_filename() == 'cctbx_program_002'
    assert os.path.isfile('cctbx_program_000.cif')
    assert os.path.isfile('cctbx_program_001.cif')
    assert os.path.isfile('cctbx_program_000.eff')
    assert os.path.isfile('cctbx_program_001.eff')
    assert os.path.isfile('cctbx_program_001.seq')
    assert os.path.isfile('cctbx_program_002.seq')
    os.remove('cctbx_program_000.cif')
    os.remove('cctbx_program_001.cif')
    os.remove('cctbx_program_000.eff')
    os.remove('cctbx_program_001.eff')
    os.remove('cctbx_program_001.seq')
    os.remove('cctbx_program_002.seq')

    # test output.filename, output.file_name
    assert p.get_default_output_filename() == 'cctbx_program_002'
    assert p.get_default_output_filename(filename='abc') == 'abc'
    working_phil_str = 'output.filename=def'
    working_phil = iotbx.phil.parse(working_phil_str)
    params = master_phil.fetch(working_phil).extract()
    p = ProgramTemplate(dm, params, master_phil)
    assert params.output.filename == params.output.file_name == 'def'
    assert p.get_default_output_filename() == 'def'
    assert dm.get_default_output_filename() == 'def'
    working_phil_str = 'output.file_name=ghi'
    working_phil = iotbx.phil.parse(working_phil_str)
    params = master_phil.fetch(working_phil).extract()
    p = ProgramTemplate(dm, params, master_phil)
    assert params.output.filename == params.output.file_name == 'ghi'
    assert p.get_default_output_filename() == 'ghi'
    assert dm.get_default_output_filename() == 'ghi'
예제 #26
0
def RunProbeTests(inFileName):

    #========================================================================
    # Call the test functions for the libraries we test.

    ret = probeext.DotSpheres_test()
    assert len(ret) == 0, "DotSpheres_test() failed: " + ret

    ret = probeext.SpatialQuery_test()
    assert len(ret) == 0, "SpatialQuery_test() failed: " + ret

    ret = probeext.Scoring_test()
    assert len(ret) == 0, "Scoring_test() failed: " + ret

    AtomTypes.Test()
    Helpers.Test()

    #========================================================================
    # Now ensure that we can use the C++-wrapped classes as intended to make sure
    # that the wrapping code or parameters have not changed.

    #========================================================================
    # Make sure we can get at the DotSphere objects and their methods
    cache = probeext.DotSphereCache(10)
    sphere1 = cache.get_sphere(1)
    dots = sphere1.dots()

    #========================================================================
    # Make sure we can fill in an ExtraAtomInfoList and pass it to scoring
    # Generate an example data model with a small molecule in it
    if inFileName is not None and len(inFileName) > 0:
        # Read a model from a file using the DataManager
        dm = DataManager()
        dm.process_model_file(inFileName)
        model = dm.get_model(inFileName)
    else:
        # Generate a small-molecule model using the map model manager
        mmm = map_model_manager()  # get an initialized instance of the map_model_manager
        mmm.generate_map()  # get a model from a generated small library model and calculate a map for it
        model = mmm.model()  # get the model

    # Fix up bogus unit cell when it occurs by checking crystal symmetry.
    cs = model.crystal_symmetry()
    if (cs is None) or (cs.unit_cell() is None):
        model = shift_and_box_model(model=model)

    # Get the list of all atoms in the model
    atoms = model.get_atoms()

    # Get the bonding information we'll need to exclude our bonded neighbors.
    try:
        p = mmtbx.model.manager.get_default_pdb_interpretation_params()
        model.process(make_restraints=True,
                      pdb_interpretation_params=p)  # make restraints
        geometry = model.get_restraints_manager().geometry
        sites_cart = model.get_sites_cart()  # cartesian coordinates
        bond_proxies_simple, asu = \
            geometry.get_all_bond_proxies(sites_cart = sites_cart)
    except Exception as e:
        raise Exception("Could not get bonding information for input file: " +
                        str(e))
    bondedNeighbors = Helpers.getBondedNeighborLists(atoms,
                                                     bond_proxies_simple)

    # Traverse the hierarchy and look up the extra data to be filled in.
    ret = Helpers.getExtraAtomInfo(model)
    extra = ret.extraAtomInfo

    # Construct a SpatialQuery and fill in the atoms.  Ensure that we can make a
    # query within 1000 Angstroms of the origin.
    sq = probeext.SpatialQuery(atoms)
    nb = sq.neighbors((0, 0, 0), 0, 1000)

    # Construct a DotScorer object.
    # Find the radius of each atom in the structure and construct dot spheres for
    # them. Find the atoms that are bonded to them and add them to an excluded list.
    # Then compute the score for each of them and report the summed score over the
    # whole molecule the way that Reduce will.
    ds = probeext.DotScorer(extra)
    total = 0
    badBumpTotal = 0
    for a in atoms:
        rad = extra.getMappingFor(a).vdwRadius
        assert rad > 0, "Invalid radius for atom look-up: " + a.name + " rad = " + str(
            rad)
        sphere = cache.get_sphere(rad)

        # Exclude atoms that are bonded to me or to one of my neighbors.
        # It has the side effect of excluding myself if I have any neighbors.
        # Construct as a set to avoid duplicates.
        exclude = set()
        for n in bondedNeighbors[a]:
            exclude.add(n)
            for n2 in bondedNeighbors[n]:
                exclude.add(n2)
        exclude = list(exclude)

        dots = sphere.dots()
        res = ds.score_dots(a, 1.0, sq, rad * 3, 0.25, exclude, sphere.dots(),
                            sphere.density(), False)
        total += res.totalScore()
        if res.hasBadBump:
            badBumpTotal += 1
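
    # Report the totals accumulated in the loop above (sketch; the output
    # format is illustrative only and not part of the original checks).
    print("Summed dot score over all atoms:", total)
    print("Atoms flagged with bad bumps:", badBumpTotal)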

    # Test calling the single-dot checking code as will be used by Probe to make sure
    # all of the Python linkage is working
    dotOffset = [1, 0, 0]
    check = ds.check_dot(atoms[0], dotOffset, 1, atoms, [atoms[0]])
    overlapType = check.overlapType

    # Test calling the interaction_type method to be sure Python linkage is working
    interactionType = ds.interaction_type(check.overlapType, check.gap)
예제 #27
0
def Test(inFileName = None):

  #========================================================================
  # Make sure we can fill in mmtbx.probe.ExtraAtomInfoList info.
  # Generate an example data model with a small molecule in it unless we
  # were given a file name on the command line.
  if inFileName is not None and len(inFileName) > 0:
    # Read a model from a file using the DataManager
    dm = DataManager()
    dm.process_model_file(inFileName)
    model = dm.get_model(inFileName)
  else:
    # Generate a small-molecule model using the map model manager
    mmm=map_model_manager()         #   get an initialized instance of the map_model_manager
    mmm.generate_map()              #   get a model from a generated small library model and calculate a map for it
    model = mmm.model()             #   get the model

  # Fill in an ExtraAtomInfoList with an entry for each atom in the hierarchy.
  # We first find the largest i_seq sequence number in the model and reserve that
  # many entries so we will always be able to fill in the entry for an atom.
  atoms = model.get_atoms()
  maxI = atoms[0].i_seq
  for a in atoms:
    if a.i_seq > maxI:
      maxI = a.i_seq
  extra = []
  for i in range(maxI+1):
    extra.append(probe.ExtraAtomInfo())

  # Traverse the hierarchy and look up the extra data to be filled in.
  # Get a list of all the atoms in the chain while we're at it
  at = AtomTypes()
  ph = model.get_hierarchy()
  for m in ph.models():
    for chain in m.chains():
      for rg in chain.residue_groups():
        for ag in rg.atom_groups():
          for a in ag.atoms():
            ei, warn = at.FindProbeExtraAtomInfo(a)
            extra[a.i_seq] = ei
            # User code should test for and print warnings
            #if len(warn) > 0:
            #  print(warn)

  #========================================================================
  # Find an Oxygen atom and ask for its radii with explicit Hydrogen, implicit Hydrogen,
  # and Nuclear radii.
  o = None
  ph = model.get_hierarchy()
  for a in ph.models()[0].atoms():
    if a.element.strip() == 'O':
      o = a
  assert o is not None, "AtomTypes.Test(): Could not find Oxygen (internal test failure)"
  explicitH = AtomTypes(useNeutronDistances = False,
                        useImplicitHydrogenDistances = False).FindProbeExtraAtomInfo(o)[0].vdwRadius
  implicitH = AtomTypes(useNeutronDistances = False,
                        useImplicitHydrogenDistances = True).FindProbeExtraAtomInfo(o)[0].vdwRadius
  neutronH = AtomTypes(useNeutronDistances = True,
                        useImplicitHydrogenDistances = False).FindProbeExtraAtomInfo(o)[0].vdwRadius
  assert explicitH != implicitH, "AtomTypes.Test(): Implicit and explicit Oxygen radii did not differ as expected"
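
  # Sketch: report the three radii retrieved above; the exact values depend on
  # the probe tables, so none are asserted here beyond the check above.
  print('Oxygen vdW radius (explicit H):', explicitH)
  print('Oxygen vdW radius (implicit H):', implicitH)
  print('Oxygen vdW radius (neutron)   :', neutronH)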

  #========================================================================
  # Check MaximumVDWRadius, calling it twice to make sure both the cached and non-cached
  # results work.
  for i in range(2):
    assert at.MaximumVDWRadius() == 2.5, "AtomTypes.Test(): Unexpected MaximumVDWRadius(): got "+str(at.MaximumVDWRadius())+", expected 2.5"

  #========================================================================
  # Check IsAromatic() to ensure it reports atoms as aromatic when expected and not otherwise.
  aromaticChecks = [
      ['PHE', 'CE2', True],
      ['  U', 'HN3', True],
      ['ASN',   'O', False]
    ]
  for a in aromaticChecks:
    assert IsAromatic(a[0],a[1]) == a[2], "AtomTypes.Test(): {} {} not marked as aromatic {}".format(a[0],a[1],a[2])
예제 #28
0
  def get_statistics(self):
    if self.json_data['success_composition'] is False:
      return
    make_sub_header('Get statistics', out=self.logger)
    self.save_json()
    success = True
    self.json_data['success_statistics'] = None

    sc, r_sc = None, None
    self.pickle_data.min_reso = None
    self.pickle_data.d_inv = None
    self.pickle_data.r_d_inv = None
    self.pickle_data.d_inv_half = None
    self.pickle_data.fsc_model = None
    self.pickle_data.r_fsc_model = None
    self.pickle_data.fsc_half = None

    if self.json_data['restraints_file']:
      cif_file = self.json_data['restraints_file']
      cif_objects = monomer_library.server.read_cif(file_name=to_str(cif_file))
      cif_objects = [(cif_file, cif_objects)]
    else: cif_objects = None

    for p, fn in zip(('', 'r_'),
      [self.json_data['pdb_file_updated'], self.json_data['pdb_file_refined']]):
      # TODO which file to use for initial!!
      if (not fn or not os.path.isfile(fn)): continue

      if p=='': print('Initial model', file=self.logger)
      if p=='r_': print('\nRefined model', file=self.logger)

      dm = DataManager()

      dm.process_real_map_file(self.json_data['map_file'])
      map_inp = dm.get_real_map(self.json_data['map_file'])
      #map_inp   = iotbx.ccp4_map.map_reader(file_name = self.json_data['map_file'])
      map_inp_1, map_inp_2 = None, None
      if self.json_data['map_file_1'] is not None:
        #map_inp_1 = iotbx.ccp4_map.map_reader(file_name = self.json_data['map_file_1'])
        dm.process_real_map_file(self.json_data['map_file_1'])
        map_inp_1 = dm.get_real_map(self.json_data['map_file_1'])
        #map_data_1 = map_inp_1.map_data()
      if self.json_data['map_file_2'] is not None:
        dm.process_real_map_file(self.json_data['map_file_2'])
        map_inp_2 = dm.get_real_map(self.json_data['map_file_2'])
        #map_inp_2 = iotbx.ccp4_map.map_reader(file_name = self.json_data['map_file_2'])
        #map_data_2 = map_inp_2.map_data()

      print('\tGet model class with restraints...', file=self.logger)
      pdb_inp = iotbx.pdb.input(file_name = fn)
      try:
        model = mmtbx.model.manager(
          model_input       = pdb_inp,
          restraint_objects = cif_objects,
          build_grm         = True,
          stop_for_unknowns = False,
          crystal_symmetry  = map_inp.crystal_symmetry(),
          log               = null_out())
      except Exception as e:
        #success = False
        self.json_data['success_statistics'] = False
        msg = traceback.format_exc()
        print(msg, file=self.logger)
        self.write_log(
          step = 'Statistics: Model class (with restraints) from pdb ',
          msg  = msg)
        self.save_json()
        continue

      if not success: continue

      # Emringer
      if self.resolution < 4.0:
        print('\tCalculate Emringer score', file=self.logger)
        try:
          emringer_score = self.get_emringer_score(
            model   = model.deep_copy(),
            map_inp = map_inp.deep_copy())
          self.json_data[p+'emringer'] = emringer_score
        except Exception as e:
          msg = traceback.format_exc()
          print(msg, file=self.logger)
          self.write_log(step = 'EMRinger failed ', msg = msg)
          # TODO: save as success_statistics False?

      # Rama-z score
      try:
        rama_z_score = rama_z.rama_z(
          models = [model],
          log    = self.logger)
        z_scores = rama_z_score.get_z_scores()
        self.json_data[p+'z_score'] = z_scores['W'][0]
      except Exception as e:
        msg = traceback.format_exc()
        print(msg, file=self.logger)
        self.write_log(step = 'Rama z-score failed ', msg = msg)

#      base = map_and_model.input(
#        map_manager   = map_inp,
#        map_manager_1 = map_inp_1,
#        map_manager_2 = map_inp_2,
#        model         = model,
#        box           = True)

      checked = map_model_manager(
        map_manager   = map_inp,
        map_manager_1 = map_inp_1,
        map_manager_2 = map_inp_2,
        model         = model,
        wrapping      = None)
      checked.box_all_maps_around_model_and_shift_origin()

      params = validation_cryoem.master_params().extract()
      params.resolution = self.resolution
      params.slim = False

      if (p == ''):
        print('\tGet map parameters...', file=self.logger)
        self.get_map_parameters(base = checked)

      print('\tRun Mtriage...', file=self.logger)
      try:
        o = validation_cryoem.validation(
          model      = checked.model(),
          map_data   = checked.map_data(),
          map_data_1 = checked.map_data_1(),
          map_data_2 = checked.map_data_2(),
          params     = params)
      except Exception as e:
        # success = False
        self.json_data['success_statistics'] = False
        msg = traceback.format_exc()
        print(msg, file=self.logger)
        self.write_log(step = 'Statistics ', msg = msg)
        self.save_json()
        continue

      print('\tStore info in pkl...', file=self.logger)
      o_dict = self.fill_dictionary(o)
      if (p == ''):
        self.json_data['o'] = o_dict
        sc = o.model.get_sites_cart()
      elif (p == 'r_'):
        self.json_data['o_refined'] = o_dict
        r_sc = o.model.get_sites_cart()
      if o is not None:
        if o.model_vs_data is not None:
          if o.model_vs_data.cc is not None:
            if o.model_vs_data.cc.cc_per_chain is not None:
              if (p == ''):
                self.pickle_data.cc_per_chain = o.model_vs_data.cc.cc_per_chain
              if (p == 'r_'):
                self.pickle_data.r_cc_per_chain = o.model_vs_data.cc.cc_per_chain
            if o.model_vs_data.cc.cc_per_residue is not None:
              if (p == ''):
                self.pickle_data.cc_per_residue = o.model_vs_data.cc.cc_per_residue
              if (p == 'r_'):
                self.pickle_data.r_cc_per_residue = o.model_vs_data.cc.cc_per_residue
        if o.model_stats.geometry.ramachandran.ramalyze is not None:
          rl = o.model_stats.geometry.ramachandran.ramalyze
          if (p == ''): self.pickle_data.ramalyze = rl
          if (p == 'r_'): self.pickle_data.r_ramalyze = rl
      # values for the curve go up to very high reso, get highest reso limit
#      if o is not None:
#        reso_list = [self.json_data['d_pdb'],self.json_data['d_cif'],
#          self.json_data['d_map'], self.json_data['o']['d99'],
#          self.json_data['o']['d_model'], self.json_data['o']['d_fsc']]
#      else:
#        reso_list = [self.json_data['d_pdb'],self.json_data['d_cif'],
#          self.json_data['d_map']]
#      min_reso = min([elem for elem in reso_list if elem is not None])
#      self.pickle_data.min_reso = min_reso
      min_reso = self.resolution
      self.pickle_data.min_reso = self.resolution
      # apply some arbitrary buffer
      min_reso = min_reso - 0.5
      if o is not None:
        masked = o.data_stats.masked
        if (masked.fsc_curve_model is not None):
          if ((masked.fsc_curve_model.d_inv is not None) and
              (masked.fsc_curve_model.fsc is not None)):
            if (p == ''):
              self.pickle_data.d_inv = masked.fsc_curve_model.d_inv
              self.pickle_data.fsc_model   = masked.fsc_curve_model.fsc
            if (p == 'r_'):
              self.pickle_data.r_d_inv = masked.fsc_curve_model.d_inv
              self.pickle_data.r_fsc_model   = masked.fsc_curve_model.fsc
        if (p == ''):
          if (masked.fsc_curve is not None):
            if (masked.fsc_curve.fsc is not None):
              if ((masked.fsc_curve.fsc.d_inv is not None) and
                  (masked.fsc_curve.fsc.fsc is not None)):
                self.pickle_data.d_inv_half = masked.fsc_curve.fsc.d_inv
                self.pickle_data.fsc_half   = masked.fsc_curve.fsc.fsc

      if o is not None:
        print("\nd99                  :", o.data_stats.masked.d99)
        print("d_model              :", o.data_stats.masked.d_model)
        print("d_fsc                :", o.data_stats.masked.d_fsc)
        print("ramachandran.outliers:", o.model_stats.geometry.ramachandran.outliers)
        print("cc_mask              :", o.model_vs_data.cc.cc_mask)
        print("cc_box               :", o.model_vs_data.cc.cc_box)

    if sc is not None and r_sc is not None:
      self.json_data['rmsd'] = sc.rms_difference(r_sc)

    if self.json_data['success_statistics'] is not False:
      self.json_data['success_statistics'] = success
예제 #29
0
def test_01():

    # Source data

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_ccp4 = os.path.join(data_dir, 'data', 'non_zero_origin_map.ccp4')
    data_pdb = os.path.join(data_dir, 'data', 'non_zero_origin_model.pdb')
    data_ncs_spec = os.path.join(data_dir, 'data',
                                 'non_zero_origin_ncs_spec.ncs_spec')

    # Read in data

    dm = DataManager(['ncs_spec', 'model', 'real_map', 'phil'])
    dm.set_overwrite(True)

    map_file = data_ccp4
    dm.process_real_map_file(map_file)
    mm = dm.get_real_map(map_file)

    model_file = data_pdb
    dm.process_model_file(model_file)
    model = dm.get_model(model_file)

    ncs_file = data_ncs_spec
    dm.process_ncs_spec_file(ncs_file)
    ncs = dm.get_ncs_spec(ncs_file)

    mmm = map_model_manager(model=model,
                            map_manager_1=mm.deep_copy(),
                            map_manager_2=mm.deep_copy(),
                            ncs_object=ncs,
                            wrapping=False)
    mmm.add_map_manager_by_id(map_id='external_map',
                              map_manager=mmm.map_manager().deep_copy())
    mmm.set_resolution(3)
    mmm.set_log(sys.stdout)

    dc = mmm.deep_copy()

    # Model sharpening
    mmm = dc.deep_copy()
    tls_info = mmm.tls_from_map(
        n_bins=10,
        model_id='model',
        map_id='map_manager',
        iterations=1,
    )
    tlso = tls_info.tlso_list[0]
    print("t:", tlso.t)
    print("l:", tlso.l)
    print("s:", tlso.s)
    print("origin:", tlso.origin)

    assert approx_equal(
        tlso.t,
        (1.180418902258779, 1.1747521845606608, 1.178996799712174,
         -0.08474662674769494, -0.022609295693646402, 0.0649209491344932))
    assert approx_equal(
        tlso.l,
        (-0.002159404807991249, -0.002107964765763024, 0.0008301439376854558,
         -5.973347993775719e-05, -0.000134276871934738, -9.05515898670584e-05))
    assert approx_equal(
        tlso.s,
        (2.9348223335616302e-08, 5.52441087256425e-09, -5.382459681103171e-09,
         4.3530347434547015e-09, -2.3559464233595e-08, 4.217968590464982e-09,
         -4.380707049750269e-09, 1.9232725033868253e-09,
         -5.788759082799497e-09))
    assert approx_equal(
        tlso.origin,
        (-64.70331931297399, -62.30573551948903, -63.743687240164604))

    print("TLS: ", tlso.t, tlso.l, tlso.s, tlso.origin)
예제 #30
0
def exercise_around_model():

    from cctbx.maptbx.box import make_list_symmetric
    a = [3, 4, 5, 3, 9, 1, 6, 3, 2, 5, 6, 6]
    new_a = make_list_symmetric(a)
    from scitbx.array_family import flex
    aa = flex.double(a)
    new_aa = flex.double(new_a)
    assert (aa.size(), new_aa.size()) == (12, 12)
    assert aa.min_max_mean().mean == new_aa.min_max_mean().mean
    print(a, new_a)

    a = [3, 4, 5, 3, 8, 1, 6, 7, 3, 2, 5, 6, 6]
    new_a = make_list_symmetric(a)
    from scitbx.array_family import flex
    aa = flex.double(a)
    new_aa = flex.double(new_a)
    print(a, new_a)
    assert (aa.size(), new_aa.size()) == (13, 13)
    assert aa.min_max_mean().mean == new_aa.min_max_mean().mean

    mam = get_random_structure_and_map(use_static_structure=True)

    map_data_orig = mam.mm.map_data().deep_copy()
    sites_frac_orig = mam.model.get_sites_frac().deep_copy()
    sites_cart_orig = mam.model.get_sites_cart().deep_copy()
    cs_orig = mam.model.crystal_symmetry()

    box = cctbx.maptbx.box.around_model(map_manager=mam.mm,
                                        model=mam.model.deep_copy(),
                                        box_cushion=10,
                                        wrapping=True)
    new_mm1 = box.map_manager()
    new_mm2 = box.apply_to_map(map_manager=mam.mm.deep_copy())
    assert approx_equal(new_mm1.map_data(), new_mm2.map_data())

    new_model1 = box.model()
    new_model2 = box.apply_to_model(model=mam.model.deep_copy())
    assert new_model1.crystal_symmetry().is_similar_symmetry(
        new_model2.crystal_symmetry())
    assert new_model1.crystal_symmetry().is_similar_symmetry(
        box.crystal_symmetry)

    assert approx_equal(new_model1.get_sites_cart()[0],
                        (19.705233333333336, 15.631525, 13.5040625))
    # make sure things did change
    assert new_mm2.map_data().size() != map_data_orig.size()

    # make sure things are changed in-place and are therefore different from start
    assert box.map_manager().map_data().size() != map_data_orig.size()
    assert box.model().get_sites_frac() != sites_frac_orig
    assert box.model().get_sites_cart() != sites_cart_orig
    assert (not cs_orig.is_similar_symmetry(box.model().crystal_symmetry()))

    # make sure box, model and map_manager remember original crystal symmetry
    assert cs_orig.is_similar_symmetry(
        box.model().unit_cell_crystal_symmetry())
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())

    assert approx_equal(
        box.model().shift_cart(),
        [5.229233333333334, 5.061524999999999, 5.162062499999999])

    assert box.model().unit_cell_crystal_symmetry().is_similar_symmetry(
        cs_orig)
    assert (not box.model().crystal_symmetry().is_similar_symmetry(cs_orig))

    assert approx_equal(
        box.model()._figure_out_hierarchy_to_output(
            do_not_shift_back=False).atoms().extract_xyz()[0],
        (14.476, 10.57, 8.342))

    # make sure we can stack shifts
    sel = box.model().selection("resseq 219:219")
    m_small = box.model().select(selection=sel)

    assert approx_equal(box.model().shift_cart(), m_small.shift_cart())

    # Now box again:
    small_box = cctbx.maptbx.box.around_model(map_manager=mam.mm,
                                              model=mam.model.deep_copy(),
                                              box_cushion=5,
                                              wrapping=True)

    # Make sure nothing was zeroed out in this map (wrapping = True)
    assert new_mm1.map_data().as_1d().count(0) == 0

    # Now without wrapping...
    box = cctbx.maptbx.box.around_model(map_manager=mam.mm,
                                        model=mam.model.deep_copy(),
                                        box_cushion=10,
                                        wrapping=False)

    # make sure things are changed in-place and are therefore different from start
    assert box.map_manager().map_data().size() != map_data_orig.size()
    assert box.model().get_sites_frac() != sites_frac_orig
    assert box.model().get_sites_cart() != sites_cart_orig
    assert (not cs_orig.is_similar_symmetry(box.model().crystal_symmetry()))

    # make sure box, model and map_manager remember original crystal symmetry
    assert cs_orig.is_similar_symmetry(
        box.model().unit_cell_crystal_symmetry())
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())

    assert box.map_manager().map_data().as_1d().count(0) == 81264

    # Now specify bounds directly
    new_box = cctbx.maptbx.box.with_bounds(map_manager=mam.mm.deep_copy(),
                                           lower_bounds=(-7, -7, -7),
                                           upper_bounds=(37, 47, 39),
                                           wrapping=False)

    new_model = new_box.apply_to_model(mam.model.deep_copy())
    # make sure things are changed in-place and are therefore different from start
    assert new_box.map_manager().map_data().size() != map_data_orig.size()
    assert new_model.get_sites_frac() != sites_frac_orig
    assert new_model.get_sites_cart() != sites_cart_orig
    assert (not cs_orig.is_similar_symmetry(new_model.crystal_symmetry()))

    # make sure box, model and map_manager remember original crystal symmetry
    assert cs_orig.is_similar_symmetry(
        box.model().unit_cell_crystal_symmetry())
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())

    assert box.map_manager().map_data().as_1d().count(0) == 81264

    # Now specify bounds directly and init with model
    box = cctbx.maptbx.box.with_bounds(map_manager=mam.mm.deep_copy(),
                                       lower_bounds=(-7, -7, -7),
                                       upper_bounds=(37, 47, 39),
                                       wrapping=False,
                                       model=mam.model.deep_copy())

    new_model = box.model()
    # make sure things are changed in-place and are therefore different from start
    assert box.map_manager().map_data().size() != map_data_orig.size()
    assert new_model.get_sites_frac() != sites_frac_orig
    assert new_model.get_sites_cart() != sites_cart_orig
    assert (not cs_orig.is_similar_symmetry(new_model.crystal_symmetry()))

    # make sure box, model and map_manager remember original crystal symmetry
    assert cs_orig.is_similar_symmetry(
        box.model().unit_cell_crystal_symmetry())
    assert cs_orig.is_similar_symmetry(
        box.map_manager().unit_cell_crystal_symmetry())

    assert box.map_manager().map_data().as_1d().count(0) == 81264

    # Extract using around_unique

    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_ccp4 = os.path.join(data_dir, 'data', 'D7.ccp4')
    data_ncs = os.path.join(data_dir, 'data', 'D7.ncs_spec')
    data_seq = os.path.join(data_dir, 'data', 'D7.seq')

    dm = DataManager(['real_map', 'phil', 'ncs_spec', 'sequence'])
    dm.process_real_map_file(data_ccp4)
    mm = dm.get_real_map(data_ccp4)

    dm.process_ncs_spec_file(data_ncs)
    ncs_obj = dm.get_ncs_spec(data_ncs)

    dm.process_sequence_file(data_seq)
    sequence = dm.get_sequence(data_seq)
    sequence_as_text = sequence[0].sequence

    map_model_mgr = map_model_manager(map_manager=mm, ncs_object=ncs_obj)
    mm = map_model_mgr.map_manager()
    mm.show_summary()

    box = cctbx.maptbx.box.around_unique(
        map_manager=mm.deep_copy(),
        resolution=3,
        box_cushion=1,
        sequence=sequence_as_text,
        soft_mask=True,
        wrapping=False,
    )

    box.map_manager().write_map('new_box.ccp4')

    # run again from map_manager

    map_model_mgr.box_all_maps_around_unique_and_shift_origin(
        resolution=3,
        box_cushion=1,
        sequence=sequence_as_text,
        soft_mask=True,
    )

    # Get bounds around density
    box = cctbx.maptbx.box.around_density(map_manager=mam.mm.deep_copy(),
                                          wrapping=False)

    # Create a mask

    mm = mam.mm.deep_copy()

    mm.create_mask_around_density(
        resolution=3,
        molecular_mass=2100,
        sequence="GAVAGA",
        solvent_content=0.5,
    )
    mask_mm = mm.get_mask_as_map_manager()
    assert approx_equal(
        (mask_mm.map_data().count(0), mask_mm.map_data().count(1),
         mask_mm.map_data().size()), (19184, 19216, 38400))

    # Box around the mask
    box = cctbx.maptbx.box.around_mask(
        map_manager=mam.mm.deep_copy(),
        mask_as_map_manager=mask_mm,
        wrapping=False,
    )

    assert (box.gridding_first, box.gridding_last) == ([0, 0, 0], [29, 39, 31])

    # Box around the mask with cubic box
    box = cctbx.maptbx.box.around_mask(
        map_manager=mam.mm.deep_copy(),
        mask_as_map_manager=mask_mm,
        use_cubic_boxing=True,
        wrapping=False,
    )

    assert (box.gridding_first, box.gridding_last) == ([1, 6, 2], [30, 35, 31])

    #
    # IF you are about to change this - THINK TWICE!
    #
    from libtbx.introspection import getfullargspec
    r = getfullargspec(cctbx.maptbx.box.around_model.__init__)
    assert r.args == [
        'self', 'map_manager', 'model', 'box_cushion', 'wrapping',
        'model_can_be_outside_bounds', 'stay_inside_current_map',
        'use_cubic_boxing', 'require_match_unit_cell_crystal_symmetry', 'log'
    ], r.args
    r = getfullargspec(cctbx.maptbx.box.with_bounds.__init__)
    assert r.args == [
        'self', 'map_manager', 'lower_bounds', 'upper_bounds', 'model',
        'wrapping', 'model_can_be_outside_bounds', 'stay_inside_current_map',
        'use_cubic_boxing', 'log'
    ], r.args

    print("OK")