def selection_by_symmetry (crystal_symmetries, source_infos,
    exclude_space_group=None, only_space_group=None, log=null_out()) :
  """
  Return a list of indices for those crystal symmetries which either match the
  only_space_group parameter, or do not match exclude_space_group.
  """
  from cctbx import sgtbx
  if (exclude_space_group is not None and
      type(exclude_space_group).__name__ != 'info') :
    exclude_space_group = sgtbx.space_group_info(exclude_space_group)
  if (only_space_group is not None and
      type(only_space_group).__name__ != 'info') :
    only_space_group = sgtbx.space_group_info(only_space_group)
  selection = []
  for k, symm in enumerate(crystal_symmetries) :
    skip = False
    sg = None
    if (only_space_group is not None) :
      if (symm is None) :
        skip = True
      else :
        sg = str(symm.space_group_info())
        if (sg != str(only_space_group)) :
          skip = True
    elif (exclude_space_group is not None) :
      if (symm is not None) :
        sg = str(symm.space_group_info())
        if (sg == str(exclude_space_group)) :
          skip = True
    if (skip) :
      print >> log, " %s is in space group %s, skipping" % (source_infos[k], sg)
    else :
      selection.append(k)
  return selection
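# Hedged usage sketch (the "_example_" helper, symmetry objects and labels
# below are made up for illustration; they are not part of the original
# module): keep only the entries whose space group is P 21 21 21.
def _example_selection_by_symmetry():
  from cctbx import crystal
  symmetries = [
    crystal.symmetry(unit_cell=(50, 60, 70, 90, 90, 90),
                     space_group_symbol="P 21 21 21"),
    crystal.symmetry(unit_cell=(40, 40, 75, 90, 90, 120),
                     space_group_symbol="P 63"),
    None,  # e.g. a file carrying no symmetry information
  ]
  labels = ["model.pdb", "reference.pdb", "data.hkl"]
  # only index 0 matches, so the expected result is [0]
  return selection_by_symmetry(symmetries, labels,
    only_space_group="P 21 21 21")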
def run(): for space_group_symbol in ("P-1", "P2/m", "C2/m", "Pmmm", "Cmmm", "Fmmm", "Immm", "P4/mmm", "I4/mmm", "R-3m", "P6/mmm", "Pm-3m", "Im-3m", "Fm-3m"): centric_info = sgtbx.space_group_info(space_group_symbol) non_centric = sgtbx.space_group() for i_ltr in xrange(centric_info.group().n_ltr()): for i_smx in xrange(centric_info.group().n_smx()): s = centric_info.group()(i_ltr,0,i_smx) non_centric.expand_smx(s) assert non_centric.f_inv() == 1 assert non_centric.order_z() * 2 == centric_info.group().order_z() non_centric_info = sgtbx.space_group_info(group=non_centric) centric_stats = subgroup_stats(centric_info) non_centric_stats = subgroup_stats(non_centric_info) assert len(centric_stats.subgroups) >= 2*len(non_centric_stats.subgroups) assert centric_stats.n_non_centric >= non_centric_stats.n_non_centric assert centric_stats.n_chiral == non_centric_stats.n_chiral assert non_centric_stats.n_non_centric == len(non_centric_stats.subgroups) assert non_centric_stats.n_non_centric == non_centric_stats.n_chiral print "OK"
def space_group_info(self, unit_cell=None): if (self.symbol is None): return None if (self.category is None): try: return sgtbx.space_group_info(self.symbol) except RuntimeError: return None if (isinstance(unit_cell, uctbx.ext.unit_cell)): unit_cell = unit_cell.parameters() if (self.category == rhombohedral): if (unit_cell is None): return None (a, b, c, alpha, beta, gamma) = unit_cell if (abs(a - b) <= 0.01 and is90(alpha) and is90(beta) and is120(gamma)): basis_symbol = "H" elif (equiv(a,b,c) and equiv(alpha,beta,gamma)): basis_symbol = "R" else: return None return sgtbx.space_group_info( rhombohedral[self.symbol] + ":" + basis_symbol) if (self.category == short_mono): if (unit_cell is None): return None Z, T = self.symbol[0], self.symbol[1:] (a, b, c, alpha, beta, gamma) = unit_cell if (is90(alpha) and is90(gamma)): if (Z == "B"): return None return sgtbx.space_group_info(Z + " 1 " + T + " 1") if (is90(alpha) and is90(beta)): if (Z == "C"): return None return sgtbx.space_group_info(Z + " 1 1 " + T) if (self.category == special): return sgtbx.space_group_info(special[self.symbol]) raise RuntimeError("Programming error (should be unreachable).")
def make_and_place_nodes_and_connections(self, input_sg): # make the object and name please object = sgtbx.space_group_info(group=input_sg) name = str(object) sg_relations = sub_super_point_group_relations( input_sg, self.pg_high, self.assert_pg) # loop over the possible outgoing edges edge_list = {} for possible_super_sg, used_symops, unused_symops \ in zip(sg_relations.return_next_sg(), sg_relations.return_next_set(), sg_relations.return_next_left_over_set()): # This is enough info to make connections from the given node edge = edge_object(used =used_symops, unused = unused_symops, as_xyz = self.as_xyz) edge_list[str(sgtbx.space_group_info(group=possible_super_sg)) ] = edge # place the sg's generated on the queue if not possible_super_sg in self.queue: self.queue.append(possible_super_sg) # place/insert the node with the proper connections please # print object, type(object) self.graph.insert_node(name = name, edge_object = edge_list, node_object = object)
def exercise_generator_set(): sgi = sgtbx.space_group_info('P1') sg_generator_set = sgtbx.any_generator_set(sgi.group()) assert sg_generator_set.non_primitive_generators == () sgi = sgtbx.space_group_info('P21') sg_generator_set = sgtbx.any_generator_set(sgi.group()) assert (map(str, sg_generator_set.non_primitive_generators) == ['-x,y+1/2,-z']) sgi = sgtbx.space_group_info('Pmmm') sg_generator_set = sgtbx.any_generator_set(sgi.group()) assert (map(str, sg_generator_set.non_primitive_generators) == ['-x,-y,-z', 'x,-y,-z', '-x,y,-z']) for i in xrange(1, 231): sgi = sgtbx.space_group_info(number=i) sg = sgi.group() sg_gen = sgtbx.any_generator_set(sg) if sg.z2p_op().is_identity_op(): assert sg_gen.non_primitive_generators == sg_gen.primitive_generators for i in xrange(1, 231): sgi = sgtbx.space_group_info(number=i) sg = sgi.group() sg_gen = sgtbx.any_generator_set(sg) sg1 = sgtbx.space_group("P1") for op in sg_gen.non_primitive_generators: sg1.expand_smx(op) for t in sg.ltr(): sg1.expand_ltr(t) assert sg1.type().number() == sg.type().number()
def exercise_d_metrical_matrix_d_params(): def finite_differences(unit_cell, eps=1e-6): grads = [] for i in range(6): params = list(unit_cell.parameters()) params[i] += eps uc = uctbx.unit_cell(parameters=params) qm = matrix.col(uc.metrical_matrix()) params[i] -= 2*eps uc = uctbx.unit_cell(parameters=params) qp = matrix.col(uc.metrical_matrix()) dq = (qm-qp)/(2*eps) grads.extend(list(dq)) grads = flex.double(grads) grads.resize(flex.grid((6,6))) return grads.matrix_transpose() from cctbx import sgtbx p1 = sgtbx.space_group_info('P1') uc = p1.any_compatible_unit_cell(27) grads = uc.d_metrical_matrix_d_params() fd_grads = finite_differences(uc) assert approx_equal(grads, fd_grads) sgi = sgtbx.space_group_info('I-4') uc = sgi.any_compatible_unit_cell(volume=18000) grads = uc.d_metrical_matrix_d_params() fd_grads = finite_differences(uc) assert approx_equal(grads, fd_grads)
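# Illustrative stand-alone call (assumed, not part of the test above): the
# analytical derivative checked by the finite differences is the 6x6 matrix
# of d(metrical matrix element)/d(unit-cell parameter), available directly
# from any uctbx.unit_cell object.
def _example_d_metrical_matrix_d_params():
  uc = uctbx.unit_cell((10, 11, 12, 85, 95, 100))
  return uc.d_metrical_matrix_d_params()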
def show(self, out=None): if out is None: out = sys.stdout print >> out, ( "Input subgroup : %s" % sgtbx.space_group_info(group=self.sg_low)) print >> out, ( "Input lattice group : %s" % sgtbx.space_group_info(group=self.sg_high)) print >> out print >> out for set,group,leftover in zip(self.grouped_symops, self.sg_groups, self.left_over_symops): assert(len(set)+len(leftover)==len(self.symops)) print >> out, ( "Supergroup : %s" % sgtbx.space_group_info(group=group)) print >> out, " Used symops:" for symop in set: print >> out, " (%s) "%(symop.r().as_hkl()) print >> out print >> out, " Left over symops:" for symop in leftover: if symop is not None: print >> out, " (%s) "%(symop.r().as_hkl()) else: print >> out, " None" print >> out print >> out
def reference_setting_choices(space_group):
  # we used to have
  cyclic_permutations = ['x,y,z', 'y,z,x', 'z,x,y']
  adams_group = sgtbx.space_group_info(
    group=space_group.build_derived_group(False, False))
  space_group = sgtbx.space_group_info(group=space_group)
  # please check that we have something in reference setting,
  # just to make sure that the thing is used for its original purpose
  assert space_group.is_reference_setting()
  info = []
  identity_op = sgtbx.change_of_basis_op('x,y,z').c().r()
  for cyclic_permutation in cyclic_permutations:
    cob_op = sgtbx.change_of_basis_op(cyclic_permutation)
    transformed_adams_group = adams_group.change_basis(cob_op)
    transformed_space_group = space_group.change_basis(cob_op)
    cob_to_ref_sg = transformed_space_group.\
      change_of_basis_op_to_reference_setting()
    cob_to_ref_pg = transformed_adams_group.\
      change_of_basis_op_to_reference_setting()
    adams_norm = False
    space_norm = False
    # check if the rotation part of the cb_op to ref is
    # the identity operator
    # if hall symbols are equal, sg's are equal
    if (identity_op == cob_to_ref_pg.c().r()):
      adams_norm = True
    if (identity_op == cob_to_ref_sg.c().r()):
      space_norm = True
    info_tuple = (cob_op, cob_to_ref_sg, adams_norm, space_norm)
    info.append(info_tuple)
  possible_additional_transforms = []
  # of course we have to take into account the identity operator
  possible_additional_transforms.append(info[0][0]*info[0][1])
  for ii in info:
    if ii[2]: # should fall in the adams normalizer
      if not ii[3]: # should NOT fall in the space normalizer
        # cob should ONLY be applied on the unit cell, not to the sg.
        possible_additional_transforms.append(ii[0])
  return possible_additional_transforms
def run(): import sys libtbx.utils.show_times_at_exit() parser = space_group_option_parser() parser.option(None, '--skip_extra_tests', action='store_true', default=False) parser.option(None, '--fixed_random_seed', default=True) command_line = parser.process(sys.argv[1:]) if not command_line.options.skip_extra_tests: # the solution-to-target change-of-basis computed as # reference-to-target x solution-to-reference # is not a mere translation exercise( sgtbx.space_group_info( 'hall: -I 4 2c (1/2*x+1/2*y+1/12,-1/2*x+1/2*y-1/12,z-1/4)'), shifted_origin=(0, 0.4, 0.9), verbose=command_line.options.verbose) exercise(sgtbx.space_group_info('hall: C 2c 2 (x-y,x+y,z)'), shifted_origin=(0.4, 0.9, 0.6), verbose=command_line.options.verbose) # the centring translation search peaks (1/2, 0, 0) which is # a structure seminvariant: test whether this is rejected exercise(sgtbx.space_group_info('hall: -P 2a 2a'), shifted_origin=(0.1, 0.2, 0.6), verbose=command_line.options.verbose) # the traditional loop over a selection of space-groups # the last one, -I 4 2c (1/2*x+1/2*y+1/12,-1/2*x+1/2*y-1/12,z-1/4), # results in solution_to_target_cb_op being non-trivial. command_line.loop_over_space_groups(exercise, shifted_origin=(0.1, 0.2, 0.6))
def __init__(self, f_in_p1, **kwds):
  assert isinstance(f_in_p1, miller.array)
  assert f_in_p1.space_group().type().hall_symbol() == ' P 1'
  self.f_in_p1 = f_in_p1
  adopt_optional_init_args(self, kwds)
  self.search_parameters = maptbx.peak_search_parameters(
    peak_search_level=3,
    interpolate=True,
    min_distance_sym_equiv=0.25,
  )
  self.space_group = sgtbx.space_group('P 1', t_den=sg_t_den)
  self.origin = None
  self.symmetry_pool = []
  self.find_centring_translations()
  if self.space_group.order_z() > 1:
    f_in_centered = self.f_in_p1.customized_copy(
      space_group_info=sgtbx.space_group_info(group=self.space_group)
      ).eliminate_sys_absent().merge_equivalents().array()
    self.cb_op_to_primitive = \
      f_in_centered.change_of_basis_op_to_primitive_setting()
    self.f_in_p1 = f_in_centered.change_basis(self.cb_op_to_primitive)
    self.space_group = self.f_in_p1.space_group()
  else:
    self.cb_op_to_primitive = sgtbx.change_of_basis_op()
  self.f_in_p1 = self.f_in_p1.generate_bijvoet_mates()
  self.find_space_group()
  self.space_group = sgtbx.space_group(
    self.space_group.type().hall_symbol(), t_den=sgtbx.sg_t_den)
  self.space_group_info = sgtbx.space_group_info(group=self.space_group)
def tst_compare():
  sg1 = construct_rational_point_group( sgtbx.space_group_info( "P 2 2 2" ).group() )
  sg2 = construct_rational_point_group( sgtbx.space_group_info( "P 2 2 2" ).group() )
  sg3 = construct_rational_point_group( sgtbx.space_group_info( "P 4 2 2" ).group() )
  sg4 = construct_rational_point_group( sgtbx.space_group_info( "P 2 2 2 (a+b,a-b,2c)").group() )
  assert compare_groups( sg1, sg2 )
  assert not compare_groups( sg1, sg3 )
  assert not compare_groups( sg1, sg4 )
def combine_symops_and_symbol(space_group_from_ops, space_group_symbol): space_group_symbol = space_group_symbol.replace(" ","").upper() z = space_group_symbol[:1] if ("PABCIFRH".find(z) < 0): raise RuntimeError( "Cannot determine lattice centring type given space group symbol" " %s" % show_string(space_group_symbol)) if (z == "P"): return sgtbx.space_group_info(group=space_group_from_ops) if (z == "H"): space_group_symbol = "R" + space_group_symbol[1:] + ":H" z = "R" elif (z == "R" and not space_group_symbol.endswith(":H")): if (space_group_symbol.endswith(":R")): z = None else: for s in space_group_from_ops: r_info = s.r().info() if (abs(r_info.type()) == 3): if (r_info.ev() == (0,0,1)): space_group_symbol = "R" + space_group_symbol[1:] + ":H" break elif (r_info.ev() == (1,1,1)): space_group_symbol += ":R" z = None break space_group_exp = sgtbx.space_group(space_group_from_ops) if (z is not None): try: space_group_exp.expand_conventional_centring_type(z) except RuntimeError: space_group_exp = None if (space_group_exp is not None): try: space_group_from_symbol = sgtbx.space_group_info( symbol=space_group_symbol).group() except RuntimeError: space_group_from_symbol = None if ( space_group_exp is None or space_group_from_symbol is None or space_group_exp != space_group_from_symbol): if space_group_from_symbol: warnings.warn(""" WARNING: Symmetry operations in input file are for space group %(space_group_exp)s However space group symbol is: %(space_group_symbol)s This may be a format error in the Scalepack file! Using %(space_group_symbol)s """ % {"space_group_exp" : str(space_group_exp.info()), "space_group_symbol" : show_string(space_group_symbol), }, UserWarning, stacklevel=10) space_group_exp = space_group_from_symbol else: raise RuntimeError( "Symmetry operations in unmerged SCALEPACK file incompatible with" " space group symbol %s" % show_string(space_group_symbol)) return sgtbx.space_group_info(group=space_group_exp)
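# Hedged usage sketch (the "_example_" helper is added for illustration and is
# not in the original file): reconcile a set of symmetry operations with the
# space-group symbol from a Scalepack header.  Here the operations are simply
# taken from the symbol itself, so the primitive-lattice shortcut applies.
def _example_combine_symops_and_symbol():
  from cctbx import sgtbx
  ops = sgtbx.space_group_info("P 21 21 21").group()
  info = combine_symops_and_symbol(
    space_group_from_ops=ops, space_group_symbol="P212121")
  return str(info)  # expected: "P 21 21 21"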
def get_all_axes(space_group_symbol=None, space_group_info=None, extension=0): assert space_group_symbol is None or space_group_info is None shift_range = 1 # RWGK Works for the 230 reference settings; it is not # RWGK clear to me (rwgk) what value is needed in general. if (space_group_symbol is not None): space_group_info = sgtbx.space_group_info(symbol=space_group_symbol) #space_group_info.show_summary() axes_dict = {} for smx in space_group_info.group(): r = smx.r() t = smx.t() shift = [0,0,0] for shift[0] in range(-shift_range,shift_range+1): for shift[1] in range(-shift_range,shift_range+1): for shift[2] in range(-shift_range,shift_range+1): ts = t.plus(sgtbx.tr_vec(shift, 1)).new_denominator(t.den()) m = sgtbx.rt_mx(r, ts) #print m rtmxanal = rlc_RTMxAnalysis(m) #print r, t, shift, ts, m if rtmxanal: #print rtmxanal axes_dict[rtmxanal] = 0 axes_list = axes_dict.keys() axes_list.sort() # reject nonenantiomorphic space groups if len(axes_list) > 0 and not re.compile("[A-z]").search(space_group_symbol[1:]): try: sgtbx.space_group_info(space_group_symbol).show_summary(), #print len(axes_list), space_group_symbol except: print space_group, space_group_symbol print sys.exit(1) axes = [] for a in axes_list: if len(a) == 3 and len(a[1]) == 3 and len(a[2]) == 3: tmp_dict = {} print "%4s %7.4f %7.4f %7.4f %7.4f %7.4f %7.4f " % (a[0],a[1][0],a[1][1],a[1][2],a[2][0],a[2][1],a[2][2]) tmp_dict['symb'] = a[0] start_array = N.asarray(a[1]) end_array = N.asarray(a[2]) start_vec = start_array - (end_array - start_array)*extension end_vec = end_array + (end_array - start_array)*extension tmp_dict['start'] = start_vec tmp_dict['end'] = end_vec #rlc# tmp_dict['start'] = a[1] #rlc# tmp_dict['end'] = a[2] axes.append(tmp_dict) else: print a else: return None return axes
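# Hedged usage sketch (the "_example_" helper is added for illustration and is
# not part of the original module): collect the axis symbols found for
# P 21 21 21.  get_all_axes() also prints one summary line per axis as a side
# effect, and returns None when the symbol fails the letter check above.
def _example_get_all_axes():
  axes = get_all_axes(space_group_symbol="P212121")
  if axes is None:
    return None
  return [a['symb'] for a in axes]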
def exercise () : m = iotbx.symmetry.manager(prefer_pdb_space_group=True) (uc_mismatch, sg_mismatch) = m.add_reflections_file( file_name="data.mtz", space_group=sgtbx.space_group_info("P222"), unit_cell=uctbx.unit_cell("50 60 70 90 90 90")) assert (m.get_current_as_strings() == ('P 2 2 2', '50 60 70 90 90 90')) (uc_mismatch, sg_mismatch) = m.add_pdb_file( file_name="model.pdb", space_group=sgtbx.space_group_info("P212121"), unit_cell=uctbx.unit_cell("50 60 70 90 90 90")) assert (not (uc_mismatch or sg_mismatch)) (uc_mismatch, sg_mismatch) = m.add_pdb_file( file_name="reference_model.pdb", space_group=sgtbx.space_group_info("P63"), unit_cell=uctbx.unit_cell("40 40 75 90 90 120")) assert ((uc_mismatch, sg_mismatch) == (True, True)) assert (m.get_current_as_strings() == ('P 21 21 21', '50 60 70 90 90 90')) (uc_mismatch, sg_mismatch) = m.add_reflections_file( file_name="data_neutron.mtz", space_group=sgtbx.space_group_info("P222"), unit_cell=uctbx.unit_cell("50.1 60 70.1 90 90 90")) assert (not (uc_mismatch or sg_mismatch)) (uc_mismatch, sg_mismatch) = m.add_reflections_file( file_name="data_rfree.hkl", space_group=None, unit_cell=None) assert (not (uc_mismatch or sg_mismatch)) assert (m.get_current_as_strings() == ('P 21 21 21', '50 60 70 90 90 90')) assert (m.check_cell_compatibility("phenix.refine")) symm_choices = m.get_symmetry_choices() assert (symm_choices.space_group_files == [('model.pdb', 'P 21 21 21'), ('reference_model.pdb', 'P 63'), ('data.mtz', 'P 2 2 2'), ('data_neutron.mtz', 'P 2 2 2')]) assert (symm_choices.unit_cell_files == [ ('model.pdb', '(50, 60, 70, 90, 90, 90)'), ('reference_model.pdb', '(40, 40, 75, 90, 90, 120)'), ('data.mtz', '(50, 60, 70, 90, 90, 90)'), ('data_neutron.mtz', '(50.1, 60, 70.1, 90, 90, 90)')]) m.set_current_as_strings("P63", "50 60 70 90 90 90") try : m.check_cell_compatibility( program_name="phenix.refine", raise_error_if_incomplete=True) except Sorry : pass else : raise Exception_expected out = StringIO() m.show(out=out) assert (out.getvalue() == """\ model.pdb: (50, 60, 70, 90, 90, 90) P 21 21 21 reference_model.pdb: (40, 40, 75, 90, 90, 120) P 63 data.mtz: (50, 60, 70, 90, 90, 90) P 2 2 2 data_neutron.mtz: (50.1, 60, 70.1, 90, 90, 90) P 2 2 2 data_rfree.hkl: None None """)
def digest(self, add_inv=False):
  return '#'.join([
    '%.5f' % self['max_angular_difference'],
    self.pretty_cb_op(),
    self['bravais'],
    sgtbx.space_group_info(group=self['reduced_group']).type().lookup_symbol(),
    sgtbx.space_group_info(group=self['best_group']).type().lookup_symbol(),
    ' '.join([str(int(x)) for x in self['constraints']]),
  ])
def run_00(): time_aniso_u_scaler = 0 for symbol in sgtbx.bravais_types.acentric + sgtbx.bravais_types.centric: #print symbol, "-"*50 space_group_info = sgtbx.space_group_info(symbol = symbol) xrs = random_structure.xray_structure( space_group_info = space_group_info, elements = ["N"]*100, volume_per_atom = 50.0, random_u_iso = True) # XXX ad a method to adptbx to do this point_group = sgtbx.space_group_info( symbol=symbol).group().build_derived_point_group() adp_constraints = sgtbx.tensor_rank_2_constraints( space_group=point_group, reciprocal_space=True) u_star = adptbx.u_cart_as_u_star(xrs.unit_cell(), adptbx.random_u_cart(u_scale=1,u_min=0.1)) u_indep = adp_constraints.independent_params(all_params=u_star) u_star = adp_constraints.all_params(independent_params=u_indep) b_cart_start=adptbx.u_as_b(adptbx.u_star_as_u_cart(xrs.unit_cell(), u_star)) # tr = (b_cart_start[0]+b_cart_start[1]+b_cart_start[2])/3 b_cart_start = [b_cart_start[0]-tr,b_cart_start[1]-tr,b_cart_start[2]-tr, b_cart_start[3],b_cart_start[4],b_cart_start[5]] tr = (b_cart_start[0]+b_cart_start[1]+b_cart_start[2])/3 # #print "Input b_cart :", " ".join(["%8.4f"%i for i in b_cart_start]), "tr:", tr F = xrs.structure_factors(d_min = 2.0).f_calc() u_star = adptbx.u_cart_as_u_star( F.unit_cell(), adptbx.b_as_u(b_cart_start)) fbc = mmtbx.f_model.ext.k_anisotropic(F.indices(), u_star) fc = F.structure_factors_from_scatterers(xray_structure=xrs).f_calc() f_obs = F.customized_copy(data = flex.abs(fc.data()*fbc)) t0 = time.time() # obj = bulk_solvent.aniso_u_scaler( f_model_abs = flex.abs(fc.data()), f_obs = f_obs.data(), miller_indices = f_obs.indices(), adp_constraint_matrix = adp_constraints.gradient_sum_matrix()) time_aniso_u_scaler += (time.time()-t0) b_cart_final = adptbx.u_as_b(adptbx.u_star_as_u_cart(f_obs.unit_cell(), adp_constraints.all_params(tuple(obj.u_star_independent)))) # obj = bulk_solvent.aniso_u_scaler( f_model_abs = flex.abs(fc.data()), f_obs = f_obs.data(), miller_indices = f_obs.indices()) b_cart_final2 = adptbx.u_as_b(adptbx.u_star_as_u_cart(f_obs.unit_cell(), tuple(obj.u_star))) # assert approx_equal(b_cart_final, b_cart_final2) #print "Output b_cart:", " ".join(["%8.4f"%i for i in b_cart_final]) assert approx_equal(b_cart_start, b_cart_final, 1.e-4) print "Time (aniso_u_scaler only): %6.4f"%time_aniso_u_scaler
def is_subgroup(self, g, h):
  tst_group = str( sgtbx.space_group_info(group=g) )
  tst_group = sgtbx.space_group_info( tst_group ).group()
  h = [s for s in h]
  for s in h:
    tst_group.expand_smx( s )
  if str(sgtbx.space_group_info(group=tst_group)) == str( sgtbx.space_group_info(group=g) ):
    return True
  else:
    return False
def exercise_writer () : from iotbx import file_reader from cctbx import uctbx, sgtbx from scitbx.array_family import flex file_name = libtbx.env.find_in_repositories( relative_path="phenix_regression/wizards/partial_refine_001_map_coeffs.mtz", test=os.path.isfile) if file_name is None : print "Can't find map coefficients file, skipping." return mtz_in = file_reader.any_file(file_name, force_type="hkl").file_object miller_arrays = mtz_in.as_miller_arrays() map_coeffs = miller_arrays[0] fft_map = map_coeffs.fft_map(resolution_factor=1/3.0) fft_map.apply_sigma_scaling() fft_map.as_ccp4_map(file_name="2mFo-DFc.map") m = iotbx.ccp4_map.map_reader(file_name="2mFo-DFc.map") real_map = fft_map.real_map_unpadded() mmm = flex.double(list(real_map)).min_max_mean() assert approx_equal(m.unit_cell_parameters, map_coeffs.unit_cell().parameters()) assert approx_equal(mmm.min, m.header_min) assert approx_equal(mmm.max, m.header_max) #assert approx_equal(mmm.mean, m.header_mean) # random small maps of different sizes for nxyz in flex.nested_loop((1,1,1),(4,4,4)): mt = flex.mersenne_twister(0) grid = flex.grid(nxyz) map = mt.random_double(size=grid.size_1d()) map.reshape(grid) real_map = fft_map.real_map_unpadded() iotbx.ccp4_map.write_ccp4_map( file_name="random.map", unit_cell=uctbx.unit_cell((1,1,1,90,90,90)), space_group=sgtbx.space_group_info("P1").group(), gridding_first=(0,0,0), gridding_last=tuple(fft_map.n_real()), map_data=real_map, labels=flex.std_string(["iotbx.ccp4_map.tst"])) m = iotbx.ccp4_map.map_reader(file_name="random.map") mmm = flex.double(list(real_map)).min_max_mean() assert approx_equal(m.unit_cell_parameters, (1,1,1,90,90,90)) assert approx_equal(mmm.min, m.header_min) assert approx_equal(mmm.max, m.header_max) # gridding_first = (0,0,0) gridding_last = tuple(fft_map.n_real()) map_box = maptbx.copy(map, gridding_first, gridding_last) map_box.reshape(flex.grid(map_box.all())) iotbx.ccp4_map.write_ccp4_map( file_name="random_box.map", unit_cell=uctbx.unit_cell((1,1,1,90,90,90)), space_group=sgtbx.space_group_info("P1").group(), map_data=map_box, labels=flex.std_string(["iotbx.ccp4_map.tst"]))
def exercise_combine_symmetry () : """ Test the extraction of symmetry from both a PDB file and an MTZ file. """ from mmtbx.regression import model_1yjp import mmtbx.command_line import iotbx.pdb.hierarchy from cctbx import sgtbx from cctbx import uctbx # 1yjp, as usual pdb_in = iotbx.pdb.hierarchy.input(pdb_string=model_1yjp) xrs = pdb_in.input.xray_structure_simple() f = open("tst_combine_symmetry.pdb", "w") f.write(pdb_in.hierarchy.as_pdb_string(crystal_symmetry=xrs)) f.close() f_calc = abs(xrs.structure_factors(d_min=1.5).f_calc()) # Make up slightly more exact unit cell, but set SG to P2 f_calc = f_calc.customized_copy( crystal_symmetry=f_calc.crystal_symmetry().customized_copy( space_group_info=sgtbx.space_group_info("P2"), unit_cell=uctbx.unit_cell((21.9371, 4.8659, 23.4774, 90.0, 107.0832, 90.00)))) flags = f_calc.generate_r_free_flags() mtz = f_calc.as_mtz_dataset(column_root_label="F") mtz.add_miller_array(flags, column_root_label="FreeR_flag") mtz.mtz_object().write("tst_combine_symmetry.mtz") cmdline = mmtbx.command_line.load_model_and_data( args=["tst_combine_symmetry.pdb", "tst_combine_symmetry.mtz"], master_phil=mmtbx.command_line.generate_master_phil_with_inputs(""), process_pdb_file=False, create_fmodel=True, out=null_out()) symm = cmdline.xray_structure.crystal_symmetry() assert (approx_equal(symm.unit_cell().parameters(), (21.9371, 4.8659, 23.4774, 90.0, 107.0832, 90.0))) assert (str(symm.space_group_info()) == "P 1 21 1") # Part 2: incompatible space groups f_calc_2 = f_calc.customized_copy( crystal_symmetry=f_calc.crystal_symmetry().customized_copy( space_group_info=sgtbx.space_group_info("P1"))) flags_2 = f_calc_2.generate_r_free_flags() mtz = f_calc_2.as_mtz_dataset(column_root_label="F") mtz.add_miller_array(flags_2, column_root_label="FreeR_flag") mtz.mtz_object().write("tst_combine_symmetry_2.mtz") try : cmdline = mmtbx.command_line.load_model_and_data( args=["tst_combine_symmetry.pdb", "tst_combine_symmetry_2.mtz"], master_phil=mmtbx.command_line.generic_simple_input_phil(), process_pdb_file=False, create_fmodel=True, out=null_out()) except Sorry, s : assert ("Incompatible space groups" in str(s))
def tst_find_best_cell(): uc_array=[ uctbx.unit_cell( '40, 50, 60, 90, 90, 90' ), uctbx.unit_cell( '40, 60, 50, 90, 90, 90' ), uctbx.unit_cell( '50, 40, 60, 90, 90, 90' ), uctbx.unit_cell( '50, 60, 40, 90, 90, 90' ), uctbx.unit_cell( '60, 40, 50, 90, 90, 90' ), uctbx.unit_cell( '60, 50, 40, 90, 90, 90' ) ] uc_correct = [ uctbx.unit_cell( '40, 50, 60, 90, 90, 90' ), uctbx.unit_cell( '40, 60, 50, 90, 90, 90' ), uctbx.unit_cell( '40, 50, 60, 90, 90, 90' ), uctbx.unit_cell( '50, 60, 40, 90, 90, 90' ), uctbx.unit_cell( '40, 60, 50, 90, 90, 90' ), uctbx.unit_cell( '50, 60, 40, 90, 90, 90' ) ] sg_info = sgtbx.space_group_info( 'P 21 21 2' ) sg_info_2 = sgtbx.space_group_info( 'I 21 21 21' ) sg = sg_info.group() sg_2 = sg_info_2.group() for uc, correct in zip(uc_array,uc_correct): best_cell_finder = fbc( uc, sg ) assert approx_equal( correct.parameters(), best_cell_finder.return_best_cell().parameters() ) cb_op = best_cell_finder.return_change_of_basis_op_to_best_cell() xs = crystal.symmetry( uc, space_group=sg) assert approx_equal( correct.parameters(), xs.change_basis(cb_op).unit_cell().parameters() ) best_cell_finder = fbc( uc, sg_2 ) assert approx_equal( uc_array[0].parameters(), best_cell_finder.return_best_cell().parameters() ) xs = crystal.symmetry( uc, space_group=sg_2) cb_op = best_cell_finder.return_change_of_basis_op_to_best_cell() assert approx_equal( uc_array[0].parameters(), xs.change_basis(cb_op).unit_cell().parameters() ) # test with incomming sg not in reference setting uc = uctbx.unit_cell( '60, 40, 30, 90, 90, 90' ) sg_info_3 = sgtbx.space_group_info( 'P 1 1 21' ) sg_3 = sg_info_3.group() best_cell_finder = fbc( uc, sg_3 ) xs_best = best_cell_finder.return_best_xs() uc_correct = uctbx.unit_cell( '40, 30, 60, 90, 90, 90' ) sg_correct = sgtbx.space_group_info( 'P 1 21 1' ).group() assert approx_equal( xs_best.unit_cell().parameters(), uc_correct.parameters() ) assert sg_correct == xs_best.space_group()
def exercise_monoclinic_cell_choices_core(space_group_number, verbose): # transformation matrices for cell choices # columns are basis vectors "new in terms of old" # see Int. Tab. Vol. A, p. 22, Fig. 2.2.6.4. b1 = (1, 0, 0, 0, 1, 0, 0, 0, 1) b2 = (-1, 0, 1, 0, 1, 0, -1, 0, 0) b3 = (0, 0, -1, 0, 1, 0, 1, 0, -1) flip = (0, 0, 1, 0, -1, 0, 1, 0, 0) p3s = sgtbx.space_group("P 3*") done = {} ref = sgtbx.space_group_info(number=space_group_number) ref_uhm = ref.type().universal_hermann_mauguin_symbol() for i_fl,fl in enumerate([b1, flip]): rfl = sgtbx.rot_mx(fl) cfl = sgtbx.change_of_basis_op(sgtbx.rt_mx(rfl)) for i_rt,rt in enumerate(p3s): rp3 = rt.r() cp3 = sgtbx.change_of_basis_op(sgtbx.rt_mx(rp3)) for i_cs,cs in enumerate([b1,b2,b3]): rcs = sgtbx.rot_mx(cs).inverse() ccs = sgtbx.change_of_basis_op(sgtbx.rt_mx(rcs)) cb_all = cp3 * cfl * ccs refcb = ref.change_basis(cb_all) refcb2 = sgtbx.space_group_info(symbol=ref_uhm+"("+str(cb_all.c())+")") assert refcb2.group() == refcb.group() s = sgtbx.space_group_symbols(str(refcb)) q = s.qualifier() hm = str(refcb) if (0 or verbose): print hm, q, cb_all.c() if (i_fl == 0): assert q[0] == "bca"[i_rt] if (len(q) == 2): assert q[1] == "123"[i_cs] elif (q[0] == "-"): assert q[1] == "bca"[i_rt] if (len(q) == 3): assert q[2] == "123"[i_cs] else: assert q[0] == "bca"[i_rt] if (len(q) == 2 and q[1] != "123"[i_cs]): assert done[hm] == 1 done.setdefault(hm, 0) done[hm] += 1 assert len(done) in [3, 9, 18] assert done.values() == [18/len(done)]*len(done) if (0 or verbose): print return done
def exercise():
  from cctbx import sgtbx
  import sys
  for symbol in acentric + centric:
    space_group_info = sgtbx.space_group_info(symbol=symbol)
    assert str(space_group_info) == symbol
    assert space_group_info.is_reference_setting()
  if ("--Verbose" in sys.argv[1:]):
    for symbol in centric:
      print "/* %s */ %d," % (
        symbol, sgtbx.space_group_info(symbol=symbol).type().number())
  assert tst_bravais_types(verbose=("--Verbose" in sys.argv[1:]))
  print "OK"
def exercise_quick(): for space_group_symbol in ("P-1", "P2/m", "C2/m", "Pmmm", "Cmmm", "Fmmm", "Immm", "P4/mmm", "I4/mmm", "R-3m", "P6/mmm", "Pm-3m", "Im-3m", "Fm-3m"): parent_group_info = sgtbx.space_group_info(space_group_symbol) non_centric = sgtbx.space_group() for i_ltr in xrange(parent_group_info.group().n_ltr()): for i_smx in xrange(parent_group_info.group().n_smx()): s = parent_group_info.group()(i_ltr,0,i_smx) non_centric.expand_smx(s) assert non_centric.f_inv() == 1 assert non_centric.order_z() * 2 == parent_group_info.group().order_z() non_centric_info = sgtbx.space_group_info(group=non_centric) unit_cell = non_centric_info.any_compatible_unit_cell(volume=1000) crystal_symmetry = crystal.symmetry( unit_cell=unit_cell, space_group_info=non_centric_info) minimum_symmetry = crystal_symmetry.minimum_cell() lattice_group = lattice_symmetry.group( minimum_symmetry.unit_cell(), max_delta=0.5) lattice_group_info = sgtbx.space_group_info(group=lattice_group) assert lattice_group_info.group() == minimum_symmetry.space_group() subgrs = subgroups.subgroups(lattice_group_info).groups_parent_setting() for group in subgrs: subsym = crystal.symmetry( unit_cell=minimum_symmetry.unit_cell(), space_group=group, assert_is_compatible_unit_cell=False) assert subsym.unit_cell().is_similar_to(minimum_symmetry.unit_cell()) assert lattice_symmetry.find_max_delta( reduced_cell=minimum_symmetry.unit_cell(), space_group=group) < 0.6 minimum_symmetry = crystal.symmetry( unit_cell="106.04, 181.78, 110.12, 90, 90, 90", space_group_symbol="P 1").minimum_cell() for max_delta in xrange(10,100,10): lattice_group = lattice_symmetry.group( minimum_symmetry.unit_cell(), max_delta=max_delta) lattice_group_info = sgtbx.space_group_info(group=lattice_group) assert str(lattice_group_info) == "P 4 2 2"
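# A small self-contained sketch of the same lattice-symmetry call (the cell
# values and helper name are made up for illustration): for an exactly
# tetragonal primitive metric the Le Page search returns the acentric lattice
# group, as in the assertion above.
def _example_lattice_group():
  minimum_symmetry = crystal.symmetry(
    unit_cell="60, 60, 90, 90, 90, 90",
    space_group_symbol="P 1").minimum_cell()
  lattice_group = lattice_symmetry.group(
    minimum_symmetry.unit_cell(), max_delta=0.5)
  return str(sgtbx.space_group_info(group=lattice_group))  # expected "P 4 2 2"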
def show(self, out=None):
  if out is None:
    out = sys.stdout
  print >> out, "Input space group : ", sgtbx.space_group_info(group=self.x_sg)
  print >> out, "Input unit cell : ", self.x_uc.parameters()
  print >> out, "Likely point group : ", sgtbx.space_group_info(group=self.lpg)
  print >> out
  print >> out, "Possible crystal symmetries: "
  for sx_cb in self.allowed_under_pg_and_sys_abs:
    sx_cb.show(out)
    print >> out
def tst_build_reticular_twin_laws():
  sg1 = construct_rational_point_group( sgtbx.space_group_info( "P 2 2 2" ).group() )
  sg2 = construct_rational_point_group( sgtbx.space_group_info( "P 4 2 2 (a,b,2c)").group() )
  result = build_reticular_twin_laws(sg1, sg2)
  assert len(result) == 1
  for ii in result:
    assert as_hkl(ii.transpose()) == "k,-h,l"

  sg1 = construct_rational_point_group( sgtbx.space_group_info( "P 2 2 2" ).group() )
  sg2 = construct_rational_point_group( sgtbx.space_group_info( "P 4 2 2").group() )
  result = build_reticular_twin_laws(sg1, sg2)
  assert len(result) == 1
  for ii in result:
    assert as_hkl(ii.transpose()) == "k,-h,l"
def exercise_orthorhombic_hm_qualifier_as_cb_symbol(): cb_symbols = { "cab": ["c,a,b", "z,x,y"], "a-cb": ["a,-c,b", "x,-z,y"], "-cba": ["-c,b,a", "-z,y,x"], "bca": ["b,c,a", "y,z,x"], "ba-c": ["b,a,-c", "y,x,-z"]} for sgsyms1 in sgtbx.space_group_symbol_iterator(): n = sgsyms1.number() if (n < 16 or n > 74): continue q = sgsyms1.qualifier() if (len(q) == 0): continue e = sgsyms1.extension() if (e == "\0"): e = "" ehm = sgtbx.space_group_symbols( space_group_number=n, extension=e).universal_hermann_mauguin() cabc, cxyz = cb_symbols[q] assert sgtbx.change_of_basis_op(cxyz).as_abc() == cabc assert sgtbx.change_of_basis_op(cabc).as_xyz() == cxyz uhm_xyz = ehm + " ("+cxyz+")" sgsyms2 = sgtbx.space_group_symbols(symbol=uhm_xyz) assert sgsyms2.change_of_basis_symbol() == cxyz assert sgsyms2.extension() == sgsyms1.extension() assert sgsyms2.universal_hermann_mauguin() == uhm_xyz g1 = sgtbx.space_group(space_group_symbols=sgsyms1) g2 = sgtbx.space_group(space_group_symbols=sgsyms2) assert g2 == g1 g2 = sgtbx.space_group( sgtbx.space_group_symbols(symbol=ehm)).change_basis( sgtbx.change_of_basis_op(sgtbx.rt_mx(cxyz))) assert g2 == g1 for c in [cxyz, cabc]: g2 = sgtbx.space_group_info( group=sgtbx.space_group( sgtbx.space_group_symbols(symbol=ehm))).change_basis(c).group() assert g2 == g1 cit = sgtbx.rt_mx(cxyz).r().inverse().transpose() cit_xyz = cit.as_xyz() g2 = sgtbx.space_group_info( group=sgtbx.space_group( sgtbx.space_group_symbols(symbol=ehm))).change_basis(cit_xyz).group() assert g2 == g1 assert cit.as_xyz(False, "abc") == cabc uhm_abc = ehm + " ("+cabc+")" sgsyms2 = sgtbx.space_group_symbols(symbol=uhm_abc) assert sgsyms2.change_of_basis_symbol() == cxyz assert sgsyms2.extension() == sgsyms1.extension() assert sgsyms2.universal_hermann_mauguin() == uhm_xyz g2 = sgtbx.space_group(space_group_symbols=sgsyms2) assert g2 == g1
def exercise_ss_continuous_shifts_are_principal(): for i in xrange(1, 231): sgi = sgtbx.space_group_info(number=i) ss = sgi.structure_seminvariants() assert ss.continuous_shifts_are_principal() for symbols in sgtbx.space_group_symbol_iterator(): sgi = sgtbx.space_group_info(group=sgtbx.space_group( space_group_symbols=symbols)) ss = sgi.structure_seminvariants() if (not ss.continuous_shifts_are_principal()): assert symbols.universal_hermann_mauguin() in [ "R 3 :R", "R 3 m :R", "R 3 c :R"]
def run(args):
  if (len(args) == 0):
    from libtbx.utils import Usage
    import libtbx.load_env
    raise Usage(
      "%s all|list-of-space-group-symbols-or-numbers"
        % libtbx.env.dispatcher_name)
  from cctbx.sgtbx import space_group_info
  from cctbx.sgtbx.subgroups import show
  if (args == ["all"]):
    for space_group_number in xrange(1,231):
      show(parent_group_info=space_group_info(space_group_number))
  else:
    for arg in args:
      show(parent_group_info=space_group_info(symbol=arg))
def tst_groups(): cb_ops = sub_lattice_tools.generate_cb_op_up_to_order(7) mats = sub_lattice_tools.generate_matrix_up_to_order(7) base_group = sgtbx.space_group_info( "P 2 2 2" ).group() for cb_op, mat in zip(cb_ops, mats): rat_cb_op = mat extended_group=None try: extended_group = sgtbx.space_group_info( "P 2 2 2 (%s)"%cb_op ).group() except Exception: pass rbg = construct_rational_point_group( base_group, rat_cb_op ) reg = None if extended_group is not None: reg = construct_rational_point_group( extended_group ) assert compare_groups(reg, rbg)
def make_new_xs(self, mat, cb_op, to_reference=True): # make new lattice new_basis = self.basis * mat.as_float() new_uc = uctbx.unit_cell(orthogonalization_matrix=new_basis) tmp_xs = crystal.symmetry( unit_cell=new_uc, space_group=sgtbx.lattice_symmetry.group(new_uc, self.max_delta), assert_is_compatible_unit_cell=False, ) extra_cb_op = tmp_xs.change_of_basis_op_to_reference_setting() self.extra_cb_op.append(extra_cb_op) # new_sg = None try: new_sg = sgtbx.space_group_info(group=self.basic_xs_n.space_group()).change_basis(cb_op) except Exception: pass if to_reference: tmp_xs = tmp_xs.change_basis(extra_cb_op) try: new_sg = new_sg.change_basis(extra_cb_op) except Exception: pass self.xs_list.append(tmp_xs) self.sg_list.append(new_sg)
from __future__ import print_function from cctbx import crystal, sgtbx, uctbx from cctbx.sgtbx import lattice_symmetry from cctbx.sgtbx.bravais_types import bravais_lattice sg = sgtbx.space_group_info("C2").group() uc = uctbx.unit_cell( parameters="102.965614947,79.2397736681,77.45579668,90.0,139.074859178,90.0" ) cs = crystal.symmetry(unit_cell=uc, space_group=sg) print("Input cell:") cs.show_summary() print() print("Best cell:") cs.best_cell().show_summary() print() print("Reference settings:") print("Best cell -> reference setting") cs.best_cell().as_reference_setting().show_summary() print("Input -> reference setting") cs.as_reference_setting().show_summary() print("Input -> primitive setting -> reference setting") cs.primitive_setting().as_reference_setting().show_summary() print("Best cell -> primitive setting -> reference setting") print() print("Primitive settings:") cs.best_cell().primitive_setting().as_reference_setting().show_summary() print("Best cell -> primitive setting")
def from_words(self, words, master):
  symbol = libtbx.phil.str_from_words(words)
  if (symbol is None): return None
  if (symbol is Auto): return Auto
  return sgtbx.space_group_info(symbol=str(symbol))
def spacegroup_number_to_name(spg_num):
  """Convert a spacegroup number to a more readable name."""
  return sgtbx.space_group_info(number=spg_num).type().lookup_symbol()
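# Hedged usage example (the "_example_" helper is added for illustration;
# symbols follow the standard sgtbx type lookup):
def _example_spacegroup_number_to_name():
  assert spacegroup_number_to_name(19) == "P 21 21 21"
  assert spacegroup_number_to_name(1) == "P 1"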
try:
  from crys3d.qttbx.xray_structure_viewer import display
except IOError:
  def display(*args, **kwds): pass
# then use it
#display(xray_structure=xs)

# Let's look at symmetries
info = xs.space_group_info()
info.show_summary()
print "Hall: %s" % info.type().hall_symbol()

# List of all symmetries
print "Symmetries:"
for rt in info.group():
  print rt.as_xyz()

# The mathematical object representing the group
g = info.group()
print "Inversion at origin:%s" % ('no', 'yes')[g.is_origin_centric()]

# Let's triple the cell
from cctbx import sgtbx
g.expand_ltr(sgtbx.tr_vec((1,0,1),3).new_denominator(g.t_den()))
g.expand_ltr(sgtbx.tr_vec((2,0,2),3).new_denominator(g.t_den()))
tripled_info = sgtbx.space_group_info(group=g)
tripled_info.show_summary()
print tripled_info.type().hall_symbol()

# Let's change the space group of the structure now
xs.as_cif_simple(open('03srv209x3.cif', 'w'))
def CheckFormat(self, value):
  if len(value) == 0:
    return ""
  from cctbx import sgtbx
  sg = str(sgtbx.space_group_info(symbol=str(value)))
  return sg
def __init__( self, intensities, normalisation="ml_aniso", lattice_symmetry_max_delta=2.0, d_min=libtbx.Auto, min_i_mean_over_sigma_mean=4, min_cc_half=0.6, relative_length_tolerance=None, absolute_angle_tolerance=None, best_monoclinic_beta=True, ): u"""Initialise a symmetry_base object. Args: intensities (cctbx.miller.array): The intensities on which to perform symmetry anaylsis. normalisation (str): The normalisation method to use. Possible choices are 'kernel', 'quasi', 'ml_iso' and 'ml_aniso'. Set to None to switch off normalisation altogether. lattice_symmetry_max_delta (float): The maximum value of delta for determining the lattice symmetry using the algorithm of Le Page (1982). d_min (float): Optional resolution cutoff to be applied to the input intensities. If set to :data:`libtbx.Auto` then d_min will be automatically determined according to the parameters ``min_i_mean_over_sigma_mean`` and ``min_cc_half``. min_i_mean_over_sigma_mean (float): minimum value of |I|/|sigma(I)| for automatic determination of resolution cutoff. min_cc_half (float): minimum value of CC½ for automatic determination of resolution cutoff. relative_length_tolerance (float): Relative length tolerance in checking consistency of input unit cells against the median unit cell. absolute_angle_tolerance (float): Absolute angle tolerance in checking consistency of input unit cells against the median unit cell. best_monoclinic_beta (bool): If True, then for monoclinic centered cells, I2 will be preferred over C2 if it gives a more oblique cell (i.e. smaller beta angle). """ self.input_intensities = intensities uc_params = [flex.double() for i in range(6)] for d in self.input_intensities: for i, p in enumerate(d.unit_cell().parameters()): uc_params[i].append(p) self.median_unit_cell = uctbx.unit_cell( parameters=[flex.median(p) for p in uc_params]) self._check_unit_cell_consistency(relative_length_tolerance, absolute_angle_tolerance) self.intensities = self.input_intensities[0] self.dataset_ids = flex.double(self.intensities.size(), 0) for i, d in enumerate(self.input_intensities[1:]): self.intensities = self.intensities.concatenate( d, assert_is_similar_symmetry=False) self.dataset_ids.extend(flex.double(d.size(), i + 1)) self.intensities = self.intensities.customized_copy( unit_cell=self.median_unit_cell) self.intensities.set_observation_type_xray_intensity() sys_absent_flags = self.intensities.sys_absent_flags( integral_only=True).data() self.intensities = self.intensities.select(~sys_absent_flags) self.dataset_ids = self.dataset_ids.select(~sys_absent_flags) self.lattice_symmetry_max_delta = lattice_symmetry_max_delta self.subgroups = metric_subgroups( self.intensities.crystal_symmetry(), max_delta=self.lattice_symmetry_max_delta, bravais_types_only=False, best_monoclinic_beta=best_monoclinic_beta, ) self.cb_op_inp_min = self.subgroups.cb_op_inp_minimum self.intensities = (self.intensities.change_basis( self.cb_op_inp_min).customized_copy( space_group_info=sgtbx.space_group_info( "P1")).map_to_asu().set_info(self.intensities.info())) self.lattice_group = (self.subgroups.result_groups[0] ["subsym"].space_group().make_tidy()) self.patterson_group = ( self.lattice_group.build_derived_patterson_group().make_tidy()) logger.info("Patterson group: %s" % self.patterson_group.info()) sel = self.patterson_group.epsilon(self.intensities.indices()) == 1 self.intensities = self.intensities.select(sel) self.dataset_ids = self.dataset_ids.select(sel) # Correct SDs by "typical" SD factors self._correct_sigmas(sd_fac=2.0, 
sd_b=0.0, sd_add=0.03) self._normalise(normalisation) self._resolution_filter(d_min, min_i_mean_over_sigma_mean, min_cc_half)
def infer_unit_cell_from_symmetry(params, space_group): # XXX exercised by iotbx/kriber/tst_strudat.py # XXX TODO: add to uctbx tests from cctbx import sgtbx error_msg = "Cannot interpret unit cell parameters." # laue_group = str(sgtbx.space_group_info( group=space_group.build_derived_laue_group())).replace(" ", "") # if (len(params) == 6): return unit_cell(params) else: crystal_system = space_group.crystal_system() if (crystal_system == "Cubic"): if len(params) == 1: a = params[0] elif len(params) == 3: a,b,c = params assert a==b==c else: raise RuntimeError(error_msg) unit_cell_ = unit_cell((a,a,a,90,90,90)) elif (crystal_system in ("Hexagonal", "Trigonal")): is_rhombohedral = False if (crystal_system == "Trigonal"): if (laue_group in ("R-3m:R", "R-3:R")): is_rhombohedral = True if (is_rhombohedral): if len(params) != 2: raise RuntimeError(error_msg) a = params[0] angle = params[1] unit_cell_ = unit_cell((a,a,a,angle,angle,angle)) else: if len(params) == 2: a = params[0] c = params[1] elif len(params) == 3: a,b,c = params assert a == b elif len(params) == 4: a,b,c,angle = params assert a == b assert angle == 120 else: raise RuntimeError(error_msg) unit_cell_ = unit_cell((a,a,c,90,90,120)) elif (crystal_system == "Tetragonal"): if len(params) == 2: a = params[0] c = params[1] unit_cell_ = unit_cell((a,a,c,90,90,90)) elif len(params) == 3: a,b,c = params[:3] assert a == b unit_cell_ = unit_cell((a,a,c,90,90,90)) else: raise RuntimeError(error_msg) elif (crystal_system == "Orthorhombic"): if len(params) != 3: raise RuntimeError(error_msg) a = params[0] b = params[1] c = params[2] unit_cell_ = unit_cell((a,b,c,90,90,90)) elif (crystal_system == "Monoclinic"): if len(params) != 4: raise RuntimeError(error_msg) a = params[0] b = params[1] c = params[2] angle = params[3] if (laue_group == "P12/m1"): unit_cell_ = unit_cell((a,b,c,90,angle,90)) elif (laue_group == "P112/m"): unit_cell_ = unit_cell((a,b,c,90,90,angle)) elif (laue_group == "P2/m11"): unit_cell_ = unit_cell((a,b,c,angle,90,90)) elif (crystal_system == "Triclinic"): raise RuntimeError(error_msg) return unit_cell_
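# Hedged illustrative calls (the "_example_" helper is added for illustration;
# parameter conventions follow the branches handled above):
def _example_infer_unit_cell_from_symmetry():
  from cctbx import sgtbx
  cubic = sgtbx.space_group_info("F m -3 m").group()
  tetragonal = sgtbx.space_group_info("P 4/m m m").group()
  uc1 = infer_unit_cell_from_symmetry([100.0], cubic)
  # expected: unit_cell((100, 100, 100, 90, 90, 90))
  uc2 = infer_unit_cell_from_symmetry([50.0, 60.0], tetragonal)
  # expected: unit_cell((50, 50, 60, 90, 90, 90))
  return uc1, uc2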
def exercise_lex_parse_build(): exercise_parser(cif.reader, cif.builders.cif_model_builder) cm = cif.reader(input_string=cif_quoted_string).model() assert cm['global']['_a'] == 'a"b' assert cm['global']['_b'] == "a dog's life" stdout = sys.stdout s = StringIO() sys.stdout = s try: cif.reader(input_string=cif_invalid_missing_value) except CifParserError: pass else: raise Exception_expected r = cif.reader(input_string=cif_invalid_missing_value, raise_if_errors=False) assert r.error_count() == 1 try: cif.reader(input_string=cif_invalid_string) except CifParserError: pass else: raise Exception_expected a = cif.reader(input_string=cif_cod) assert a.error_count() == 0 try: cif.reader(input_string=cif_invalid_semicolon_text_field) except CifParserError: pass else: raise Exception_expected d = cif.reader(input_string=cif_valid_semicolon_text_field) assert d.error_count() == 0 assert d.model()['1']['_a'] == '\n1\n' e = cif.reader(input_string=cif_unquoted_string_semicolon) assert not show_diff( str(e.model()), """\ data_1 _a ;1 _b ; _c 2 """) cif_str_1 = """\ data_1 _a 1 """ cif_str_2 = """\ data_2 _b 2 """ cm = cif.reader(input_string=cif_str_1).model() assert list(cm.keys()) == ['1'] cif.reader(input_string=cif_str_2, cif_object=cm).model() assert list(cm.keys()) == ['1', '2'] try: cm = cif.reader(input_string=cif_invalid_loop).model() except CifParserError: pass else: raise Exception_expected try: cm = cif.reader(input_string=cif_invalid_loop_2).model() except CifParserError: pass else: raise Exception_expected sys.stdout = stdout arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template % ('_refln_F_calc', '_refln_F_meas', '_refln_F_sigma')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_F_calc', '_refln_F_meas'] assert arrays['_refln_F_calc'].sigmas() is None assert isinstance(arrays['_refln_F_meas'].sigmas(), flex.double) arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template % ('_refln_A_calc', '_refln_B_calc', '_refln_F_meas')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_A_calc', '_refln_F_meas'] assert arrays['_refln_A_calc'].is_complex_array() arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template % ('_refln_A_meas', '_refln_B_meas', '_refln_F_meas')), data_block_name='global') assert sorted(arrays.keys()) == ['_refln_A_meas', '_refln_F_meas'] assert arrays['_refln_A_meas'].is_complex_array() arrays = miller.array.from_cif( file_object=StringIO(cif_miller_array_template % ('_refln_intensity_calc', '_refln_intensity_meas', '_refln_intensity_sigma')), data_block_name='global') assert sorted( arrays.keys()) == ['_refln_intensity_calc', '_refln_intensity_meas'] arrays = miller.array.from_cif(file_object=StringIO( cif_miller_array_template % ('_refln_F_calc', '_refln_phase_calc', '_refln_F_sigma')), data_block_name='global') assert arrays['_refln_F_calc'].is_complex_array() for data_block_name in (None, "global"): miller_arrays = cif.reader( file_object=StringIO(cif_miller_array_template % ('_refln_F_calc', '_refln_F_meas', '_refln_F_sigma'))).as_miller_arrays( data_block_name=data_block_name) assert " ".join(sorted([str(ma.info()) for ma in miller_arrays])) \ == "cif:global,_refln_F_calc cif:global,_refln_F_meas,_refln_F_sigma" f = open_tmp_file(suffix="cif") f.write(cif_miller_array_template % ('_refln_F_calc', '_refln_F_meas', '_refln_F_sigma')) f.close() miller_arrays = any_reflection_file(file_name=f.name).as_miller_arrays() assert len(miller_arrays) == 2 cs = 
crystal.symmetry(space_group_info=sgtbx.space_group_info("P1")) miller_arrays = any_reflection_file(file_name=f.name).as_miller_arrays( crystal_symmetry=cs, force_symmetry=True, anomalous=True) assert miller_arrays[0].anomalous_flag() is True assert miller_arrays[0].crystal_symmetry().space_group() == cs.space_group( )
def create_all_subgroups( sg1,show_all=True, reverse=False ): sg_high = sgtbx.space_group_info( sg1 ).group() sg_low = sgtbx.space_group_info( "p1" ).group() graph_object = pointgroup_tools.point_group_graph( sg_low, sg_high, False,True) highest_sg = str( sgtbx.space_group_info( sg1 ) ) rev_dict = reverse_dict( graph_object.graph.o ) maximal_subgroups = get_maximal_subgroup( highest_sg, rev_dict ) if show_all: print "Subgroups of input space groups which can be constructed by introducing one single operator (and group completion) in the subgroup:" for sg in rev_dict[ highest_sg ]: line = " " line += sg+(30-len(sg))*" "+str(graph_object.graph.edge_objects[ sg ][highest_sg])+(90-len( str(graph_object.graph.edge_objects[ sg ][highest_sg]) ))*" " print line print print "Maximal subgroup detected in the full sub-group-graph: " for sg in maximal_subgroups: line = " " line += sg print line print print print print " Cosets for each maximal sub-group and the input space group are listed:" for sg in maximal_subgroups: print "-----------------------------------------------------------------" show_cosets.run( sg,highest_sg ) print "-----------------------------------------------------------------" print print print print else: print "Maximal subgroups of %s: "%(sg1) for sg in maximal_subgroups: line = " " line += sg print line print print print if reverse: print "Minimal supergroups generated by the sub-groups of the input space group:" tmp_sg = sgtbx.space_group_info( sg1 ) for sg in maximal_subgroups: tmp_sgsg = sgtbx.space_group_info( sg ) cb_op = tmp_sgsg.change_of_basis_op_to_reference_setting() okai=False try: new_sg = tmp_sg.change_basis( cb_op ) okai=True print new_sg ," is a minimal supergroup of ", tmp_sgsg.change_basis(cb_op) except Exception: pass if not okai: print "%s (%s) is a minimal supergroup of %s [*]"%(tmp_sg,cb_op, tmp_sgsg.change_basis(cb_op)) print print print
def exercise(): flex.set_random_seed(123456) random.seed(123456) base = "tst_table_one" pdb_in = iotbx.pdb.hierarchy.input(pdb_string=model_1yjp) xrs = pdb_in.xray_structure_simple() xrs.set_inelastic_form_factors(photon=1.54, table="sasaki") fc = abs(xrs.structure_factors(d_min=1.5).f_calc()).average_bijvoet_mates() fc.set_observation_type_xray_amplitude() flags = fc.generate_r_free_flags() mtz = fc.as_mtz_dataset(column_root_label="F", wavelength=1.54) mtz.add_miller_array(flags, column_root_label="FreeR_flag") mtz.mtz_object().write(base + ".mtz") xrs_p1 = xrs.expand_to_p1() xrs_p1.shake_sites_in_place(rms_difference=0.1) fc_p1 = xrs_p1.structure_factors(d_min=1.4).f_calc() fc_p1_extra = fc_p1.randomize_amplitude_and_phase(amplitude_error=1.0, phase_error_deg=0, random_seed=123456) fc_p1 = abs( fc_p1.concatenate(other=fc_p1_extra)).sort(by_value="packed_indices") fc_p1.set_observation_type_xray_amplitude() sg_p2 = sgtbx.space_group_info("P2") ic = fc_p1.f_as_f_sq().customized_copy(space_group_info=sg_p2, sigmas=flex.double( fc_p1.size(), 10.0)) ic.export_as_scalepack_unmerged(file_name=base + ".sca") open(base + ".pdb", "w").write(model_1yjp) args = [ base + ".mtz", base + ".pdb", "unmerged_data=%s.sca" % base, "prefix=tst_table_one_1", ] table_one.run(args=args, out=null_out(), use_current_directory_if_not_specified=True) # now with unmerged data in SHELX format f = open(base + ".hkl", "w") ic.export_as_shelx_hklf(file_object=f) f.close() args = [ base + ".mtz", base + ".pdb", "unmerged_data=%s.hkl=hklf4" % base, "prefix=tst_table_one_2", ] table_one.run(args=args, out=null_out(), use_current_directory_if_not_specified=True) # now with phil file f = open("tst_table_one_3.eff", "w") f.write("""\ table_one { structure { name = %(base)s pdb_file = %(base)s.pdb mtz_file = %(base)s.mtz unmerged_data = %(base)s.hkl=hklf4 } output { directory = os.getcwd() base_name = %(base)s_3 } }""" % {"base": base}) args = ["tst_table_one_3.eff"] table_one.run(args=args, out=null_out(), use_current_directory_if_not_specified=True)
def exercise_miller_arrays_as_cif_block(): from iotbx.cif import reader cif_model = reader(input_string=cif_miller_array, builder=cif.builders.cif_model_builder()).model() ma_builder = cif.builders.miller_array_builder(cif_model['global']) ma1 = ma_builder.arrays()['_refln_F_squared_meas'] mas_as_cif_block = cif.miller_arrays_as_cif_block(ma1, array_type='meas', format="corecif") mas_as_cif_block.add_miller_array( ma1.array(data=flex.complex_double([1 - 1j] * ma1.size())), array_type='calc') mas_as_cif_block.add_miller_array( ma1.array(data=flex.complex_double([1 - 2j] * ma1.size())), column_names=['_refln_A_calc', '_refln_B_calc']) for key in ('_refln_F_squared_meas', '_refln_F_squared_sigma', '_refln_F_calc', '_refln_phase_calc', '_refln_A_calc', '_refln_A_calc'): assert (key in mas_as_cif_block.cif_block.keys()), key # mas_as_cif_block = cif.miller_arrays_as_cif_block(ma1, array_type='meas', format="mmcif") mas_as_cif_block.add_miller_array( ma1.array(data=flex.complex_double([1 - 1j] * ma1.size())), array_type='calc') for key in ('_refln.F_squared_meas', '_refln.F_squared_sigma', '_refln.F_calc', '_refln.phase_calc', '_space_group_symop.operation_xyz', '_cell.length_a', '_refln.index_h'): assert key in mas_as_cif_block.cif_block.keys() # mas_as_cif_block = cif.miller_arrays_as_cif_block( ma1, column_names=[ '_diffrn_refln_intensity_net', '_diffrn_refln_intensity_sigma' ], miller_index_prefix='_diffrn_refln') mas_as_cif_block.add_miller_array( ma1.array(data=flex.std_string(ma1.size(), 'om')), column_name='_diffrn_refln_intensity_u') for key in ('_diffrn_refln_intensity_net', '_diffrn_refln_intensity_sigma', '_diffrn_refln_intensity_u'): assert key in list(mas_as_cif_block.cif_block.keys()) # try: reader(input_string=cif_global) except CifParserError as e: pass else: raise Exception_expected cif_model = reader(input_string=cif_global, strict=False).model() assert not show_diff( str(cif_model), """\ data_1 _c 3 _d 4 """) # exercise adding miller arrays with non-matching indices cs = crystal.symmetry(unit_cell=uctbx.unit_cell((10, 10, 10, 90, 90, 90)), space_group_info=sgtbx.space_group_info(symbol="P1")) mi = flex.miller_index(((1, 0, 0), (1, 2, 3), (2, 3, 4))) ms1 = miller.set(cs, mi) ma1 = miller.array(ms1, data=flex.double((1, 2, 3))) mas_as_cif_block = cif.miller_arrays_as_cif_block( ma1, column_name="_refln.F_meas_au") ms2 = miller.set(cs, mi[:2]) ma2 = miller.array(ms2, data=flex.complex_double([1 - 2j] * ms2.size())) mas_as_cif_block.add_miller_array(ma2, column_names=("_refln.F_calc_au", "_refln.phase_calc")), ms3 = miller.set(cs, flex.miller_index(((1, 0, 0), (5, 6, 7), (2, 3, 4)))) ma3 = miller.array(ms3, data=flex.double((4, 5, 6))) mas_as_cif_block.add_miller_array(ma3, column_name="_refln.F_squared_meas") ms4 = miller.set( cs, flex.miller_index( ((1, 2, 3), (5, 6, 7), (1, 1, 1), (1, 0, 0), (2, 3, 4)))) ma4 = ms4.d_spacings() mas_as_cif_block.add_miller_array(ma4, column_name="_refln.d_spacing") # extract arrays from cif block and make sure we get back what we started with arrays = cif.builders.miller_array_builder( mas_as_cif_block.cif_block).arrays() recycled_arrays = (arrays['_refln.F_meas_au'], arrays['_refln.F_calc_au'], arrays['_refln.F_squared_meas'], arrays['_refln.d_spacing']) for orig, recycled in zip((ma1, ma2, ma3, ma4), recycled_arrays): assert orig.size() == recycled.size() recycled = recycled.customized_copy( anomalous_flag=orig.anomalous_flag()) orig, recycled = orig.common_sets(recycled) assert orig.indices().all_eq(recycled.indices()) assert 
approx_equal(orig.data(), recycled.data(), eps=1e-5) # cif_model = reader(input_string=r3adrsf, builder=cif.builders.cif_model_builder()).model() cs = cif.builders.crystal_symmetry_builder( cif_model["r3adrsf"]).crystal_symmetry ma_builder = cif.builders.miller_array_builder( cif_model['r3adrAsf'], base_array_info=miller.array_info(crystal_symmetry_from_file=cs)) miller_arrays = list(ma_builder.arrays().values()) assert len(miller_arrays) == 4 mas_as_cif_block = cif.miller_arrays_as_cif_block( miller_arrays[0].map_to_asu(), column_names=miller_arrays[0].info().labels, format="corecif") for array in miller_arrays[1:]: labels = array.info().labels if len(labels) > 1: for label in labels: if label.startswith("wavelength_id"): labels.remove(label) mas_as_cif_block.add_miller_array(array=array.map_to_asu(), column_names=array.info().labels) s = StringIO() print(mas_as_cif_block.refln_loop, file=s) assert not show_diff( s.getvalue(), """\ loop_ _refln_index_h _refln_index_k _refln_index_l _refln.crystal_id _refln.wavelength_id _refln.scale_group_code _refln.pdbx_I_plus _refln.pdbx_I_plus_sigma _refln.pdbx_I_minus _refln.pdbx_I_minus_sigma -87 5 46 1 3 1 40.2 40.4 6.7 63.9 -87 5 45 1 3 1 47.8 29.7 35.1 30.5 -87 5 44 1 3 1 18.1 33.2 0.5 34.6 -87 5 43 1 3 1 6.1 45.4 12.9 51.6 -87 5 42 1 3 1 -6.6 45.6 -15.5 55.8 -87 7 37 1 3 1 6.3 43.4 ? ? -87 7 36 1 3 1 -67.2 55.4 ? ? -88 2 44 1 3 1 0 -1 35 38.5 -88 2 43 1 3 1 0 -1 57.4 41.5 -88 4 45 1 3 1 -1 46.1 -9.1 45.6 -88 4 44 1 3 1 -19.8 49.2 0.3 34.7 -88 6 44 1 3 1 -1.8 34.8 ? ? """)
def simple(fmodel, pdb_hierarchy, params=None, log=None, show_results=False): if (params is None): params = master_params().extract() if (log is None): log = sys.stdout crystal_gridding = None unit_cell = None d_min = 1.0 map_1 = None map_2 = None # compute map_1 and map_2 if given F_obs (fmodel exists) if ((params.map_file_name is None) and (params.map_coefficients_file_name is None) and (fmodel is not None)): e_map_obj = fmodel.electron_density_map() coeffs_2 = e_map_obj.map_coefficients( map_type=params.map_2.type, fill_missing=params.map_2.fill_missing_reflections, isotropize=params.map_2.isotropize) fft_map_2 = coeffs_2.fft_map( resolution_factor=params.resolution_factor) crystal_gridding = fft_map_2 fft_map_2.apply_sigma_scaling() map_2 = fft_map_2.real_map_unpadded() coeffs_1 = e_map_obj.map_coefficients( map_type=params.map_1.type, fill_missing=params.map_1.fill_missing_reflections, isotropize=params.map_1.isotropize) fft_map_1 = miller.fft_map(crystal_gridding=crystal_gridding, fourier_coefficients=coeffs_1) fft_map_1.apply_sigma_scaling() map_1 = fft_map_1.real_map_unpadded() unit_cell = fmodel.xray_structure.unit_cell() d_min = fmodel.f_obs().d_min() # or read map coefficents elif (params.map_coefficients_file_name is not None): map_handle = any_file(params.map_coefficients_file_name) crystal_symmetry = get_crystal_symmetry(map_handle) unit_cell = crystal_symmetry.unit_cell() d_min = get_d_min(map_handle) crystal_gridding = maptbx.crystal_gridding( crystal_symmetry.unit_cell(), d_min=d_min, resolution_factor=params.resolution_factor, space_group_info=crystal_symmetry.space_group_info()) coeffs_2 = map_handle.file_server.get_miller_array( params.map_coefficients_label) fft_map_2 = miller.fft_map(crystal_gridding=crystal_gridding, fourier_coefficients=coeffs_2) fft_map_2.apply_sigma_scaling() map_2 = fft_map_2.real_map_unpadded() # or read CCP4 map else: map_handle = any_file(params.map_file_name) unit_cell = map_handle.file_object.unit_cell() sg_info = space_group_info(map_handle.file_object.space_group_number) n_real = map_handle.file_object.unit_cell_grid crystal_gridding = maptbx.crystal_gridding( unit_cell, space_group_info=sg_info, pre_determined_n_real=n_real) map_2 = map_handle.file_object.map_data() # check for origin shift # modified from phenix.command_line.real_space_refine # plan to centralize functionality in another location # ------------------------------------------------------------------------- shift_manager = mmtbx.utils.shift_origin( map_data=map_2, pdb_hierarchy=pdb_hierarchy, crystal_symmetry=map_handle.crystal_symmetry()) if (shift_manager.shift_cart is not None): print("Map origin is not at (0,0,0): shifting the map and model.", file=log) pdb_hierarchy = shift_manager.pdb_hierarchy map_2 = shift_manager.map_data # ------------------------------------------------------------------------- # compute map_1 (Fc) if given a map (fmodel does not exist) if (map_1 is None): xray_structure = pdb_hierarchy.extract_xray_structure( crystal_symmetry=crystal_gridding.crystal_symmetry()) fft_map_1 = compute_map_from_model(d_min, None, xray_structure, crystal_gridding=crystal_gridding) fft_map_1.apply_sigma_scaling() map_1 = fft_map_1.real_map_unpadded() # compute cc assert ((map_1 is not None) and (map_2 is not None)) broadcast(m="Map correlation and map values", log=log) overall_cc = flex.linear_correlation(x=map_1.as_1d(), y=map_2.as_1d()).coefficient() print(" Overall map cc(%s,%s): %6.4f" % (params.map_1.type, params.map_2.type, overall_cc), file=log) detail, 
atom_radius = params.detail, params.atom_radius detail, atom_radius = set_detail_level_and_radius(detail=detail, atom_radius=atom_radius, d_min=d_min) use_hydrogens = params.use_hydrogens if (use_hydrogens is None): if (params.scattering_table == "neutron" or d_min <= 1.2): use_hydrogens = True else: use_hydrogens = False hydrogen_atom_radius = params.hydrogen_atom_radius if (hydrogen_atom_radius is None): if (params.scattering_table == "neutron"): hydrogen_atom_radius = atom_radius else: hydrogen_atom_radius = 1 results = compute(pdb_hierarchy=pdb_hierarchy, unit_cell=unit_cell, fft_n_real=map_1.focus(), fft_m_real=map_1.all(), map_1=map_1, map_2=map_2, detail=detail, atom_radius=atom_radius, use_hydrogens=use_hydrogens, hydrogen_atom_radius=hydrogen_atom_radius) if (show_results): show(log=log, results=results, params=params, detail=detail) return overall_cc, results
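# --- Hedged illustration (not part of the original module) ---
# The core of simple() above is an overall real-space correlation between two
# sigma-scaled maps computed on a common grid.  The sketch below reproduces
# just that step with a synthetic structure; the element list, d_min values and
# resolution factor are arbitrary choices, and only calls already used above
# (fft_map, apply_sigma_scaling, real_map_unpadded, flex.linear_correlation)
# are relied on.
from cctbx import miller, sgtbx
from cctbx.array_family import flex
from cctbx.development import random_structure

xrs = random_structure.xray_structure(
  space_group_info=sgtbx.space_group_info("P 21 21 21"),
  elements=["C"] * 20)
coeffs_a = xrs.structure_factors(d_min=2.0).f_calc()
coeffs_b = xrs.structure_factors(d_min=2.5).f_calc()  # e.g. a lower-resolution map
fft_a = coeffs_a.fft_map(resolution_factor=0.25)
fft_a.apply_sigma_scaling()
map_a = fft_a.real_map_unpadded()
# reuse the first map's gridding so both maps share the same grid
fft_b = miller.fft_map(crystal_gridding=fft_a, fourier_coefficients=coeffs_b)
fft_b.apply_sigma_scaling()
map_b = fft_b.real_map_unpadded()
cc = flex.linear_correlation(x=map_a.as_1d(), y=map_b.as_1d()).coefficient()
print("overall map cc: %6.4f" % cc)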
def exercise_floating_origin_dynamic_weighting(verbose=False): from cctbx import covariance import scitbx.math worst_condition_number_acceptable = 10 # light elements only xs0 = random_structure.xray_structure(elements=['C', 'C', 'C', 'O', 'N'], use_u_aniso=True) msg = "light elements in %s ..." % ( xs0.space_group_info().type().hall_symbol()) if verbose: print(msg, end=' ') fo_sq = xs0.structure_factors(d_min=0.8).f_calc().norm() fo_sq = fo_sq.customized_copy(sigmas=flex.double(fo_sq.size(), 1.)) xs = xs0.deep_copy_scatterers() xs.shake_adp() xs.shake_sites_in_place(rms_difference=0.1) for sc in xs.scatterers(): sc.flags.set_grad_site(True).set_grad_u_aniso(True) ls = least_squares.crystallographic_ls( fo_sq.as_xray_observations(), constraints.reparametrisation( structure=xs, constraints=[], connectivity_table=smtbx.utils.connectivity_table(xs)), weighting_scheme=least_squares.unit_weighting(), origin_fixing_restraints_type= origin_fixing_restraints.atomic_number_weighting) ls.build_up() lambdas = eigensystem.real_symmetric( ls.normal_matrix_packed_u().matrix_packed_u_as_symmetric()).values() # assert the restrained L.S. problem is not too ill-conditionned cond = math.log10(lambdas[0]/lambdas[-1]) if verbose: print("normal matrix condition: %.1f" % cond) assert cond < worst_condition_number_acceptable, msg # one heavy element xs0 = random_structure.xray_structure( space_group_info=sgtbx.space_group_info('hall: P 2yb'), elements=['Zn', 'C', 'C', 'C', 'O', 'N'], use_u_aniso=True) msg = "one heavy element + light elements (synthetic data) in %s ..." % ( xs0.space_group_info().type().hall_symbol()) if verbose: print(msg, end=' ') fo_sq = xs0.structure_factors(d_min=0.8).f_calc().norm() fo_sq = fo_sq.customized_copy(sigmas=flex.double(fo_sq.size(), 1.)) xs = xs0.deep_copy_scatterers() xs.shake_adp() xs.shake_sites_in_place(rms_difference=0.1) for sc in xs.scatterers(): sc.flags.set_grad_site(True).set_grad_u_aniso(True) ls = least_squares.crystallographic_ls( fo_sq.as_xray_observations(), constraints.reparametrisation( structure=xs, constraints=[], connectivity_table=smtbx.utils.connectivity_table(xs)), weighting_scheme=least_squares.mainstream_shelx_weighting(), origin_fixing_restraints_type= origin_fixing_restraints.atomic_number_weighting) ls.build_up() lambdas = eigensystem.real_symmetric( ls.normal_matrix_packed_u().matrix_packed_u_as_symmetric()).values() # assert the restrained L.S. problem is not too ill-conditionned cond = math.log10(lambdas[0]/lambdas[-1]) if verbose: print("normal matrix condition: %.1f" % cond) assert cond < worst_condition_number_acceptable, msg # are esd's for x,y,z coordinates of the same order of magnitude? 
var_cart = covariance.orthogonalize_covariance_matrix( ls.covariance_matrix(), xs.unit_cell(), xs.parameter_map()) var_site_cart = covariance.extract_covariance_matrix_for_sites( flex.size_t_range(len(xs.scatterers())), var_cart, xs.parameter_map()) site_esds = var_site_cart.matrix_packed_u_diagonal() indicators = flex.double() for i in xrange(0, len(site_esds), 3): stats = scitbx.math.basic_statistics(site_esds[i:i+3]) indicators.append(stats.bias_corrected_standard_deviation/stats.mean) assert indicators.all_lt(2) # especially troublesome structure with one heavy element # (contributed by Jonathan Coome) xs0 = xray.structure( crystal_symmetry=crystal.symmetry( unit_cell=(8.4519, 8.4632, 18.7887, 90, 96.921, 90), space_group_symbol="hall: P 2yb"), scatterers=flex.xray_scatterer([ xray.scatterer( #0 label="ZN1", site=(-0.736683, -0.313978, -0.246902), u=(0.000302, 0.000323, 0.000054, 0.000011, 0.000015, -0.000004)), xray.scatterer( #1 label="N3B", site=(-0.721014, -0.313583, -0.134277), u=(0.000268, 0.000237, 0.000055, -0.000027, 0.000005, 0.000006)), xray.scatterer( #2 label="N3A", site=(-0.733619, -0.290423, -0.357921), u=(0.000229, 0.000313, 0.000053, 0.000022, 0.000018, -0.000018)), xray.scatterer( #3 label="C9B", site=(-1.101537, -0.120157, -0.138063), u=(0.000315, 0.000345, 0.000103, 0.000050, 0.000055, -0.000017)), xray.scatterer( #4 label="N5B", site=(-0.962032, -0.220345, -0.222045), u=(0.000274, 0.000392, 0.000060, -0.000011, -0.000001, -0.000002)), xray.scatterer( #5 label="N1B", site=(-0.498153, -0.402742, -0.208698), u=(0.000252, 0.000306, 0.000063, 0.000000, 0.000007, 0.000018)), xray.scatterer( #6 label="C3B", site=(-0.322492, -0.472610, -0.114594), u=(0.000302, 0.000331, 0.000085, 0.000016, -0.000013, 0.000037)), xray.scatterer( #7 label="C4B", site=(-0.591851, -0.368163, -0.094677), u=(0.000262, 0.000255, 0.000073, -0.000034, 0.000027, -0.000004)), xray.scatterer( #8 label="N4B", site=(-0.969383, -0.204624, -0.150014), u=(0.000279, 0.000259, 0.000070, -0.000009, 0.000039, 0.000000)), xray.scatterer( #9 label="N2B", site=(-0.470538, -0.414572, -0.135526), u=(0.000277, 0.000282, 0.000065, 0.000003, 0.000021, -0.000006)), xray.scatterer( #10 label="C8A", site=(-0.679889, -0.158646, -0.385629), u=(0.000209, 0.000290, 0.000078, 0.000060, 0.000006, 0.000016)), xray.scatterer( #11 label="N5A", site=(-0.649210, -0.075518, -0.263412), u=(0.000307, 0.000335, 0.000057, -0.000002, 0.000016, -0.000012)), xray.scatterer( #12 label="C6B", site=(-0.708620, -0.325965, 0.011657), u=(0.000503, 0.000318, 0.000053, -0.000058, 0.000032, -0.000019)), xray.scatterer( #13 label="C10B", site=(-1.179332, -0.083184, -0.202815), u=(0.000280, 0.000424, 0.000136, 0.000094, 0.000006, 0.000013)), xray.scatterer( #14 label="N1A", site=(-0.838363, -0.532191, -0.293213), u=(0.000312, 0.000323, 0.000060, 0.000018, 0.000011, -0.000008)), xray.scatterer( #15 label="C3A", site=(-0.915414, -0.671031, -0.393826), u=(0.000319, 0.000384, 0.000078, -0.000052, -0.000001, -0.000020)), xray.scatterer( #16 label="C1A", site=(-0.907466, -0.665419, -0.276011), u=(0.000371, 0.000315, 0.000079, 0.000006, 0.000036, 0.000033)), xray.scatterer( #17 label="C1B", site=(-0.365085, -0.452753, -0.231927), u=(0.000321, 0.000253, 0.000087, -0.000024, 0.000047, -0.000034)), xray.scatterer( #18 label="C11A", site=(-0.598622, 0.053343, -0.227354), u=(0.000265, 0.000409, 0.000084, 0.000088, -0.000018, -0.000030)), xray.scatterer( #19 label="C2A", site=(-0.958694, -0.755645, -0.337016), u=(0.000394, 0.000350, 0.000106, -0.000057, 0.000027, 
-0.000005)), xray.scatterer( #20 label="C4A", site=(-0.784860, -0.407601, -0.402050), u=(0.000238, 0.000296, 0.000064, 0.000002, 0.000011, -0.000016)), xray.scatterer( #21 label="C5A", site=(-0.784185, -0.399716, -0.475491), u=(0.000310, 0.000364, 0.000062, 0.000044, -0.000011, -0.000017)), xray.scatterer( #22 label="N4A", site=(-0.630284, -0.043981, -0.333143), u=(0.000290, 0.000275, 0.000074, 0.000021, 0.000027, 0.000013)), xray.scatterer( #23 label="C10A", site=(-0.545465, 0.166922, -0.272829), u=(0.000369, 0.000253, 0.000117, 0.000015, -0.000002, -0.000008)), xray.scatterer( #24 label="C9A", site=(-0.567548, 0.102272, -0.339923), u=(0.000346, 0.000335, 0.000103, -0.000016, 0.000037, 0.000023)), xray.scatterer( #25 label="C11B", site=(-1.089943, -0.146930, -0.253779), u=(0.000262, 0.000422, 0.000102, -0.000018, -0.000002, 0.000029)), xray.scatterer( #26 label="N2A", site=(-0.843385, -0.537780, -0.366515), u=(0.000273, 0.000309, 0.000055, -0.000012, -0.000005, -0.000018)), xray.scatterer( #27 label="C7A", site=(-0.674021, -0.136086, -0.457790), u=(0.000362, 0.000378, 0.000074, 0.000043, 0.000034, 0.000016)), xray.scatterer( #28 label="C8B", site=(-0.843625, -0.264182, -0.102023), u=(0.000264, 0.000275, 0.000072, -0.000025, 0.000019, -0.000005)), xray.scatterer( #29 label="C6A", site=(-0.726731, -0.261702, -0.502366), u=(0.000339, 0.000472, 0.000064, 0.000062, -0.000003, 0.000028)), xray.scatterer( #30 label="C5B", site=(-0.577197, -0.376753, -0.020800), u=(0.000349, 0.000353, 0.000066, -0.000082, -0.000022, 0.000014)), xray.scatterer( #31 label="C2B", site=(-0.252088, -0.497338, -0.175057), u=(0.000251, 0.000342, 0.000119, 0.000020, 0.000034, -0.000018)), xray.scatterer( #32 label="C7B", site=(-0.843956, -0.268811, -0.028080), u=(0.000344, 0.000377, 0.000078, -0.000029, 0.000059, -0.000007)), xray.scatterer( #33 label="F4B", site=(-0.680814, -0.696808, -0.115056), u=(0.000670, 0.000408, 0.000109, -0.000099, 0.000139, -0.000031)), xray.scatterer( #34 label="F1B", site=(-0.780326, -0.921249, -0.073962), u=(0.000687, 0.000357, 0.000128, -0.000152, -0.000011, 0.000021)), xray.scatterer( #35 label="B1B", site=(-0.795220, -0.758128, -0.075955), u=(0.000413, 0.000418, 0.000075, 0.000054, 0.000045, 0.000023)), xray.scatterer( #36 label="F2B", site=(-0.945140, -0.714626, -0.105820), u=(0.000584, 0.001371, 0.000108, 0.000420, 0.000067, 0.000134)), xray.scatterer( #37 label="F3B", site=(-0.768914, -0.701660, -0.005161), u=(0.000678, 0.000544, 0.000079, -0.000000, 0.000090, -0.000021)), xray.scatterer( #38 label="F1A", site=(-0.109283, -0.252334, -0.429288), u=(0.000427, 0.001704, 0.000125, 0.000407, 0.000041, 0.000035)), xray.scatterer( #39 label="F4A", site=(-0.341552, -0.262864, -0.502023), u=(0.000640, 0.000557, 0.000081, -0.000074, 0.000042, -0.000052)), xray.scatterer( #40 label="F3A", site=(-0.324533, -0.142292, -0.393215), u=(0.000471, 0.001203, 0.000134, 0.000333, -0.000057, -0.000220)), xray.scatterer( #41 label="F2A", site=(-0.312838, -0.405405, -0.400231), u=(0.002822, 0.000831, 0.000092, -0.000648, 0.000115, 0.000027)), xray.scatterer( #42 label="B1A", site=(-0.271589, -0.268874, -0.430724), u=(0.000643, 0.000443, 0.000079, 0.000040, 0.000052, -0.000034)), xray.scatterer( #43 label="H5B", site=(-0.475808, -0.413802, 0.004402), u=0.005270), xray.scatterer( #44 label="H6B", site=(-0.699519, -0.326233, 0.062781), u=0.019940), xray.scatterer( #45 label="H3B", site=(-0.283410, -0.484757, -0.063922), u=0.029990), xray.scatterer( #46 label="H1B", site=(-0.357103, -0.451819, -0.284911), 
u=0.031070), xray.scatterer( #47 label="H10A", site=(-0.495517, 0.268296, -0.256187), u=0.027610), xray.scatterer( #48 label="H2B", site=(-0.147129, -0.535141, -0.174699), u=0.017930), xray.scatterer( #49 label="H7A", site=(-0.643658, -0.031387, -0.475357), u=0.020200), xray.scatterer( #50 label="H1A", site=(-0.912757, -0.691043, -0.227554), u=0.033320), xray.scatterer( #51 label="H7B", site=(-0.933670, -0.241189, -0.010263), u=0.021310), xray.scatterer( #52 label="H11B", site=(-1.107736, -0.155470, -0.311996), u=0.041500), xray.scatterer( #53 label="H9A", site=(-0.539908, 0.139753, -0.382281), u=0.007130), xray.scatterer( #54 label="H10B", site=(-1.265944, -0.029610, -0.212398), u=0.030910), xray.scatterer( #55 label="H3A", site=(-0.934728, -0.691149, -0.450551), u=0.038950), xray.scatterer( #56 label="H5A", site=(-0.833654, -0.487479, -0.508239), u=0.031150), xray.scatterer( #57 label="H6A", site=(-0.742871, -0.242269, -0.558157), u=0.050490), xray.scatterer( #58 label="H9B", site=(-1.120150, -0.093752, -0.090706), u=0.039310), xray.scatterer( #59 label="H11A", site=(-0.593074, 0.054973, -0.180370), u=0.055810), xray.scatterer( #60 label="H2A", site=(-0.999576, -0.842158, -0.340837), u=0.057030) ])) fo_sq = xs0.structure_factors(d_min=0.8).f_calc().norm() fo_sq = fo_sq.customized_copy(sigmas=flex.double(fo_sq.size(), 1.)) for hydrogen_flag in (True, False): xs = xs0.deep_copy_scatterers() if not hydrogen_flag: xs.select_inplace(~xs.element_selection('H')) xs.shake_adp() xs.shake_sites_in_place(rms_difference=0.1) for sc in xs.scatterers(): sc.flags.set_grad_site(True).set_grad_u_aniso(False) ls = least_squares.crystallographic_ls( fo_sq.as_xray_observations(), constraints.reparametrisation( structure=xs, constraints=[], connectivity_table=smtbx.utils.connectivity_table(xs)), weighting_scheme=least_squares.unit_weighting(), origin_fixing_restraints_type= origin_fixing_restraints.atomic_number_weighting) ls.build_up() lambdas = eigensystem.real_symmetric( ls.normal_matrix_packed_u().matrix_packed_u_as_symmetric()).values() # assert the restrained L.S. problem is not too ill-conditionned cond = math.log10(lambdas[0]/lambdas[-1]) msg = ("one heavy element + light elements (real data) %s Hydrogens: %.1f" % (['without', 'with'][hydrogen_flag], cond)) if verbose: print(msg) assert cond < worst_condition_number_acceptable, msg # are esd's for x,y,z coordinates of the same order of magnitude? var_cart = covariance.orthogonalize_covariance_matrix( ls.covariance_matrix(), xs.unit_cell(), xs.parameter_map()) var_site_cart = covariance.extract_covariance_matrix_for_sites( flex.size_t_range(len(xs.scatterers())), var_cart, xs.parameter_map()) site_esds = var_site_cart.matrix_packed_u_diagonal() indicators = flex.double() for i in xrange(0, len(site_esds), 3): stats = scitbx.math.basic_statistics(site_esds[i:i+3]) indicators.append(stats.bias_corrected_standard_deviation/stats.mean) assert indicators.all_lt(1)
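# --- Hedged illustration (not from the original test) ---
# The "same order of magnitude" check above summarises each atom's x,y,z
# positional esds as (bias-corrected std-dev / mean); values well below 1 mean
# the three esds are comparable.  Toy numbers below, purely to show the calls
# (scitbx.math.basic_statistics and its attributes) used by the test.
import scitbx.math
from cctbx.array_family import flex

esds_one_atom = flex.double([0.0020, 0.0025, 0.0022])
stats = scitbx.math.basic_statistics(esds_one_atom)
indicator = stats.bias_corrected_standard_deviation / stats.mean
print("esd isotropy indicator: %.3f" % indicator)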
def exercise_space_group_info(): i = sgtbx.space_group_info("P 1") assert i.type().number() == 1 i = sgtbx.space_group_info("P -1") assert i.type().number() == 2 i = sgtbx.space_group_info("P 2", "I", space_group_t_den=24) assert str(i) == "P 1 1 2" assert i.group().t_den() == 24 i = sgtbx.space_group_info("P32 (a,b,3c)", space_group_t_den=36) assert str(i) == "P 32 (a,b,3*c)" assert i.group().t_den() == 36 i = sgtbx.space_group_info("P 2", "I") assert str(i) == "P 1 1 2" i = sgtbx.space_group_info("P 2", "a") assert str(i) == "P 1 2 1" assert i.group() == i.type().group() assert i.reciprocal_space_asu().reference_as_string() \ == "k>=0 and (l>0 or (l==0 and h>=0))" assert str(i.brick()) == "0<=x<=1/2; 0<=y<1; 0<=z<1" assert i.wyckoff_table().space_group_type().group() == i.type().group() assert len(i.structure_seminvariants().vectors_and_moduli()) == 3 assert i.number_of_continuous_allowed_origin_shifts() == 1 for sg_number in (1, 3, 15, 75, 143, 195): assert approx_equal( sgtbx.space_group_info(sg_number).any_compatible_unit_cell( 100).volume(), 100) s = pickle.dumps(i) j = pickle.loads(s) assert str(i) == str(j) i = sgtbx.space_group_info("B 2", "i") assert not i.is_reference_setting() assert str(i.change_of_basis_op_to_reference_setting().c()) == "-x,z,y" assert str(i.reference_setting()) == "C 1 2 1" assert str(i.as_reference_setting()) == "C 1 2 1" assert str(i.primitive_setting()) == "C 1 2 1 (-x+y,z,x+y)" asu = i.direct_space_asu() assert len(asu.cuts) == 6 assert sgtbx.space_group(asu.hall_symbol) == i.group() j = i.primitive_setting() asu = j.direct_space_asu() assert len(asu.cuts) == 6 assert sgtbx.space_group(asu.hall_symbol) == j.group() i = sgtbx.space_group_info(number=19) assert [ str(sgtbx.space_group_info(group=group)) for group in i.reflection_intensity_equivalent_groups() ] == [ "P 2 2 2", "P 2 2 21", "P 21 2 2", "P 2 21 2", "P 21 21 2", "P 2 21 21", "P 21 2 21", "P 21 21 21" ] assert len(i.reflection_intensity_equivalent_groups(anomalous_flag=False)) \ == 127 # i = sgtbx.space_group_info(symbol="C 1 2 1") assert str(i.change_of_basis_op_to_reference_setting().c()) == "x,y,z" assert approx_equal( i.subtract_continuous_allowed_origin_shifts( translation_frac=[1, 2, 3]), [1, 0, 3]) i = sgtbx.space_group_info(symbol="B 2 1 1") assert str(i.change_of_basis_op_to_reference_setting().c()) == "z,x,y" assert approx_equal( i.subtract_continuous_allowed_origin_shifts( translation_frac=[1, 2, 3]), [0, 2, 3]) # for space_group_symbol, addl_smx, uhm in [ ("P 21 21 21", "x+1/2,y+1/2,z", "C 2 2 21 (a-1/4,b,c)"), ("C 1 2 1", "x,y+1/2,z", "P 1 2 1 (2*a,2*b,c)") ]: for f in range(1, 12 + 1): sg_t_den = sgtbx.sg_t_den * f cb_r_den = sgtbx.cb_r_den * f cb_t_den = sgtbx.cb_t_den * f # sg_i = sgtbx.space_group_info(symbol=space_group_symbol) t = sg_i.type() assert sg_i.type(tidy_cb_op=True) is t assert sg_i.type(tidy_cb_op=False) is not t if (f != 1): t = sg_i.type() c = t.cb_op().c() assert c.r().den() == sgtbx.cb_r_den assert c.t().den() == sgtbx.cb_t_den for t_den in [cb_t_den, None]: if (t_den is not None): tt = sg_i.type(t_den=t_den) assert tt is not t else: assert sg_i.type(t_den=t_den) is tt c = tt.cb_op().c() assert c.r().den() == sgtbx.cb_r_den assert c.t().den() == cb_t_den for r_den in [cb_r_den, None]: if (r_den is not None): tr = sg_i.type(r_den=r_den) assert tr is not tt else: assert sg_i.type(r_den=r_den) is tr c = tr.cb_op().c() assert c.r().den() == cb_r_den assert c.t().den() == cb_t_den # sg_i = sgtbx.space_group_info(symbol=space_group_symbol, space_group_t_den=sg_t_den) sgx = 
sgtbx.space_group(sg_i.group()) rt_mx = sgtbx.rt_mx(addl_smx) rt_mx = rt_mx.new_denominators(sgx.r_den(), sgx.t_den()) sgx.expand_smx(rt_mx) sgx_i = sgx.info() assert str(sgx_i) == uhm t = sgx_i.type() c = t.cb_op().c() assert c.r().den() == cb_r_den assert c.t().den() == cb_t_den for r_den, t_den in [(None, None), (cb_r_den, None), (None, cb_t_den), (cb_r_den, cb_t_den)]: assert sgx_i.type(r_den=r_den, t_den=t_den) is t assert sgx_i.type(tidy_cb_op=False, r_den=r_den, t_den=t_den) is not t for i, op in enumerate(sgx_i.group()): assert sgx_i.cif_symmetry_code(op) == "%i" % (i + 1) assert sgx_i.cif_symmetry_code(op, full_code=True, sep=" ") == "%i 555" % (i + 1) tr = [random.randint(-4, 4) for j in range(3)] rt_mx = sgtbx.rt_mx( op.r(), op.t().plus( sgtbx.tr_vec(tr, tr_den=1).new_denominator(op.t().den()))) assert sgx_i.cif_symmetry_code(rt_mx, full_code=True) \ == "%i_%i%i%i" %(i+1, 5+tr[0], 5+tr[1], 5+tr[2])
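# --- Hedged illustration (not from the original test) ---
# A few of the sgtbx.space_group_info queries exercised above, shown
# standalone; the symbols are arbitrary choices and every call appears in the
# test itself.
from cctbx import sgtbx

info = sgtbx.space_group_info("B 2", "i")  # a non-reference setting of C2
print(info.is_reference_setting())                          # False
print(info.change_of_basis_op_to_reference_setting().c())   # -x,z,y
print(info.as_reference_setting())                          # C 1 2 1
print(sgtbx.space_group_info(number=19))                    # P 21 21 21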
def refined_settings_factory_from_refined_triclinic(params, experiments, reflections, i_setting=None, lepage_max_delta=5.0, nproc=1, refiner_verbosity=0): assert len(experiments.crystals()) == 1 crystal = experiments.crystals()[0] used_reflections = copy.deepcopy(reflections) UC = crystal.get_unit_cell() from rstbx.dps_core.lepage import iotbx_converter Lfat = refined_settings_list() for item in iotbx_converter(UC, lepage_max_delta): Lfat.append(bravais_setting(item)) supergroup = Lfat.supergroup() triclinic = Lfat.triclinic() triclinic_miller = used_reflections['miller_index'] # assert no transformation between indexing and bravais list assert str(triclinic['cb_op_inp_best']) == "a,b,c" Nset = len(Lfat) for j in xrange(Nset): Lfat[j].setting_number = Nset - j from cctbx.crystal_orientation import crystal_orientation from cctbx import sgtbx from scitbx import matrix for j in xrange(Nset): cb_op = Lfat[j]['cb_op_inp_best'].c().as_double_array()[0:9] orient = crystal_orientation(crystal.get_A(), True) orient_best = orient.change_basis(matrix.sqr(cb_op).transpose()) constrain_orient = orient_best.constrain(Lfat[j]['system']) bravais = Lfat[j]["bravais"] cb_op_best_ref = Lfat[j][ 'best_subsym'].change_of_basis_op_to_reference_setting() space_group = sgtbx.space_group_info( number=bravais_lattice_to_lowest_symmetry_spacegroup_number[ bravais]).group() space_group = space_group.change_basis(cb_op_best_ref.inverse()) bravais = str(bravais_types.bravais_lattice(group=space_group)) Lfat[j]["bravais"] = bravais Lfat[j].unrefined_crystal = dials_crystal_from_orientation( constrain_orient, space_group) args = [] for subgroup in Lfat: args.append((params, subgroup, used_reflections, experiments, refiner_verbosity)) results = easy_mp.parallel_map(func=refine_subgroup, iterable=args, processes=nproc, method="multiprocessing", preserve_order=True, asynchronous=True, preserve_exception_message=True) for i, result in enumerate(results): Lfat[i] = result identify_likely_solutions(Lfat) return Lfat
def run(args, out=sys.stdout, validated=False): show_citation(out=out) if (len(args) == 0): master_phil.show(out=out) print('\nUsage: phenix.map_comparison <CCP4> <CCP4>\n',\ ' phenix.map_comparison <CCP4> <MTZ> mtz_label_1=<label>\n',\ ' phenix.map_comparison <MTZ 1> mtz_label_1=<label 1> <MTZ 2> mtz_label_2=<label 2>\n', file=out) sys.exit() # process arguments params = None input_attributes = ['map_1', 'mtz_1', 'map_2', 'mtz_2'] try: # automatic parsing params = phil.process_command_line_with_files( args=args, master_phil=master_phil).work.extract() except Exception: # map_file_def only handles one map phil from libtbx.phil.command_line import argument_interpreter arg_int = argument_interpreter(master_phil=master_phil) command_line_args = list() map_files = list() for arg in args: if (os.path.isfile(arg)): map_files.append(arg) else: command_line_args.append(arg_int.process(arg)) params = master_phil.fetch(sources=command_line_args).extract() # check if more files are necessary n_defined = 0 for attribute in input_attributes: if (getattr(params.input, attribute) is not None): n_defined += 1 # matches files to phil scope, stops once there is sufficient data for map_file in map_files: if (n_defined < 2): current_map = file_reader.any_file(map_file) if (current_map.file_type == 'ccp4_map'): n_defined += 1 if (params.input.map_1 is None): params.input.map_1 = map_file elif (params.input.map_2 is None): params.input.map_2 = map_file elif (current_map.file_type == 'hkl'): n_defined += 1 if (params.input.mtz_1 is None): params.input.mtz_1 = map_file elif (params.input.mtz_2 is None): params.input.mtz_2 = map_file else: print('WARNING: only the first two files are used', file=out) break # validate arguments (GUI sets validated to true, no need to run again) assert (params is not None) if (not validated): validate_params(params) # --------------------------------------------------------------------------- # check if maps need to be generated from mtz n_maps = 0 maps = list() map_names = list() for attribute in input_attributes: filename = getattr(params.input, attribute) if (filename is not None): map_names.append(filename) current_map = file_reader.any_file(filename) maps.append(current_map) if (current_map.file_type == 'ccp4_map'): n_maps += 1 # construct maps, if necessary crystal_gridding = None m1 = None m2 = None # 1 map, 1 mtz file if (n_maps == 1): for current_map in maps: if (current_map.file_type == 'ccp4_map'): uc = current_map.file_object.unit_cell() sg_info = space_group_info( current_map.file_object.space_group_number) n_real = current_map.file_object.unit_cell_grid crystal_gridding = maptbx.crystal_gridding( uc, space_group_info=sg_info, pre_determined_n_real=n_real) m1 = current_map.file_object.map_data() if (crystal_gridding is not None): label = None for attribute in [('mtz_1', 'mtz_label_1'), ('mtz_2', 'mtz_label_2')]: filename = getattr(params.input, attribute[0]) label = getattr(params.input, attribute[1]) if ((filename is not None) and (label is not None)): break # labels will match currently open mtz file for current_map in maps: if (current_map.file_type == 'hkl'): m2 = miller.fft_map( crystal_gridding=crystal_gridding, fourier_coefficients=current_map.file_server. 
get_miller_array( label)).apply_sigma_scaling().real_map_unpadded() else: raise Sorry('Gridding is not defined.') # 2 mtz files elif (n_maps == 0): crystal_symmetry = get_crystal_symmetry(maps[0]) d_min = min(get_d_min(maps[0]), get_d_min(maps[1])) crystal_gridding = maptbx.crystal_gridding( crystal_symmetry.unit_cell(), d_min=d_min, resolution_factor=params.options.resolution_factor, space_group_info=crystal_symmetry.space_group_info()) m1 = miller.fft_map( crystal_gridding=crystal_gridding, fourier_coefficients=maps[0].file_server.get_miller_array( params.input.mtz_label_1)).apply_sigma_scaling( ).real_map_unpadded() m2 = miller.fft_map( crystal_gridding=crystal_gridding, fourier_coefficients=maps[1].file_server.get_miller_array( params.input.mtz_label_2)).apply_sigma_scaling( ).real_map_unpadded() # 2 maps else: m1 = maps[0].file_object.map_data() m2 = maps[1].file_object.map_data() # --------------------------------------------------------------------------- # analyze maps assert ((m1 is not None) and (m2 is not None)) # show general statistics s1 = maptbx.more_statistics(m1) s2 = maptbx.more_statistics(m2) show_overall_statistics(out=out, s=s1, header="Map 1 (%s):" % map_names[0]) show_overall_statistics(out=out, s=s2, header="Map 2 (%s):" % map_names[1]) cc_input_maps = flex.linear_correlation(x=m1.as_1d(), y=m2.as_1d()).coefficient() print("CC, input maps: %6.4f" % cc_input_maps, file=out) # compute CCpeak cc_peaks = list() m1_he = maptbx.volume_scale(map=m1, n_bins=10000).map_data() m2_he = maptbx.volume_scale(map=m2, n_bins=10000).map_data() cc_quantile = flex.linear_correlation(x=m1_he.as_1d(), y=m2_he.as_1d()).coefficient() print("CC, quantile rank-scaled (histogram equalized) maps: %6.4f" % \ cc_quantile, file=out) print("Peak correlation:", file=out) print(" cutoff CCpeak", file=out) cutoffs = [i / 100. for i in range(1, 90)] + [i / 1000 for i in range(900, 1000)] for cutoff in cutoffs: cc_peak = maptbx.cc_peak(map_1=m1_he, map_2=m2_he, cutoff=cutoff) print(" %3.2f %7.4f" % (cutoff, cc_peak), file=out) cc_peaks.append((cutoff, cc_peak)) # compute discrepancy function (D-function) discrepancies = list() cutoffs = flex.double(cutoffs) df = maptbx.discrepancy_function(map_1=m1_he, map_2=m2_he, cutoffs=cutoffs) print("Discrepancy function:", file=out) print(" cutoff D", file=out) for c, d in zip(cutoffs, df): print(" %3.2f %7.4f" % (c, d), file=out) discrepancies.append((c, d)) # compute and output histograms h1 = maptbx.histogram(map=m1, n_bins=10000) h2 = maptbx.histogram(map=m2, n_bins=10000) print("Map histograms:", file=out) print("Map 1 (%s) Map 2 (%s)"%\ (params.input.map_1,params.input.map_2), file=out) print("(map_value,cdf,frequency) <> (map_value,cdf,frequency)", file=out) for a1, c1, v1, a2, c2, v2 in zip(h1.arguments(), h1.c_values(), h1.values(), h2.arguments(), h2.c_values(), h2.values()): print("(%9.5f %9.5f %9.5f) <> (%9.5f %9.5f %9.5f)"%\ (a1,c1,v1, a2,c2,v2), file=out) # store results s1_dict = create_statistics_dict(s=s1) s2_dict = create_statistics_dict(s=s2) results = dict() inputs = list() for attribute in input_attributes: filename = getattr(params.input, attribute) if (filename is not None): inputs.append(filename) assert (len(inputs) == 2) results['map_files'] = inputs results['map_statistics'] = (s1_dict, s2_dict) results['cc_input_maps'] = cc_input_maps results['cc_quantile'] = cc_quantile results['cc_peaks'] = cc_peaks results['discrepancies'] = discrepancies # TODO, verify h1,h2 are not dicts, e.g. .values is py2/3 compat. 
# I assume it is here results['map_histograms'] = ((h1.arguments(), h1.c_values(), h1.values()), (h2.arguments(), h2.c_values(), h2.values())) return results
def metric_supergroup(group): return sgtbx.space_group_info( group=group).type().expand_addl_generators_of_euclidean_normalizer( True, True).build_derived_acentric_group()
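# --- Hedged usage sketch ---
# metric_supergroup() above wraps a chain of sgtbx calls; calling it on an
# orthorhombic group might look like this (the choice of P 21 21 21 is
# arbitrary, and the printed supergroup is whatever sgtbx derives).
from cctbx import sgtbx

g = sgtbx.space_group_info("P 21 21 21").group()
print(sgtbx.space_group_info(group=metric_supergroup(g)))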
raise except Exception: pass fullprof_out = easy_run.fully_buffered(command="fp2k "+os.path.split(pcrfile)[1]) \ .raise_if_errors() \ .stdout_lines if (0 or verbose): for l in fullprof_out: print(l) f = open(pcrfile + ".out", "r") fullprof_out = f.readlines() f.close() sys.stderr.flush() if (0 or verbose): for l in fullprof_out: print(l[:-1]) sys.stdout.flush() os.chdir(old_cwd) if __name__ == '__main__': # just a little test for debugging from cctbx import sgtbx from cctbx.development import random_structure xrs = random_structure.xray_structure( space_group_info=sgtbx.space_group_info(number=1), elements=["C"] * 10, u_iso=0.005) print(list(simulate_powder_pattern(xrs) [0])) #, filename="/tmp/test.pcr", keep_results=True))
def _mosflm_refine_cell(self, idxr, set_spacegroup=None): '''Perform the refinement of the unit cell. This will populate all of the information needed to perform the integration.''' # FIXME this will die after #1285 #if not self.get_integrater_indexer(): #Debug.write('Replacing indexer of %s with self at %d' % \ #(str(self.get_integrater_indexer()), __line__)) #self.set_integrater_indexer(self) #idxr = self.get_integrater_indexer() if not idxr.get_indexer_payload('mosflm_orientation_matrix'): raise RuntimeError('unexpected situation in indexing') lattice = idxr.get_indexer_lattice() mosaic = idxr.get_indexer_mosaic() cell = idxr.get_indexer_cell() beam_centre = idxr.get_indexer_beam_centre() # bug # 3174 - if mosaic is very small (here defined to be # 0.25 x osc_width) then set to this minimum value. phi_width = idxr.get_phi_width() if mosaic < 0.25 * phi_width: mosaic = 0.25 * phi_width if idxr.get_indexer_payload('mosflm_beam_centre'): beam_centre = idxr.get_indexer_payload('mosflm_beam_centre') distance = idxr.get_indexer_distance() matrix = idxr.get_indexer_payload('mosflm_orientation_matrix') integration_params = idxr.get_indexer_payload( 'mosflm_integration_parameters') if integration_params is None: integration_params = {} if integration_params: if 'separation' in integration_params: self.set_refiner_parameter( 'mosflm', 'separation', '%f %f' % tuple(integration_params['separation'])) if 'raster' in integration_params: self.set_refiner_parameter( 'mosflm', 'raster', '%d %d %d %d %d' % tuple(integration_params['raster'])) idxr.set_indexer_payload('mosflm_integration_parameters', None) spacegroup_number = lattice_to_spacegroup(lattice) # copy these into myself for later reference, if indexer # is not myself - everything else is copied via the # cell refinement process... from cctbx import sgtbx from dxtbx.model import Crystal from dxtbx.model.detector_helpers import set_mosflm_beam_centre experiment = idxr.get_indexer_experiment_list()[0] set_mosflm_beam_centre(experiment.detector, experiment.beam, beam_centre) space_group = sgtbx.space_group_info(number=spacegroup_number).group() a, b, c = experiment.crystal.get_real_space_vectors() experiment.crystal = Crystal(a, b, c, space_group=space_group) # FIXME surely these have been assigned further up?! if not self._mosflm_cell_ref_images: self._mosflm_cell_ref_images = self._refine_select_images(mosaic) f = open( os.path.join(self.get_working_directory(), 'xiaindex-%s.mat' % lattice), 'w') for m in matrix: f.write(m) f.close() # then start the cell refinement refiner = MosflmRefineCell() refiner.set_working_directory(self.get_working_directory()) auto_logfiler(refiner) if self._mosflm_gain: refiner.set_gain(self._mosflm_gain) refiner.set_template(os.path.basename(idxr.get_template())) refiner.set_directory(idxr.get_directory()) refiner.set_input_mat_file('xiaindex-%s.mat' % lattice) refiner.set_output_mat_file('xiarefine.mat') refiner.set_beam_centre(beam_centre) refiner.set_unit_cell(cell) refiner.set_distance(distance) if set_spacegroup: refiner.set_space_group_number(set_spacegroup) else: refiner.set_space_group_number(spacegroup_number) # FIXME 18/JUN/08 - it may help to have an overestimate # of the mosaic spread in here as it *may* refine down # better than up... - this is not a good idea as it may # also not refine at all! - 12972 # integration failed # Bug # 3103 if self._mosflm_cell_ref_double_mosaic: mosaic *= 2.0 refiner.set_mosaic(mosaic) # if set, use the resolution for cell refinement - see # bug # 2078... 
if self._mosflm_cell_ref_resolution: refiner.set_resolution(self._mosflm_cell_ref_resolution) refiner.set_fix_mosaic(self._mosflm_postref_fix_mosaic) # note well that the beam centre is coming from indexing so # should be already properly handled #if idxr.get_wavelength_prov() == 'user': #refiner.set_wavelength(idxr.get_wavelength()) # belt + braces mode - only to be used when considering failover, # will run an additional step of autoindexing prior to cell # refinement, to be used only after proving that not going it # will result in cell refinement failure - will use the first # wedge... N.B. this is only useful if the indexer is Labelit # not Mosflm... refiner.set_add_autoindex(self._mosflm_cell_ref_add_autoindex) # get all of the stored parameter values parameters = self.get_refiner_parameters('mosflm') refiner.update_parameters(parameters) detector = idxr.get_detector() detector_width, detector_height = detector[0].get_image_size_mm() lim_x = 0.5 * detector_width lim_y = 0.5 * detector_height Debug.write('Scanner limits: %.1f %.1f' % (lim_x, lim_y)) refiner.set_limits(lim_x, lim_y) refiner.set_images(self._mosflm_cell_ref_images) failover = PhilIndex.params.xia2.settings.failover if failover and not self._mosflm_cell_ref_add_autoindex: refiner.set_ignore_cell_refinement_failure(True) refiner.run() # then look to see if the cell refinement worked ok - if it # didn't then this may indicate that the lattice was wrongly # selected. cell_refinement_ok = refiner.cell_refinement_ok() if not cell_refinement_ok: Debug.write('Repeating cell refinement...') self.set_integrater_prepare_done(False) self._mosflm_cell_ref_add_autoindex = True return [0.0], [0.0] rms_values = refiner.get_rms_values() background_residual = refiner.get_background_residual() self._refinr_cell = refiner.get_refined_unit_cell() distance = refiner.get_refined_distance2() experiment = idxr.get_indexer_experiment_list()[0] from xia2.Wrappers.Mosflm.AutoindexHelpers import set_distance set_distance(experiment.detector, distance) self.set_refiner_parameter('mosflm', 'distortion yscale', refiner.get_refined_distortion_yscale()) self.set_refiner_parameter('mosflm', 'raster', refiner.get_raster()) #integration_params['distortion yscale'] \ #= refiner.get_refined_distortion_yscale() #integration_params['raster'] = refiner.get_raster() separation = refiner.get_separation() if separation is not None: self.set_refiner_parameter('mosflm', 'separation', '%s %s' % refiner.get_separation()) #integration_params['separation'] = refiner.get_separation() self.set_refiner_parameter('mosflm', 'beam', '%s %s' % refiner.get_refined_beam_centre()) self.set_refiner_parameter('mosflm', 'distance', refiner.get_refined_distance()) self.set_refiner_parameter('mosflm', 'distortion tilt', refiner.get_refined_distortion_tilt()) self.set_refiner_parameter('mosflm', 'distortion twist', refiner.get_refined_distortion_twist()) integration_params['beam'] = tuple( float(b) for b in refiner.get_refined_beam_centre()) integration_params['distance'] = refiner.get_refined_distance() integration_params[ 'distortion tilt'] = refiner.get_refined_distortion_tilt() integration_params[ 'distortion twist'] = refiner.get_refined_distortion_twist() idxr._indxr_mosaic = refiner.get_refined_mosaic() idxr.set_indexer_payload( 'mosflm_orientation_matrix', open(os.path.join(self.get_working_directory(), 'xiarefine.mat'), 'r').readlines()) self.set_refiner_payload( 'mosflm_orientation_matrix', idxr.get_indexer_payload('mosflm_orientation_matrix')) 
self.set_refiner_payload('mosaic', refiner.get_refined_mosaic()) self.set_refiner_payload('beam', integration_params['beam']) self.set_refiner_payload('distance', integration_params['distance']) from xia2.Wrappers.Mosflm.AutoindexHelpers import crystal_model_from_mosflm_mat # make a dxtbx crystal_model object from the mosflm matrix experiment = idxr.get_indexer_experiment_list()[0] crystal_model = crystal_model_from_mosflm_mat( idxr._indxr_payload['mosflm_orientation_matrix'], unit_cell=refiner.get_refined_unit_cell(), space_group=experiment.crystal.get_space_group()) experiment.crystal = crystal_model #self.set_refiner_payload( #'mosflm_integration_parameters', integration_params) self._refinr_refined_experiment_list = ExperimentList([experiment]) return rms_values, background_residual
fill_missing=False) crystal_gridding = fmodel.f_obs().crystal_gridding( d_min=fmodel.f_obs().d_min(), symmetry_flags=maptbx.use_space_group_symmetry, resolution_factor=1. / 3) # compute OMIT map r = cfom.run(crystal_gridding=crystal_gridding, fmodel=fmodel.deep_copy(), full_resolution_map=False, max_boxes=70, neutral_volume_box_cushion_width=0, box_size_as_fraction=0.3, log=False) ccs = get_cc(mc1=mc1, mc2=r.map_coefficients(filter_noise=False), xrs=xrs) assert flex.mean(ccs) > 0.8 print(" CC(min/max,mean)", ccs.min_max_mean().as_tuple()) def run_call_back(flags, space_group_info): run(space_group_info) if (__name__ == "__main__"): t0 = time.time() debug_utils.parse_options_loop_space_groups(sys.argv[1:], run_call_back, symbols_to_stdout=True, symbols_to_stderr=False) run(sgtbx.space_group_info("R3:R")) print("Time: %6.4f" % (time.time() - t0))
def run(args, command_name="phenix.reflection_file_converter", simply_return_all_miller_arrays=False): command_line = (option_parser( usage="%s [options] reflection_file ..." % command_name, description="Example: %s w1.sca --mtz ." % command_name ).enable_symmetry_comprehensive().option( None, "--weak_symmetry", action="store_true", default=False, help="symmetry on command line is weaker than symmetry found in files" ).enable_resolutions().option( None, "--label", action="store", type="string", help="Substring of reflection data label or number", metavar="STRING" ).option( None, "--non_anomalous", action="store_true", default=False, help="Averages Bijvoet mates to obtain a non-anomalous array" ).option( None, "--r_free_label", action="store", type="string", help="Substring of reflection data label or number", metavar="STRING" ).option( None, "--r_free_test_flag_value", action="store", type="int", help="Value in R-free array indicating assignment to free set.", metavar="FLOAT" ).option( None, "--generate_r_free_flags", action="store_true", default=False, help="Generates a new array of random R-free flags" " (MTZ and CNS output only)." ).option( None, "--use_lattice_symmetry_in_r_free_flag_generation", dest="use_lattice_symmetry_in_r_free_flag_generation", action="store_true", default=True, help="group twin/pseudo symmetry related reflections together" " in r-free set (this is the default)." ).option( None, "--no_lattice_symmetry_in_r_free_flag_generation", dest="use_lattice_symmetry_in_r_free_flag_generation", action="store_false", help="opposite of --use-lattice-symmetry-in-r-free-flag-generation" ).option( None, "--r_free_flags_fraction", action="store", default=0.10, type="float", help="Target fraction free/work reflections (default: 0.10).", metavar="FLOAT" ).option( None, "--r_free_flags_max_free", action="store", default=2000, type="int", help="Maximum number of free reflections (default: 2000).", metavar="INT" ).option( None, "--r_free_flags_format", choices=( "cns", "ccp4", "shelx"), default="cns", help="Convention for generating R-free flags", metavar="cns|ccp4" ).option( None, "--output_r_free_label", action="store", type="string", help= "Label for newly generated R-free flags (defaults to R-free-flags)", default="R-free-flags", metavar="STRING" ).option( None, "--random_seed", action="store", type="int", help="Seed for random number generator (affects generation of" " R-free flags).", metavar="INT" ).option( None, "--change_of_basis", action="store", type="string", help="Change-of-basis operator: h,k,l or x,y,z" " or to_reference_setting, to_primitive_setting, to_niggli_cell," " to_inverse_hand", metavar="STRING" ).option( None, "--eliminate_invalid_indices", action="store_true", default=False, help="Remove indices which are invalid given the change of basis desired" ).option( None, "--expand_to_p1", action="store_true", default=False, help="Generates all symmetrically equivalent reflections." " The space group symmetry is reset to P1." " May be used in combination with --change_to_space_group to" " lower the symmetry." 
).option( None, "--change_to_space_group", action="store", type="string", help="Changes the space group and merges equivalent reflections" " if necessary", metavar="SYMBOL|NUMBER" ).option( None, "--write_mtz_amplitudes", action="store_true", default=False, help="Converts intensities to amplitudes before writing MTZ format;" " requires --mtz_root_label" ).option( None, "--write_mtz_intensities", action="store_true", default=False, help="Converts amplitudes to intensities before writing MTZ format;" " requires --mtz_root_label" ).option( None, "--remove_negatives", action="store_true", default=False, help="Remove negative intensities or amplitudes from the data set" ).option( None, "--massage_intensities", action="store_true", default=False, help="'Treat' negative intensities to get a positive amplitude." " |Fnew| = sqrt((Io+sqrt(Io**2 +2sigma**2))/2.0). Requires" " intensities as input and the flags --mtz," " --write_mtz_amplitudes and --mtz_root_label." ).option( None, "--scale_max", action="store", type="float", help="Scales data such that the maximum is equal to the given value", metavar="FLOAT" ).option( None, "--scale_factor", action="store", type="float", help="Multiplies data with the given factor", metavar="FLOAT" ).option( None, "--sca", action="store", type="string", help= "write data to Scalepack FILE ('--sca .' copies name of input file)", metavar="FILE" ).option( None, "--mtz", action="store", type="string", help="write data to MTZ FILE ('--mtz .' copies name of input file)", metavar="FILE" ).option( None, "--mtz_root_label", action="store", type="string", help="Root label for MTZ file (e.g. Fobs)", metavar="STRING" ).option( None, "--cns", action="store", type="string", help="write data to CNS FILE ('--cns .' copies name of input file)", metavar="FILE" ).option( None, "--shelx", action="store", type="string", help="write data to SHELX FILE ('--shelx .' copies name of input file)", metavar="FILE")).process(args=args) co = command_line.options if (co.random_seed is not None): random.seed(co.random_seed) flex.set_random_seed(value=co.random_seed) if (co.write_mtz_amplitudes and co.write_mtz_intensities): print() print("--write_mtz_amplitudes and --write_mtz_intensities" \ " are mutually exclusive.") print() return None if (co.write_mtz_amplitudes or co.write_mtz_intensities): if (co.mtz_root_label is None): print() print("--write_mtz_amplitudes and --write_mtz_intensities" \ " require --mtz_root_label.") print() return None if (co.scale_max is not None and co.scale_factor is not None): print() print("--scale_max and --scale_factor are mutually exclusive.") print() return None if (len(command_line.args) == 0): command_line.parser.show_help() return None all_miller_arrays = reflection_file_reader.collect_arrays( file_names=command_line.args, crystal_symmetry=None, force_symmetry=False, merge_equivalents=False, discard_arrays=False, verbose=1) if (simply_return_all_miller_arrays): return all_miller_arrays if (len(all_miller_arrays) == 0): print() print("No reflection data found in input file%s." 
% (plural_s(len(command_line.args))[1])) print() return None label_table = reflection_file_utils.label_table( miller_arrays=all_miller_arrays) selected_array = label_table.select_array(label=co.label, command_line_switch="--label") if (selected_array is None): return None r_free_flags = None r_free_info = None if (co.r_free_label is not None): r_free_flags = label_table.match_data_label( label=co.r_free_label, command_line_switch="--r_free_label") if (r_free_flags is None): return None r_free_info = str(r_free_flags.info()) if (not r_free_flags.is_bool_array()): test_flag_value = reflection_file_utils.get_r_free_flags_scores( miller_arrays=[r_free_flags], test_flag_value=co.r_free_test_flag_value).test_flag_values[0] if (test_flag_value is None): if (co.r_free_test_flag_value is None): raise Sorry( "Cannot automatically determine r_free_test_flag_value." " Please use --r_free_test_flag_value to specify a value." ) else: raise Sorry("Invalid --r_free_test_flag_value.") r_free_flags = r_free_flags.customized_copy( data=(r_free_flags.data() == test_flag_value)) print("Selected data:") print(" ", selected_array.info()) print(" Observation type:", selected_array.observation_type()) print() if (r_free_info is not None): print("R-free flags:") print(" ", r_free_info) print() processed_array = selected_array.customized_copy( crystal_symmetry=selected_array.join_symmetry( other_symmetry=command_line.symmetry, force=not co.weak_symmetry)).set_observation_type( selected_array.observation_type()) if (r_free_flags is not None): r_free_flags = r_free_flags.customized_copy( crystal_symmetry=processed_array) print("Input crystal symmetry:") crystal.symmetry.show_summary(processed_array, prefix=" ") print() if (processed_array.unit_cell() is None): command_line.parser.show_help() print( "Unit cell parameters unknown. Please use --symmetry or --unit_cell." ) print() return None if (processed_array.space_group_info() is None): command_line.parser.show_help() print("Space group unknown. Please use --symmetry or --space_group.") print() return None if (r_free_flags is not None): r_free_flags = r_free_flags.customized_copy( crystal_symmetry=processed_array) if (co.change_of_basis is not None): processed_array, cb_op = processed_array.apply_change_of_basis( change_of_basis=co.change_of_basis, eliminate_invalid_indices=co.eliminate_invalid_indices) if (r_free_flags is not None): r_free_flags = r_free_flags.change_basis(cb_op=cb_op) if (not processed_array.is_unique_set_under_symmetry()): print("Merging symmetry-equivalent values:") merged = processed_array.merge_equivalents() merged.show_summary(prefix=" ") print() processed_array = merged.array() del merged processed_array.show_comprehensive_summary(prefix=" ") print() if (r_free_flags is not None and not r_free_flags.is_unique_set_under_symmetry()): print("Merging symmetry-equivalent R-free flags:") merged = r_free_flags.merge_equivalents() merged.show_summary(prefix=" ") print() r_free_flags = merged.array() del merged r_free_flags.show_comprehensive_summary(prefix=" ") print() if (co.expand_to_p1): print("Expanding symmetry and resetting space group to P1:") if (r_free_flags is not None): raise Sorry( "--expand_to_p1 not supported for arrays of R-free flags.") processed_array = processed_array.expand_to_p1() processed_array.show_comprehensive_summary(prefix=" ") print() if (co.change_to_space_group is not None): if (r_free_flags is not None): raise Sorry( "--change_to_space_group not supported for arrays of R-free flags." 
) new_space_group_info = sgtbx.space_group_info( symbol=co.change_to_space_group) print("Change to space group:", new_space_group_info) new_crystal_symmetry = crystal.symmetry( unit_cell=processed_array.unit_cell(), space_group_info=new_space_group_info, assert_is_compatible_unit_cell=False) if (not new_crystal_symmetry.unit_cell().is_similar_to( processed_array.unit_cell())): print(" *************") print(" W A R N I N G") print(" *************") print( " Unit cell parameters adapted to new space group symmetry are" ) print(" significantly different from input unit cell parameters:") print(" Input unit cell parameters:", \ processed_array.unit_cell()) print(" Adapted unit cell parameters:", \ new_crystal_symmetry.unit_cell()) processed_array = processed_array.customized_copy( crystal_symmetry=new_crystal_symmetry) print() if (not processed_array.is_unique_set_under_symmetry()): print(" Merging values symmetry-equivalent under new symmetry:") merged = processed_array.merge_equivalents() merged.show_summary(prefix=" ") print() processed_array = merged.array() del merged processed_array.show_comprehensive_summary(prefix=" ") print() if (processed_array.anomalous_flag() and co.non_anomalous): print("Converting data array from anomalous to non-anomalous.") if (not processed_array.is_xray_intensity_array()): processed_array = processed_array.average_bijvoet_mates() else: processed_array = processed_array.average_bijvoet_mates() processed_array.set_observation_type_xray_intensity() if (r_free_flags is not None and r_free_flags.anomalous_flag() and co.non_anomalous): print("Converting R-free flags from anomalous to non-anomalous.") r_free_flags = r_free_flags.average_bijvoet_mates() d_max = co.low_resolution d_min = co.resolution if (d_max is not None or d_min is not None): if (d_max is not None): print("Applying low resolution cutoff: d_max=%.6g" % d_max) if (d_min is not None): print("Applying high resolution cutoff: d_min=%.6g" % d_min) processed_array = processed_array.resolution_filter(d_max=d_max, d_min=d_min) print("Number of reflections:", processed_array.indices().size()) print() if (co.scale_max is not None): print("Scaling data such that the maximum value is: %.6g" % co.scale_max) processed_array = processed_array.apply_scaling( target_max=co.scale_max) print() if (co.scale_factor is not None): print("Multiplying data with the factor: %.6g" % co.scale_factor) processed_array = processed_array.apply_scaling(factor=co.scale_factor) print() if (([co.remove_negatives, co.massage_intensities]).count(True) == 2): raise Sorry("It is not possible to use --remove_negatives and" " --massage_intensities at the same time.") if (co.remove_negatives): if processed_array.is_real_array(): print("Removing negatives items") processed_array = processed_array.select( processed_array.data() > 0) if processed_array.sigmas() is not None: processed_array = processed_array.select( processed_array.sigmas() > 0) else: raise Sorry( "--remove_negatives not applicable to complex data arrays.") if (co.massage_intensities): if processed_array.is_real_array(): if processed_array.is_xray_intensity_array(): if (co.mtz is not None): if (co.write_mtz_amplitudes): print( "The supplied intensities will be used to estimate" ) print(" amplitudes in the following way: ") print( " Fobs = Sqrt[ (Iobs + Sqrt(Iobs**2 + 2sigmaIobs**2))/2 ]" ) print(" Sigmas are estimated in a similar manner.") print() processed_array = processed_array.enforce_positive_amplitudes( ) else: raise Sorry( "--write_mtz_amplitudes has to be specified when 
using" " --massage_intensities") else: raise Sorry( "--mtz has to be used when using --massage_intensities" ) else: raise Sorry( "Intensities must be supplied when using the option" " --massage_intensities") else: raise Sorry( "--massage_intensities not applicable to complex data arrays.") if (not co.generate_r_free_flags): if (r_free_flags is None): r_free_info = [] else: if (r_free_flags.anomalous_flag() != processed_array.anomalous_flag()): if (processed_array.anomalous_flag()): is_not = ("", " not") else: is_not = (" not", "") raise Sorry( "The data array is%s anomalous but the R-free array is%s.\n" % is_not + " Please try --non_anomalous.") r_free_info = ["R-free flags source: " + r_free_info] if (not r_free_flags.indices().all_eq(processed_array.indices())): processed_array = processed_array.map_to_asu() r_free_flags = r_free_flags.map_to_asu().common_set( processed_array) n_missing_r_free_flags = processed_array.indices().size() \ - r_free_flags.indices().size() if (n_missing_r_free_flags != 0): raise Sorry( "R-free flags not compatible with data array:" " missing flag for %d reflections selected for output." % n_missing_r_free_flags) else: if (r_free_flags is not None): raise Sorry( "--r_free_label and --generate_r_free_flags are mutually exclusive." ) print("Generating a new array of R-free flags:") r_free_flags = processed_array.generate_r_free_flags( fraction=co.r_free_flags_fraction, max_free=co.r_free_flags_max_free, use_lattice_symmetry=co. use_lattice_symmetry_in_r_free_flag_generation, format=co.r_free_flags_format) test_flag_value = True if (co.r_free_flags_format == "ccp4"): test_flag_value = 0 elif (co.r_free_flags_format == "shelx"): test_flag_value = -1 r_free_as_bool = r_free_flags.customized_copy( data=r_free_flags.data() == test_flag_value) r_free_info = ["R-free flags generated by %s:" % command_name] r_free_info.append(" " + date_and_time()) r_free_info.append(" fraction: %.6g" % co.r_free_flags_fraction) r_free_info.append(" max_free: %s" % str(co.r_free_flags_max_free)) r_free_info.append(" size of work set: %d" % r_free_as_bool.data().count(False)) r_free_info.append(" size of free set: %d" % r_free_as_bool.data().count(True)) r_free_info_str = StringIO() r_free_as_bool.show_r_free_flags_info(prefix=" ", out=r_free_info_str) if (co.r_free_flags_format == "ccp4"): r_free_info.append(" convention: CCP4 (test=0, work=1-%d)" % flex.max(r_free_flags.data())) elif (co.r_free_flags_format == "shelx"): r_free_info.append(" convention: SHELXL (test=-1, work=1)") else: r_free_info.append(" convention: CNS/X-PLOR (test=1, work=0)") print("\n".join(r_free_info[2:4])) print(r_free_info[-1]) print(r_free_info_str.getvalue()) print() n_output_files = 0 if (co.sca is not None): if (co.generate_r_free_flags): raise Sorry("Cannot write R-free flags to Scalepack file.") file_name = reflection_file_utils.construct_output_file_name( input_file_names=[selected_array.info().source], user_file_name=co.sca, file_type_label="Scalepack", file_extension="sca") print("Writing Scalepack file:", file_name) iotbx.scalepack.merge.write(file_name=file_name, miller_array=processed_array) n_output_files += 1 print() if (co.mtz is not None): file_name = reflection_file_utils.construct_output_file_name( input_file_names=[selected_array.info().source], user_file_name=co.mtz, file_type_label="MTZ", file_extension="mtz") print("Writing MTZ file:", file_name) mtz_history_buffer = flex.std_string() mtz_history_buffer.append(date_and_time()) mtz_history_buffer.append("> program: %s" % command_name) 
mtz_history_buffer.append( "> input file name: %s" % os.path.basename(selected_array.info().source)) mtz_history_buffer.append( "> input directory: %s" % os.path.dirname(os.path.abspath(selected_array.info().source))) mtz_history_buffer.append("> input labels: %s" % selected_array.info().label_string()) mtz_output_array = processed_array if (co.write_mtz_amplitudes): if (not mtz_output_array.is_xray_amplitude_array()): print(" Converting intensities to amplitudes.") mtz_output_array = mtz_output_array.f_sq_as_f() mtz_history_buffer.append( "> Intensities converted to amplitudes.") elif (co.write_mtz_intensities): if (not mtz_output_array.is_xray_intensity_array()): print(" Converting amplitudes to intensities.") mtz_output_array = mtz_output_array.f_as_f_sq() mtz_history_buffer.append( "> Amplitudes converted to intensities.") column_root_label = co.mtz_root_label if (column_root_label is None): # XXX 2013-03-29: preserve original root label by default # XXX 2014-12-16: skip trailing "(+)" in root_label if anomalous column_root_label = selected_array.info().labels[0] column_root_label = remove_anomalous_suffix_if_necessary( miller_array=selected_array, column_root_label=column_root_label) mtz_dataset = mtz_output_array.as_mtz_dataset( column_root_label=column_root_label) del mtz_output_array if (r_free_flags is not None): mtz_dataset.add_miller_array( miller_array=r_free_flags, column_root_label=co.output_r_free_label) for line in r_free_info: mtz_history_buffer.append("> " + line) mtz_history_buffer.append("> output file name: %s" % os.path.basename(file_name)) mtz_history_buffer.append("> output directory: %s" % os.path.dirname(os.path.abspath(file_name))) mtz_object = mtz_dataset.mtz_object() mtz_object.add_history(mtz_history_buffer) mtz_object.write(file_name=file_name) n_output_files += 1 print() if (co.cns is not None): file_name = reflection_file_utils.construct_output_file_name( input_file_names=[selected_array.info().source], user_file_name=co.cns, file_type_label="CNS", file_extension="cns") print("Writing CNS file:", file_name) processed_array.export_as_cns_hkl( file_object=open(file_name, "w"), file_name=file_name, info=["source of data: " + str(selected_array.info())] + r_free_info, r_free_flags=r_free_flags) n_output_files += 1 print() if (co.shelx is not None): if (co.generate_r_free_flags): raise Sorry("Cannot write R-free flags to SHELX file.") file_name = reflection_file_utils.construct_output_file_name( input_file_names=[selected_array.info().source], user_file_name=co.shelx, file_type_label="SHELX", file_extension="shelx") print("Writing SHELX file:", file_name) processed_array.as_amplitude_array().export_as_shelx_hklf( open(file_name, "w")) n_output_files += 1 print() if (n_output_files == 0): command_line.parser.show_help() print("Please specify at least one output file format,", end=' ') print("e.g. --mtz, --sca, etc.") print() return None return processed_array
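# --- Hedged usage sketch ---
# run() above is the phenix.reflection_file_converter entry point; invoked
# programmatically it takes the same argument list as the command line shown
# in the usage string.  "w1.sca" is the placeholder file name from that usage
# string, not a file shipped with this code.
processed_array = run(args=["w1.sca", "--mtz", "."])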
def OnChangeSpaceGroup(self, event): sg_sel = str(self.sg_ctrl.GetStringSelection()) if (sg_sel != self._last_sg_sel): from cctbx import sgtbx sg_info = sgtbx.space_group_info(sg_sel) self.parent.set_space_group(sg_info)
"P-1", # space groups for all 14 Bravais lattices "P2/m", "C2/m", "Pmmm", "Cmmm", "Fmmm", "Immm", "P4/mmm", "I4/mmm", "R-3m", "P6/mmm", "Pm-3m", "Im-3m", "Fm-3m", ): sgi = sgtbx.space_group_info(symbol=sg) cs = sgi.any_compatible_crystal_symmetry(volume=1000) ms = cs.build_miller_set(anomalous_flag=False, d_min=dmin) #ms.show_summary() spcg = ("%s" % ms.space_group_info()).split(':')[0] print(spcg) fp.write("'%s': [\n" % spcg) fp.write("%s ,\n" % ms.unit_cell()) hkllist = {} for hkl, d in ms.d_spacings(): if hkllist.has_key(d): hkllist[d].append(hkl) print(hkllist[d]) else: hkllist[d] = [ hkl,
def run_group(symbol): group = space_group_info(symbol) print("\n==") elements = ('C', 'N', 'O', 'H') * 11 struc = random_structure.xray_structure(space_group_info=group, volume_per_atom=25., general_positions_only=False, elements=elements, min_distance=1.0) struc.show_summary() d_min = 2. fc = struc.structure_factors(d_min=d_min).f_calc() symmetry_flags = maptbx.use_space_group_symmetry fftmap = fc.fft_map(symmetry_flags=symmetry_flags) grid_size = fftmap.real_map().accessor().focus() ### rm = fftmap.real_map().deep_copy() amap0 = asymmetric_map(struc.space_group().type(), rm) p1_map00 = amap0.symmetry_expanded_map() assert approx_equal(p1_map00, rm) # maptbx.unpad_in_place(rm) amap1 = asymmetric_map(struc.space_group().type(), rm) p1_map10 = amap1.symmetry_expanded_map() assert approx_equal(p1_map00, p1_map10) ### grid_tags = maptbx.grid_tags(grid_size) grid_tags.build(fftmap.space_group_info().type(), fftmap.symmetry_flags()) grid_tags.verify(fftmap.real_map()) print("FFT grid_size = ", grid_size) amap = asymmetric_map(struc.space_group().type(), fftmap.real_map()) afc = amap.structure_factors(fc.indices()) afftmap = amap.map_for_fft() print("whole cell map size: ", afftmap.accessor().focus()) adata = amap.data() acc = adata.accessor() print("Asu map size: ", acc.origin(), " ", acc.last(), " ", acc.focus(), \ " ", acc.all()) df = flex.abs(afc - fc.data()) r1 = flex.sum(df) / flex.sum(flex.abs(fc.data())) print("R1: ", r1) assert r1 < 1.e-5 # just to prove to myself that I can shift origin to 000 and then reshape back adata = adata.shift_origin() adata.reshape(acc) # adata2 = adata.deep_copy() * 2. amap2 = asymmetric_map(struc.space_group().type(), adata2, grid_size) afc2 = amap2.structure_factors(fc.indices()) df2 = flex.abs(afc2 * .5 - fc.data()) r12 = flex.sum(df2) / flex.sum(flex.abs(fc.data())) print("R1 again: ", r12) assert r12 < 1.e-5 p1_map = amap.symmetry_expanded_map() assert p1_map.accessor().focus() == grid_size rel_tol = 1.e-6 n = 0 mean_rel_dif = 0. for (m1, m2) in zip(fftmap.real_map(), p1_map): dif = abs(m1 - m2) av = 0.5 * (abs(m1) + abs(m2)) assert dif <= rel_tol * av, "%f not <= %f * %f" % (dif, rel_tol, av) if av != 0: mean_rel_dif = mean_rel_dif + dif / av n = n + 1 mean_rel_dif = mean_rel_dif / n print("mean rel err: ", mean_rel_dif) assert mean_rel_dif < 1.e-6
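A condensed sketch of the asymmetric-map round trip exercised by this test, assuming asymmetric_map is importable from cctbx.maptbx as in the test's own (elided) imports; the structure size and resolution are arbitrary.

from cctbx import sgtbx, maptbx
from cctbx.development import random_structure
from cctbx.maptbx import asymmetric_map  # assumed import path

xrs = random_structure.xray_structure(
  space_group_info=sgtbx.space_group_info("P 21 21 21"),
  elements=("C",) * 10, volume_per_atom=25.)
fc = xrs.structure_factors(d_min=2.).f_calc()
fft_map = fc.fft_map(symmetry_flags=maptbx.use_space_group_symmetry)
amap = asymmetric_map(xrs.space_group().type(), fft_map.real_map())
p1_map = amap.symmetry_expanded_map()
assert p1_map.accessor().focus() == fft_map.real_map().accessor().focus()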
def parse(self, int_lp): re_im = re.compile( "^ (.....) 0 +([0-9\.]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9]+) +([0-9\.]+) +([0-9\.]+)" ) re_cell = re.compile( "^ UNIT CELL PARAMETERS *([0-9\.]+) *([0-9\.]+) *([0-9\.]+) *([0-9\.]+) *([0-9\.]+) *([0-9\.]+)" ) re_rotation = re.compile( "^ CRYSTAL ROTATION OFF FROM INITIAL ORIENTATION *([-0-9\.]+) *([-0-9\.]+) *([-0-9\.]+)" ) # re_mosaicity = re.compile( "^ CRYSTAL MOSAICITY \(DEGREES\) *([0-9\.]+)") # re_axis = re.compile( "^ LAB COORDINATES OF ROTATION AXIS *([-0-9\.]+) *([-0-9\.]+) *([-0-9\.]+)" ) # re_beam = re.compile( "^ DIRECT BEAM COORDINATES \(REC\. ANGSTROEM\) *([-0-9\.]+) *([-0-9\.]+) *([-0-9\.]+)" ) re_dist = re.compile( "^ CRYSTAL TO DETECTOR DISTANCE \(mm\) *([-0-9\.]+)") re_dev_spot = re.compile( "^ STANDARD DEVIATION OF SPOT POSITION \(PIXELS\) *([0-9\.]+)") re_dev_spindle = re.compile( "^ STANDARD DEVIATION OF SPINDLE POSITION \(DEGREES\) *([0-9\.]+)") re_orig = re.compile( "^ DETECTOR ORIGIN \(PIXELS\) AT *([0-9\.]+) *([0-9\.]+)") images = [] # as key of params self.cell_changes = [] self.blockparams = collections.OrderedDict() clear_flag = False self.frames = [] self.scales, self.overloads, self.strongs, self.rejecteds, self.sigmads, self.sigmars = [], [], [], [], [], [] self.space_group = None # Read INTEGRATE.LP file for l in open(int_lp): r_im = re_im.search(l) r_cell = re_cell.search(l) r_rotation = re_rotation.search(l) r_dist = re_dist.search(l) r_spot = re_dev_spot.search(l) r_spindle = re_dev_spindle.search(l) r_orig = re_orig.search(l) if l.startswith(" SPACE_GROUP_NUMBER="): sgnum = int(l.strip().split()[-1]) if sgnum > 0: self.space_group = sgtbx.space_group_info(sgnum).group() if r_im: if clear_flag: images = [] clear_flag = False image, scale, nbkg, novl, newald, nstrong, nrej, sigmad, sigmar = r_im.groups() images.append(int(image)) # for plot self.frames.append(int(image)) self.scales.append(scale) self.overloads.append(int(novl)) self.strongs.append(int(nstrong)) self.rejecteds.append(int(nrej)) self.sigmads.append(sigmad) self.sigmars.append(sigmar) if r_cell: #a, b, c, alpha, beta, gamma = r_cell.groups() self.blockparams.setdefault(tuple(images), {})["cell"] = r_cell.groups() self.cell_changes.append((images, r_cell.groups())) clear_flag = True if r_rotation: self.blockparams.setdefault( tuple(images), {})["rotation"] = r_rotation.groups() misset = rotations_to_missetting_angles( map(float, r_rotation.groups())) self.blockparams.setdefault(tuple(images), {})["misset"] = map( lambda x: "%.2f" % x, misset) clear_flag = True if r_dist: self.blockparams.setdefault(tuple(images), {})["dist"] = r_dist.group(1) clear_flag = True if r_spot: self.blockparams.setdefault(tuple(images), {})["spot"] = r_spot.group(1) clear_flag = True if r_spindle: self.blockparams.setdefault(tuple(images), {})["spindle"] = r_spindle.group(1) clear_flag = True if r_orig: self.blockparams.setdefault(tuple(images), {})["orig"] = r_orig.groups() clear_flag = True if l.startswith(" SIGMAB (degree)"): self.blockparams.setdefault( tuple(images), {})["sigmab9"] = l.strip().split()[-9:] clear_flag = True if l.startswith(" SIGMAR (degree)"): self.blockparams.setdefault( tuple(images), {})["sigmar9"] = l.strip().split()[-9:] clear_flag = True
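For reference, a small sketch of how the SPACE_GROUP_NUMBER handling above maps an XDS space group number onto a cctbx space group object (the helper name is hypothetical; XDS writes 0 when the space group is not yet known).

from cctbx import sgtbx

def space_group_from_xds_number(sgnum):  # hypothetical helper
  if sgnum > 0:
    return sgtbx.space_group_info(sgnum).group()
  return None  # 0 means "unknown", as skipped in the parser above

sg = space_group_from_xds_number(19)
assert str(sgtbx.space_group_info(group=sg)) == "P 21 21 21"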
def exercise_restrained_refinement(options): import random random.seed(1) flex.set_random_seed(1) xs0 = smtbx.development.random_xray_structure( sgtbx.space_group_info('P1'), n_scatterers=options.n_scatterers, elements="random") for sc in xs0.scatterers(): sc.flags.set_grad_site(True) sc0 = xs0.scatterers() uc = xs0.unit_cell() mi = xs0.build_miller_set(anomalous_flag=False, d_min=options.resolution) fo_sq = mi.structure_factors_from_scatterers( xs0, algorithm="direct").f_calc().norm() fo_sq = fo_sq.customized_copy(sigmas=flex.double(fo_sq.size(), 1)) i, j, k, l = random.sample(range(options.n_scatterers), 4) bond_proxies = geometry_restraints.shared_bond_simple_proxy() w = 1e9 d_ij = uc.distance(sc0[i].site, sc0[j].site)*0.8 bond_proxies.append(geom.bond_simple_proxy( i_seqs=(i, j), distance_ideal=d_ij, weight=w)) d_jk = uc.distance(sc0[j].site, sc0[k].site)*0.85 bond_proxies.append(geom.bond_simple_proxy( i_seqs=(j, k), distance_ideal=d_jk, weight=w)) d_ki = min(uc.distance(sc0[k].site, sc0[i].site)*0.9, (d_ij + d_jk)*0.8) bond_proxies.append(geom.bond_simple_proxy( i_seqs=(k, i), distance_ideal=d_ki, weight=w)) d_jl = uc.distance(sc0[j].site, sc0[l].site)*0.9 bond_proxies.append(geom.bond_simple_proxy( i_seqs=(j, l), distance_ideal=d_jl, weight=w)) d_lk = min(uc.distance(sc0[l].site, sc0[k].site)*0.8, 0.75*(d_jk + d_jl)) bond_proxies.append(geom.bond_simple_proxy( i_seqs=(l, k), distance_ideal=d_lk, weight=w)) restraints_manager = restraints.manager(bond_proxies=bond_proxies) xs1 = xs0.deep_copy_scatterers() xs1.shake_sites_in_place(rms_difference=0.1) def ls_problem(): xs = xs1.deep_copy_scatterers() reparametrisation = constraints.reparametrisation( structure=xs, constraints=[], connectivity_table=smtbx.utils.connectivity_table(xs), temperature=20) return least_squares.crystallographic_ls( fo_sq.as_xray_observations(), reparametrisation=reparametrisation, restraints_manager=restraints_manager) gradient_threshold, step_threshold = 1e-6, 1e-6 eps = 5e-3 ls = ls_problem() t = wall_clock_time() cycles = normal_eqns_solving.naive_iterations( ls, gradient_threshold=gradient_threshold, step_threshold=step_threshold, track_all=True) if options.verbose: print("%i %s steps in %.6f s" % (cycles.n_iterations, cycles, t.elapsed())) sc = ls.xray_structure.scatterers() for p in bond_proxies: d = uc.distance(*[ sc[i_pair].site for i_pair in p.i_seqs ]) assert approx_equal(d, p.distance_ideal, eps) ls = ls_problem() t = wall_clock_time() cycles = normal_eqns_solving.levenberg_marquardt_iterations( ls, gradient_threshold=gradient_threshold, step_threshold=step_threshold, tau=1e-3, track_all=True) if options.verbose: print("%i %s steps in %.6f s" % (cycles.n_iterations, cycles, t.elapsed())) sc = ls.xray_structure.scatterers() for p in bond_proxies: d = uc.distance(*[ sc[i_pair].site for i_pair in p.i_seqs ]) assert approx_equal(d, p.distance_ideal, eps)
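A minimal sketch of the restraint setup used above, reduced to the shared proxy array and a few bond proxies; the indices, target distances and weight are illustrative.

from cctbx import geometry_restraints as geom

proxies = geom.shared_bond_simple_proxy()
for i_seqs, d_ideal in (((0, 1), 1.5), ((1, 2), 1.4), ((2, 0), 2.4)):
  proxies.append(geom.bond_simple_proxy(
    i_seqs=i_seqs, distance_ideal=d_ideal, weight=1e3))
assert proxies.size() == 3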
class process_command_line_with_files(object): def __init__(self, args, master_phil=None, master_phil_string=None, pdb_file_def=None, reflection_file_def=None, map_file_def=None, cif_file_def=None, seq_file_def=None, pickle_file_def=None, ncs_file_def=None, directory_def=None, integer_def=None, float_def=None, space_group_def=None, unit_cell_def=None, usage_string=None): assert (master_phil is not None) or (master_phil_string is not None) if (master_phil_string is not None): assert (master_phil is None) import iotbx.phil master_phil = iotbx.phil.parse(input_string=master_phil_string, process_includes=True) if (usage_string is not None): if (len(args) == 0) or ("--help" in args): raise Usage(""" %s Full parameters: %s""" % (usage_string, master_phil.as_str(prefix=" ", attributes_level=1))) self.master = master_phil self.pdb_file_def = pdb_file_def self.reflection_file_def = reflection_file_def self.cif_file_def = cif_file_def self.map_file_def = map_file_def self.seq_file_def = seq_file_def self.pickle_file_def = pickle_file_def self.ncs_file_def = ncs_file_def self.directory_def = directory_def self.integer_def = integer_def self.float_def = float_def self.space_group_def = space_group_def self.unit_cell_def = unit_cell_def self._type_counts = {} self._cache = {} cai = libtbx.phil.command_line.argument_interpreter( master_phil=self.master) self.unused_args = [] self.work = cai.process_and_fetch(args=args, custom_processor=self) def get_file_type_count(self, file_type): return self._type_counts.get(file_type, 0) def __call__(self, arg): file_arg = None is_shelx_file = False if (os.path.isfile(arg)): file_arg = arg # handle SHELX format hack elif (arg.endswith("=hklf3") or arg.endswith("=hklf4") or arg.endswith("=amplitudes") or arg.endswith("=intensities")): base_arg = "".join(arg.split("=")[:-1]) if (base_arg != "") and os.path.isfile(base_arg): file_arg = arg is_shelx_file = True if (file_arg is not None): from iotbx import file_reader f = file_reader.any_file(os.path.abspath(file_arg), raise_sorry_if_not_expected_format=True) if (f.file_type is not None): if (not f.file_type in self._type_counts): self._type_counts[f.file_type] = 0 self._type_counts[f.file_type] += 1 self._cache[f.file_name] = f file_def_name = None if (f.file_type == "pdb") and (self.pdb_file_def is not None): file_def_name = self.pdb_file_def elif (f.file_type == "hkl") and (self.reflection_file_def is not None): file_def_name = self.reflection_file_def elif (f.file_type == "ccp4_map") and (self.map_file_def is not None): file_def_name = self.map_file_def elif (f.file_type == "cif") and (self.cif_file_def is not None): file_def_name = self.cif_file_def elif (f.file_type == "seq") and (self.seq_file_def is not None): file_def_name = self.seq_file_def elif (f.file_type == "ncs") and (self.ncs_file_def is not None): file_def_name = self.ncs_file_def elif (f.file_type == "pkl") and (self.pickle_file_def is not None): file_def_name = self.pickle_file_def if (file_def_name is not None): file_name = f.file_name if (is_shelx_file): file_name = file_arg return libtbx.phil.parse("%s=%s" % (file_def_name, file_name)) else: return False elif (os.path.isdir(arg)): if (self.directory_def is not None): return libtbx.phil.parse("%s=%s" % (self.directory_def, arg)) else: int_value = float_value = None if (self.integer_def is not None): try: int_value = int(arg) except ValueError, e: pass else: return libtbx.phil.parse("%s=%d" % (self.integer_def, int_value)) if (self.float_def is not None): try: float_value = float(arg) except 
ValueError, e: pass else: return libtbx.phil.parse("%s=%g" % (self.float_def, float_value)) if (self.space_group_def is not None): try: space_group_info = sgtbx.space_group_info(arg) except RuntimeError: # XXX should really be ValueError pass else: return libtbx.phil.parse( "%s=%s" % (self.space_group_def, space_group_info)) if (self.unit_cell_def is not None) and (arg.count(",") >= 2): try: uc_params = tuple([float(x) for x in arg.split(",")]) unit_cell = uctbx.unit_cell(uc_params) except Exception: # XXX should really be ValueError pass else: return libtbx.phil.parse( "%s=%s" % (self.unit_cell_def, ",".join( ["%g" % x for x in unit_cell.parameters()])))
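A hedged usage sketch of process_command_line_with_files: the phil scope, argument values and file name are made up, and the example assumes model.pdb exists in the working directory so the file-type dispatch can fire.

master_phil_string = """\
model = None
  .type = path
space_group = None
  .type = space_group
resolution = None
  .type = float
"""
cmdline = process_command_line_with_files(
  args=["model.pdb", "P212121", "2.0"],   # model.pdb assumed to exist
  master_phil_string=master_phil_string,
  pdb_file_def="model",
  space_group_def="space_group",
  float_def="resolution")
params = cmdline.work.extract()
# params.model, params.space_group and params.resolution are now populated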
def run(args): assert len(args) == 0 from cctbx import miller import cctbx.miller.reindexing from cctbx import uctbx from cctbx import sgtbx from cctbx.array_family import flex uc = uctbx.unit_cell((11, 11, 11, 81, 81, 81)) ms = uc.complete_miller_set_with_lattice_symmetry(anomalous_flag=True, d_min=3) ra = miller.reindexing.assistant( lattice_group=ms.space_group(), intensity_group=sgtbx.space_group_info(symbol="P 1").group(), miller_indices=ms.expand_to_p1().indices()) mt = flex.mersenne_twister(seed=0) def check_cb_op_perm(cb_op, perm): mi_cb = cb_op.apply(ra.miller_indices) miis = flex.random_permutation(size=ra.miller_indices.size())[:2] k = cb_op.apply(ra.miller_indices.select(miis)) matches = miller.match_indices(k, ra.miller_indices) pairs = matches.pairs() assert pairs.column(0).all_eq(flex.size_t_range(k.size())) miis_cb = pairs.column(1) assert perm.select(miis).all_eq(miis_cb) def check_ra(): for cb_op, perm, inv_perm in zip(ra.cb_ops, ra.perms, ra.inv_perms): check_cb_op_perm(cb_op, perm) check_cb_op_perm(cb_op.inverse(), inv_perm) check_ra() assert ra.i_j_multiplication_table == [[0, 1, 2, 3, 4, 5], [1, 2, 0, 4, 5, 3], [2, 0, 1, 5, 3, 4], [3, 5, 4, 0, 2, 1], [4, 3, 5, 1, 0, 2], [5, 4, 3, 2, 1, 0]] assert ra.i_inv_j_multiplication_table == [[0, 1, 2, 3, 4, 5], [2, 0, 1, 5, 3, 4], [1, 2, 0, 4, 5, 3], [3, 5, 4, 0, 2, 1], [4, 3, 5, 1, 0, 2], [5, 4, 3, 2, 1, 0]] assert ra.i_j_inv_multiplication_table == [[0, 2, 1, 3, 4, 5], [1, 0, 2, 4, 5, 3], [2, 1, 0, 5, 3, 4], [3, 4, 5, 0, 2, 1], [4, 5, 3, 1, 0, 2], [5, 3, 4, 2, 1, 0]] from libtbx.test_utils import show_diff from six.moves import cStringIO as StringIO sio = StringIO() assert ra.show_summary(out=sio, prefix=": ") is ra assert not show_diff( sio.getvalue(), """\ : Lattice symmetry: R 3 2 :R (No. 155) : Intensity symmetry: P 1 (No. 1) : : Indexing ambiguities: : k,l,h 3-fold invariants: 4 : l,h,k 3-fold invariants: 4 : -k,-h,-l 2-fold invariants: 4 : -l,-k,-h 2-fold invariants: 4 : -h,-l,-k 2-fold invariants: 4 """) # ra = miller.reindexing.assistant(lattice_group=ms.space_group(), intensity_group=ms.space_group(), miller_indices=ra.miller_indices) check_ra() sio = StringIO() assert ra.show_summary(out=sio) is ra assert not show_diff( sio.getvalue(), """\ Lattice symmetry: R 3 2 :R (No. 155) Intensity symmetry: R 3 2 :R (No. 155) No indexing ambiguity. """) assert ra.i_j_multiplication_table == [[0]] assert ra.i_inv_j_multiplication_table == [[0]] assert ra.i_j_inv_multiplication_table == [[0]] # ra = miller.reindexing.assistant( lattice_group=ms.space_group(), intensity_group=sgtbx.space_group_info(symbol="R 3 :R").group(), miller_indices=ra.miller_indices) check_ra() sio = StringIO() assert ra.show_summary(out=sio) is ra assert not show_diff( sio.getvalue(), """\ Lattice symmetry: R 3 2 :R (No. 155) Intensity symmetry: R 3 :R (No. 
146) Indexing ambiguity: -h,-l,-k 2-fold invariants: 4 """) assert ra.i_j_multiplication_table == [[0, 1], [1, 0]] assert ra.i_inv_j_multiplication_table == [[0, 1], [1, 0]] assert ra.i_j_inv_multiplication_table == [[0, 1], [1, 0]] # import math ta = math.acos(-1 / 3) * 180 / math.pi uc = uctbx.unit_cell((11, 11, 11, ta, ta, ta)) ms = uc.complete_miller_set_with_lattice_symmetry(anomalous_flag=True, d_min=3) ra = miller.reindexing.assistant( lattice_group=ms.space_group(), intensity_group=sgtbx.space_group_info( symbol="I 4 (y+z,x+z,x+y)").group(), miller_indices=ms.expand_to_p1().indices()) check_ra() sio = StringIO() assert ra.show_summary(out=sio) is ra assert not show_diff( sio.getvalue(), """\ Lattice symmetry: I 4 3 2 (y+z,x+z,x+y) (No. 211) Intensity symmetry: I 4 (y+z,x+z,x+y) (No. 79) Indexing ambiguities: k,l,h 3-fold invariants: 2 -l,-k,-h 2-fold invariants: 4 -h,-l,-k 2-fold invariants: 4 l,h,k 3-fold invariants: 2 -k,-h,-l 2-fold invariants: 4 """) assert ra.i_j_multiplication_table == [[0, 1, 2, 3, 4, 5], [1, 4, 3, 5, 0, 2], [2, 5, 0, 4, 3, 1], [3, 2, 1, 0, 5, 4], [4, 0, 5, 2, 1, 3], [5, 3, 4, 1, 2, 0]] assert ra.i_inv_j_multiplication_table == [[0, 1, 2, 3, 4, 5], [4, 0, 5, 2, 1, 3], [2, 5, 0, 4, 3, 1], [3, 2, 1, 0, 5, 4], [1, 4, 3, 5, 0, 2], [5, 3, 4, 1, 2, 0]] assert ra.i_j_inv_multiplication_table == [[0, 4, 2, 3, 1, 5], [1, 0, 3, 5, 4, 2], [2, 3, 0, 4, 5, 1], [3, 5, 1, 0, 2, 4], [4, 1, 5, 2, 0, 3], [5, 2, 4, 1, 3, 0]] # print("OK")
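A compact usage sketch of the assistant exercised above, just listing the candidate reindexing operators for the same pseudo-rhombohedral cell.

from cctbx import miller, sgtbx, uctbx
import cctbx.miller.reindexing

uc = uctbx.unit_cell((11, 11, 11, 81, 81, 81))
ms = uc.complete_miller_set_with_lattice_symmetry(anomalous_flag=True, d_min=3)
ra = miller.reindexing.assistant(
  lattice_group=ms.space_group(),
  intensity_group=sgtbx.space_group_info(symbol="P 1").group(),
  miller_indices=ms.expand_to_p1().indices())
for cb_op in ra.cb_ops:
  print(cb_op.as_hkl())  # the operators behind the multiplication tables above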
def __init__(self, pdb_hierarchy, xray_structure, fmodel, distance_cutoff=4.0, collect_all=True, molprobity_map_params=None): validation.__init__(self) from mmtbx.real_space_correlation import extract_map_stats_for_single_atoms from cctbx import adptbx from scitbx.matrix import col self.n_bad = 0 self.n_heavy = 0 pdb_atoms = pdb_hierarchy.atoms() if (len(pdb_atoms) > 1): assert (not pdb_atoms.extract_i_seq().all_eq(0)) unit_cell = xray_structure.unit_cell() pair_asu_table = xray_structure.pair_asu_table( distance_cutoff=distance_cutoff) asu_mappings = pair_asu_table.asu_mappings() asu_table = pair_asu_table.table() u_isos = xray_structure.extract_u_iso_or_u_equiv() occupancies = xray_structure.scatterers().extract_occupancies() sites_frac = xray_structure.sites_frac() sel_cache = pdb_hierarchy.atom_selection_cache() water_sel = sel_cache.selection("water") if (molprobity_map_params is not None): # assume parameters have been validated (symmetry of pdb and map matches) two_fofc_map = None fc_map = None d_min = None crystal_gridding = None # read two_fofc_map if (molprobity_map_params.map_file_name is not None): f = any_file(molprobity_map_params.map_file_name) two_fofc_map = f.file_object.map_data() d_min = molprobity_map_params.d_min crystal_gridding = maptbx.crystal_gridding( f.file_object.unit_cell(), space_group_info=space_group_info( f.file_object.space_group_number), pre_determined_n_real=f.file_object.unit_cell_grid) pdb_atoms = pdb_hierarchy.atoms() xray_structure = pdb_hierarchy.extract_xray_structure( crystal_symmetry=f.crystal_symmetry()) unit_cell = xray_structure.unit_cell() # check for origin shift # --------------------------------------------------------------------- soin = maptbx.shift_origin_if_needed( map_data=two_fofc_map, sites_cart=xray_structure.sites_cart(), crystal_symmetry=xray_structure.crystal_symmetry()) two_fofc_map = soin.map_data xray_structure.set_sites_cart(soin.sites_cart) # --------------------------------------------------------------------- pair_asu_table = xray_structure.pair_asu_table( distance_cutoff=distance_cutoff) asu_mappings = pair_asu_table.asu_mappings() asu_table = pair_asu_table.table() u_isos = xray_structure.extract_u_iso_or_u_equiv() occupancies = xray_structure.scatterers().extract_occupancies() sites_frac = xray_structure.sites_frac() sel_cache = pdb_hierarchy.atom_selection_cache() water_sel = sel_cache.selection("water") elif (molprobity_map_params.map_coefficients_file_name is not None): f = any_file(molprobity_map_params.map_coefficients_file_name) fourier_coefficients = f.file_server.get_miller_array( molprobity_map_params.map_coefficients_label) crystal_symmetry = fourier_coefficients.crystal_symmetry() d_min = fourier_coefficients.d_min() crystal_gridding = maptbx.crystal_gridding( crystal_symmetry.unit_cell(), d_min, resolution_factor=0.25, space_group_info=crystal_symmetry.space_group_info()) two_fofc_map = miller.fft_map( crystal_gridding=crystal_gridding, fourier_coefficients=fourier_coefficients).apply_sigma_scaling().\ real_map_unpadded() # calculate fc_map assert ((d_min is not None) and (crystal_gridding is not None)) f_calc = xray_structure.structure_factors(d_min=d_min).f_calc() fc_map = miller.fft_map(crystal_gridding=crystal_gridding, fourier_coefficients=f_calc) fc_map = fc_map.apply_sigma_scaling().real_map_unpadded() map_stats = extract_map_stats_for_single_atoms( pdb_atoms=pdb_atoms, xray_structure=xray_structure, fmodel=None, selection=water_sel, fc_map=fc_map, two_fofc_map=two_fofc_map) else: map_stats = 
extract_map_stats_for_single_atoms( pdb_atoms=pdb_atoms, xray_structure=xray_structure, fmodel=fmodel, selection=water_sel) waters = [] for i_seq, atom in enumerate(pdb_atoms): if (water_sel[i_seq]): rt_mx_i_inv = asu_mappings.get_rt_mx(i_seq, 0).inverse() self.n_total += 1 asu_dict = asu_table[i_seq] nearest_atom = nearest_contact = None for j_seq, j_sym_groups in asu_dict.items(): atom_j = pdb_atoms[j_seq] site_j = sites_frac[j_seq] # Filter out hydrogens if atom_j.element.upper().strip() in ["H", "D"]: continue for j_sym_group in j_sym_groups: rt_mx = rt_mx_i_inv.multiply( asu_mappings.get_rt_mx(j_seq, j_sym_group[0])) site_ji = rt_mx * site_j site_ji_cart = xray_structure.unit_cell( ).orthogonalize(site_ji) vec_i = col(atom.xyz) vec_ji = col(site_ji_cart) dxyz = abs(vec_i - vec_ji) if (nearest_contact is None) or (dxyz < nearest_contact): nearest_contact = dxyz nearest_atom = atom_info(pdb_atom=atom_j, symop=rt_mx) w = water(pdb_atom=atom, b_iso=adptbx.u_as_b(u_isos[i_seq]), occupancy=occupancies[i_seq], nearest_contact=nearest_contact, nearest_atom=nearest_atom, score=map_stats.two_fofc_ccs[i_seq], fmodel=map_stats.fmodel_values[i_seq], two_fofc=map_stats.two_fofc_values[i_seq], fofc=map_stats.fofc_values[i_seq], anom=map_stats.anom_values[i_seq], n_hbonds=None) # TODO if (w.is_bad_water()): w.outlier = True self.n_bad += 1 elif (w.is_heavy_atom()): w.outlier = True self.n_heavy += 1 if (w.outlier) or (collect_all): self.results.append(w) self.n_outliers = len(self.results)
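The water check above relies on xray_structure.pair_asu_table to find symmetry-aware contacts. A small sketch of that machinery on a random structure (element choice and cutoff are arbitrary), counting neighbours per site.

from cctbx import sgtbx
from cctbx.development import random_structure

xrs = random_structure.xray_structure(
  space_group_info=sgtbx.space_group_info("P 21 21 21"),
  elements=("O",) * 8, volume_per_atom=50.)
pair_asu_table = xrs.pair_asu_table(distance_cutoff=4.0)
asu_table = pair_asu_table.table()
for i_seq in range(xrs.scatterers().size()):
  n_neighbours = 0
  for j_seq, j_sym_groups in asu_table[i_seq].items():
    for j_sym_group in j_sym_groups:
      n_neighbours += 1
  print(i_seq, n_neighbours)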
def _compute_scaler_statistics(self, scaled_unmerged_mtz, selected_band=None, wave=None): ''' selected_band = (d_min, d_max) with None for automatic determination. ''' # mapping of expected dictionary names to iotbx.merging_statistics attributes key_to_var = { 'I/sigma': 'i_over_sigma_mean', 'Completeness': 'completeness', 'Low resolution limit': 'd_max', 'Multiplicity': 'mean_redundancy', 'Rmerge(I)': 'r_merge', #'Wilson B factor':, 'Rmeas(I)': 'r_meas', 'High resolution limit': 'd_min', 'Total observations': 'n_obs', 'Rpim(I)': 'r_pim', 'CC half': 'cc_one_half', 'Total unique': 'n_uniq', } anom_key_to_var = { 'Rmerge(I+/-)': 'r_merge', 'Rpim(I+/-)': 'r_pim', 'Rmeas(I+/-)': 'r_meas', 'Anomalous completeness': 'anom_completeness', 'Anomalous correlation': 'anom_half_corr', 'Anomalous multiplicity': 'mean_redundancy', } stats = {} select_result, select_anom_result = None, None # don't call self.get_scaler_likely_spacegroups() since that calls # self.scale() which introduced a subtle bug from cctbx import sgtbx sg = sgtbx.space_group_info(str( self._scalr_likely_spacegroups[0])).group() from xia2.Handlers.Environment import Environment log_directory = Environment.generate_directory('LogFiles') merging_stats_file = os.path.join( log_directory, '%s_%s%s_merging-statistics.txt' % (self._scalr_pname, self._scalr_xname, '' if wave is None else '_%s' % wave)) merging_stats_json = os.path.join( log_directory, '%s_%s%s_merging-statistics.json' % (self._scalr_pname, self._scalr_xname, '' if wave is None else '_%s' % wave)) result, select_result, anom_result, select_anom_result = None, None, None, None n_bins = PhilIndex.params.xia2.settings.merging_statistics.n_bins import iotbx.merging_statistics while result is None: try: result = self._iotbx_merging_statistics(scaled_unmerged_mtz, anomalous=False, n_bins=n_bins) result.as_json(file_name=merging_stats_json) with open(merging_stats_file, 'w') as fh: result.show(out=fh) four_column_output = selected_band and any(selected_band) if four_column_output: select_result = self._iotbx_merging_statistics( scaled_unmerged_mtz, anomalous=False, d_min=selected_band[0], d_max=selected_band[1], n_bins=n_bins) if sg.is_centric(): anom_result = None anom_key_to_var = {} else: anom_result = self._iotbx_merging_statistics( scaled_unmerged_mtz, anomalous=True, n_bins=n_bins) stats['Anomalous slope'] = [anom_result.anomalous_np_slope] if four_column_output: select_anom_result = self._iotbx_merging_statistics( scaled_unmerged_mtz, anomalous=True, d_min=selected_band[0], d_max=selected_band[1], n_bins=n_bins) except iotbx.merging_statistics.StatisticsErrorNoReflectionsInRange: # Too few reflections for too many bins. Reduce number of bins and try again. result = None n_bins = n_bins - 3 if n_bins > 5: continue else: raise from six.moves import cStringIO as StringIO result_cache = StringIO() result.show(out=result_cache) for d, r, s in ((key_to_var, result, select_result), (anom_key_to_var, anom_result, select_anom_result)): for k, v in d.iteritems(): if four_column_output: values = (getattr(s.overall, v), getattr(s.bins[0], v), getattr(s.bins[-1], v), getattr(r.overall, v)) else: values = (getattr(r.overall, v), getattr(r.bins[0], v), getattr(r.bins[-1], v)) if 'completeness' in v: values = [v_ * 100 for v_ in values] if values[0] is not None: stats[k] = values return stats
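A hedged sketch of the underlying iotbx.merging_statistics call that this wrapper drives; the file name, label selection and bin count are placeholders.

import iotbx.merging_statistics

i_obs = iotbx.merging_statistics.select_data(
  file_name="scaled_unmerged.mtz",  # placeholder
  data_labels=None)
stats = iotbx.merging_statistics.dataset_statistics(
  i_obs=i_obs, n_bins=20, anomalous=False)
print(stats.overall.r_merge, stats.overall.r_pim, stats.overall.cc_one_half)
stats.show()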