def exercise_remark_290_interpretation():
  symmetry_operators = pdb.remark_290_interpretation.extract_symmetry_operators(
    remark_290_records=pdb.remark_290_interpretation.example.splitlines())
  assert symmetry_operators is not None
  assert len(symmetry_operators) == 4
  assert symmetry_operators[0] == sgtbx.rt_mx("X,Y,Z")
  assert symmetry_operators[1] == sgtbx.rt_mx("1/2-X,-Y,1/2+Z")
  assert symmetry_operators[2] == sgtbx.rt_mx("-X,1/2+Y,1/2-Z")
  assert symmetry_operators[3] == sgtbx.rt_mx("1/2+X,1/2-Y,-Z")
  for link_sym, expected_sym_op in [
      ("1555", "x,y,z"),
      ("1381", "x-2,y+3,z-4"),
      ("3729", "-x+2,1/2+y-3,1/2-z+4"),
      (" 3_729 ", "-x+2,1/2+y-3,1/2-z+4"),
      (" 3 729 ", "-x+2,1/2+y-3,1/2-z+4"),
      ("_3729", None),
      ("37_29", None)]:
    sym_op = pdb.remark_290_interpretation.get_link_symmetry_operator(
      symmetry_operators=symmetry_operators,
      link_sym=link_sym)
    if sym_op is None:
      assert expected_sym_op is None
    else:
      assert sym_op == sgtbx.rt_mx(expected_sym_op)
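# A minimal sketch of the link-symmetry convention exercised above (restated
# from the test data, not from an independent reference): "3729" selects
# symmetry operator no. 3 and adds the lattice translation
# (7-5, 2-5, 9-5) = (2, -3, 4).  Hand-rolled check:
from cctbx import sgtbx

op3 = sgtbx.rt_mx("-X,1/2+Y,1/2-Z")    # operator no. 3 in the list above
shift = sgtbx.tr_vec((2, -3, 4), 1)    # translation encoded by "729"
t = op3.t().plus(shift).new_denominator(op3.t().den())
assert sgtbx.rt_mx(op3.r(), t) == sgtbx.rt_mx("-x+2,1/2+y-3,1/2-z+4")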
def exercise_space_group_contains():
  g = sgtbx.space_group("P 2")
  for s in ["x,y,z", "-x,-y,z", "-x+1,-y-2,z+3"]:
    assert g.contains(sgtbx.rt_mx(s))
  for s in ["x,y,-z", "x+1/2,y,z"]:
    assert not g.contains(sgtbx.rt_mx(s))
  for symbols in sgtbx.space_group_symbol_iterator():
    g = sgtbx.space_group(symbols.hall())
    for s in g:
      assert g.contains(s)
  rnd = flex.mersenne_twister(seed=0)
  n_c = 0
  n_nc = 0
  for symbol in sgtbx.bravais_types.centric:
    g = sgtbx.space_group_info(symbol=symbol, space_group_t_den=144).group()
    for s in g.change_basis(sgtbx.change_of_basis_op("x+1/12,y-1/12,z+1/12")):
      if (rnd.random_double() < 0.9): continue  # avoid long runtime
      gc = sgtbx.space_group(g)
      gc.expand_smx(s)
      if (gc.order_z() == g.order_z()):
        assert g.contains(s)
        n_c += 1
      else:
        assert not g.contains(s)
        n_nc += 1
  assert n_c == 11, n_c
  assert n_nc == 53, n_nc
def exercise_monoclinic_cell_choices_core(space_group_number, verbose): # transformation matrices for cell choices # columns are basis vectors "new in terms of old" # see Int. Tab. Vol. A, p. 22, Fig. 2.2.6.4. b1 = (1, 0, 0, 0, 1, 0, 0, 0, 1) b2 = (-1, 0, 1, 0, 1, 0, -1, 0, 0) b3 = (0, 0, -1, 0, 1, 0, 1, 0, -1) flip = (0, 0, 1, 0, -1, 0, 1, 0, 0) p3s = sgtbx.space_group("P 3*") done = {} ref = sgtbx.space_group_info(number=space_group_number) ref_uhm = ref.type().universal_hermann_mauguin_symbol() for i_fl,fl in enumerate([b1, flip]): rfl = sgtbx.rot_mx(fl) cfl = sgtbx.change_of_basis_op(sgtbx.rt_mx(rfl)) for i_rt,rt in enumerate(p3s): rp3 = rt.r() cp3 = sgtbx.change_of_basis_op(sgtbx.rt_mx(rp3)) for i_cs,cs in enumerate([b1,b2,b3]): rcs = sgtbx.rot_mx(cs).inverse() ccs = sgtbx.change_of_basis_op(sgtbx.rt_mx(rcs)) cb_all = cp3 * cfl * ccs refcb = ref.change_basis(cb_all) refcb2 = sgtbx.space_group_info(symbol=ref_uhm+"("+str(cb_all.c())+")") assert refcb2.group() == refcb.group() s = sgtbx.space_group_symbols(str(refcb)) q = s.qualifier() hm = str(refcb) if (0 or verbose): print hm, q, cb_all.c() if (i_fl == 0): assert q[0] == "bca"[i_rt] if (len(q) == 2): assert q[1] == "123"[i_cs] elif (q[0] == "-"): assert q[1] == "bca"[i_rt] if (len(q) == 3): assert q[2] == "123"[i_cs] else: assert q[0] == "bca"[i_rt] if (len(q) == 2 and q[1] != "123"[i_cs]): assert done[hm] == 1 done.setdefault(hm, 0) done[hm] += 1 assert len(done) in [3, 9, 18] assert done.values() == [18/len(done)]*len(done) if (0 or verbose): print return done
def exercise_orthorhombic_hm_qualifier_as_cb_symbol(): cb_symbols = { "cab": ["c,a,b", "z,x,y"], "a-cb": ["a,-c,b", "x,-z,y"], "-cba": ["-c,b,a", "-z,y,x"], "bca": ["b,c,a", "y,z,x"], "ba-c": ["b,a,-c", "y,x,-z"]} for sgsyms1 in sgtbx.space_group_symbol_iterator(): n = sgsyms1.number() if (n < 16 or n > 74): continue q = sgsyms1.qualifier() if (len(q) == 0): continue e = sgsyms1.extension() if (e == "\0"): e = "" ehm = sgtbx.space_group_symbols( space_group_number=n, extension=e).universal_hermann_mauguin() cabc, cxyz = cb_symbols[q] assert sgtbx.change_of_basis_op(cxyz).as_abc() == cabc assert sgtbx.change_of_basis_op(cabc).as_xyz() == cxyz uhm_xyz = ehm + " ("+cxyz+")" sgsyms2 = sgtbx.space_group_symbols(symbol=uhm_xyz) assert sgsyms2.change_of_basis_symbol() == cxyz assert sgsyms2.extension() == sgsyms1.extension() assert sgsyms2.universal_hermann_mauguin() == uhm_xyz g1 = sgtbx.space_group(space_group_symbols=sgsyms1) g2 = sgtbx.space_group(space_group_symbols=sgsyms2) assert g2 == g1 g2 = sgtbx.space_group( sgtbx.space_group_symbols(symbol=ehm)).change_basis( sgtbx.change_of_basis_op(sgtbx.rt_mx(cxyz))) assert g2 == g1 for c in [cxyz, cabc]: g2 = sgtbx.space_group_info( group=sgtbx.space_group( sgtbx.space_group_symbols(symbol=ehm))).change_basis(c).group() assert g2 == g1 cit = sgtbx.rt_mx(cxyz).r().inverse().transpose() cit_xyz = cit.as_xyz() g2 = sgtbx.space_group_info( group=sgtbx.space_group( sgtbx.space_group_symbols(symbol=ehm))).change_basis(cit_xyz).group() assert g2 == g1 assert cit.as_xyz(False, "abc") == cabc uhm_abc = ehm + " ("+cabc+")" sgsyms2 = sgtbx.space_group_symbols(symbol=uhm_abc) assert sgsyms2.change_of_basis_symbol() == cxyz assert sgsyms2.extension() == sgsyms1.extension() assert sgsyms2.universal_hermann_mauguin() == uhm_xyz g2 = sgtbx.space_group(space_group_symbols=sgsyms2) assert g2 == g1
def run(args):
  if "--full" in args:
    to_do = range(1, 230 + 1)
  elif "--special" in args:
    to_do = sorted(special.keys())
  else:
    to_do = [75, 151]
  for space_group_number in to_do:
    sgi = sgtbx.space_group_info(number=space_group_number)
    sgi.show_summary(prefix="")
    sys.stdout.flush()
    n_special = 0
    for m in scitbx.math.unimodular_generator(range=1).all():
      cb_op = sgtbx.change_of_basis_op(
        sgtbx.rt_mx(sgtbx.rot_mx(m, 1), 1)).new_denominators(12, 144)
      cb_sgi = sgi.change_basis(cb_op=cb_op)
      cb_op_ref = cb_sgi.change_of_basis_op_to_reference_setting()
      ref_sgi = cb_sgi.change_basis(cb_op=cb_op_ref)
      assert ref_sgi.group() == sgi.group()
      c = cb_op_ref.c()
      if c.r().is_unit_mx() and c.t().num() != (0, 0, 0):
        n_special += 1
        cb_ref_sgi = sgi.change_basis(cb_op=cb_op_ref)
        print " cb_op=%s -> %s" % (
          str(cb_op.c()),
          cb_ref_sgi.type().universal_hermann_mauguin_symbol())
        sys.stdout.flush()
        # verify that c.t() is not an allowed origin shift
        assert cb_ref_sgi.group() != sgi.group()
    assert special.get(space_group_number, 0) == n_special
  print format_cpu_times()
def list_all_axes(space_group_symbol=None, space_group_info=None): assert space_group_symbol is None or space_group_info is None shift_range = 1 # XXX Works for the 230 reference settings; it is not # XXX clear to me (rwgk) what value is needed in general. if (space_group_symbol is not None): space_group_info = sgtbx.space_group_info(symbol=space_group_symbol) space_group_info.show_summary() print print "Rotation type, Axis direction, Intrinsic part, Origin shift" axes_dict = {} for s in space_group_info.group(): r = s.r() t = s.t() shift = [0,0,0] for shift[0] in xrange(-shift_range,shift_range+1): for shift[1] in xrange(-shift_range,shift_range+1): for shift[2] in xrange(-shift_range,shift_range+1): ts = t.plus(sgtbx.tr_vec(shift, 1)).new_denominator(t.den()) ss = sgtbx.rt_mx(r, ts) axes_dict[rt_mx_analysis(ss)] = 0 axes_list = axes_dict.keys() axes_list.sort() for a in axes_list: print a print
def run(): settings = [0] for i in xrange(1, 231): settings.append({}) list_cb_op = [] for xyz in ("x,y,z", "z,x,y", "y,z,x"): list_cb_op.append(sgtbx.change_of_basis_op(sgtbx.rt_mx(xyz))) n_built = 0 for i in sgtbx.space_group_symbol_iterator(): hall_symbol = i.hall() for z in "PABCIRHF": hall_z = hall_symbol[0] + z + hall_symbol[2:] for cb_op in list_cb_op: group = sgtbx.space_group(hall_z).change_basis(cb_op) sg_type = group.type() settings[sg_type.number()][sg_type.lookup_symbol()] = 0 n_built += 1 print "# n_built =", n_built n_non_redundant = 0 print "settings = (" for i in xrange(1, 231): print "#", i symbols = settings[i].keys() symbols.sort() for s in symbols: print "'" + s + "'," n_non_redundant += 1 print ")" print "# n_non_redundant =", n_non_redundant
def exercise():
  ma = miller.array(
    miller.set(crystal.symmetry(unit_cell=(5,5,5, 90, 90, 90),
                                space_group=sgtbx.space_group('P 2x')),
               indices=flex.miller_index(
                 [(1,0,0), (0,1,0), (0,0,1),
                  (-1,0,0), (0,-1,0), (0,0,-1),
                  (1,1,0), (1,0,1), (0,1,1),
                  (-1,-1,0), (-1,0,-1), (0,-1,-1),
                  (1,-1,0), (1,0,-1), (0,1,-1),
                  (-1,1,0), (-1,0,1), (0,-1,1),
                  (1,1,1), (-1,1,1), (1,-1,1), (1,1,-1),
                  (-1,-1,-1), (1,-1,-1), (-1,1,-1), (-1,-1,1)])),
    data=flex.complex_double(flex.random_double(26), flex.random_double(26)))
  f_at_h = dict(zip(ma.indices(), ma.data()))
  for op in ("-x, y+1/2, -z", "x+1/2, -y, z-1/2"):
    op = sgtbx.rt_mx(op)
    original, transformed = ma.common_sets(
      ma.change_basis(sgtbx.change_of_basis_op(op.inverse())))
    for h, f in original:
      assert f == f_at_h[h]
    for h, op_f in transformed:
      assert approx_equal(
        op_f,
        f_at_h[h*op.r()]*exp(1j*2*pi*row(h).dot(col(op.t().as_double()))))
def convert(file_object):
  """ Exemplify the direct use of the tool from shelx.lexer

      In practice, one is strongly encouraged to make use of the tools from
      shelx.parsers: that is to say, for the task handled here,
      crystal_symmetry_parser (the code to follow just parrots the
      implementation of crystal_symmetry_parser).
  """
  space_group = None
  for command, line in shelx.command_stream(file=file_object):
    cmd, args = command[0], command[-1]
    if cmd == "LATT":
      assert space_group is None
      assert len(args) == 1
      space_group = sgtbx.space_group()
      n = int(args[0])
      if n > 0:
        space_group.expand_inv(sgtbx.tr_vec((0,0,0)))
      z = "*PIRFABC"[abs(n)]
      space_group.expand_conventional_centring_type(z)
    elif cmd == "SYMM":
      assert space_group is not None
      assert len(args) == 1
      s = sgtbx.rt_mx(args[0])
      space_group.expand_smx(s)
    elif cmd == "SFAC":
      return sgtbx.space_group_info(group=space_group)
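# A minimal usage sketch for convert() above (assumes Python 3 for io.StringIO
# and assumes shelx.command_stream hands the whole SYMM expression over as a
# single argument).  The LATT/SYMM fragment below is illustrative and
# describes P 21/c.
from io import StringIO

ins_fragment = StringIO(
  "LATT 1\n"
  "SYMM -X, 1/2+Y, 1/2-Z\n"
  "SFAC C H N O\n")
print(convert(ins_fragment))   # expected: P 1 21/c 1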
def distances_as_cif_loop(xray_structure, proxies): space_group_info = sgtbx.space_group_info(group=xray_structure.space_group()) unit_cell = xray_structure.unit_cell() sites_cart = xray_structure.sites_cart() site_labels = xray_structure.scatterers().extract_labels() fmt = "%.4f" loop = model.loop(header=( "_restr_distance_atom_site_label_1", "_restr_distance_atom_site_label_2", "_restr_distance_site_symmetry_2", "_restr_distance_target", "_restr_distance_target_weight_param", "_restr_distance_diff" )) for proxy in proxies: restraint = geometry_restraints.bond( unit_cell=unit_cell, sites_cart=sites_cart, proxy=proxy) i_seqs = proxy.i_seqs sym_op = proxy.rt_mx_ji if sym_op is None: sym_op = sgtbx.rt_mx() loop.add_row((site_labels[i_seqs[0]], site_labels[i_seqs[1]], space_group_info.cif_symmetry_code(sym_op), fmt % restraint.distance_ideal, fmt % math.sqrt(1/restraint.weight), fmt % restraint.delta)) return loop
def __init__(self, f_obs, ncs_pairs, reflections_per_bin): adopt_init_args(self, locals()) # Create bins f_obs.setup_binner(reflections_per_bin = reflections_per_bin) self.binner = f_obs.binner() n_bins = self.binner.n_bins_used() self.n_bins = n_bins self.SigmaN = None self.update_SigmaN() # self.rbin = flex.int(f_obs.data().size(), -1) for i_bin in self.binner.range_used(): for i_seq in self.binner.array_indices(i_bin): self.rbin[i_seq] = i_bin-1 # i_bin starts with 1, not 0 ! assert flex.min(self.rbin)==0 assert flex.max(self.rbin)==n_bins-1 # Extract symmetry matrices self.sym_matrices = [] for m_as_string in f_obs.space_group().smx(): o = sgtbx.rt_mx(symbol=str(m_as_string), t_den=f_obs.space_group().t_den()) m_as_double = o.r().as_double() self.sym_matrices.append(m_as_double) self.gradient_evaluator = None self.target_and_grads = ext.tncs_eps_factor_refinery( tncs_pairs = self.ncs_pairs, f_obs = self.f_obs.data(), sigma_f_obs = self.f_obs.sigmas(), rbin = self.rbin, SigmaN = self.SigmaN, space_group = self.f_obs.space_group(), miller_indices = self.f_obs.indices(), fractionalization_matrix = self.f_obs.unit_cell().fractionalization_matrix(), sym_matrices = self.sym_matrices) self.update()
def __init__(self, file_name, header_only=False): self.file_name = os.path.normpath(file_name) f = open(file_name) line = f.readline() assert line[5] == " " n_sym_ops_from_file = int(line[:5].strip()) assert n_sym_ops_from_file > 0 self.space_group_symbol = line[6:].strip() self.space_group_from_ops = sgtbx.space_group() for i in xrange(n_sym_ops_from_file): line = f.readline().rstrip() assert len(line) == 27 r = sgtbx.rot_mx([int(line[j*3:(j+1)*3]) for j in xrange(9)], 1) line = f.readline().rstrip() assert len(line) == 9 t = sgtbx.tr_vec([int(line[j*3:(j+1)*3]) for j in xrange(3)], 12) self.space_group_from_ops.expand_smx(sgtbx.rt_mx(r, t)) f.close() if (header_only): self.original_indices = None return all_arrays = scalepack_ext.no_merge_original_index_arrays( file_name, n_sym_ops_from_file*2+1) self.original_indices = all_arrays.original_indices() self.unique_indices = all_arrays.unique_indices() self.batch_numbers = all_arrays.batch_numbers() self.centric_tags = all_arrays.centric_tags() self.spindle_flags = all_arrays.spindle_flags() self.asymmetric_unit_indices = all_arrays.asymmetric_unit_indices() self.i_obs = all_arrays.i_obs() self.sigmas = all_arrays.sigmas()
def exercise_symmetry_equivalent(): xs = xray.structure( crystal_symmetry=crystal.symmetry( unit_cell=(1, 2, 3), space_group_symbol='hall: P 2x'), scatterers=flex.xray_scatterer(( xray.scatterer("C", site=(0.1, 0.2, 0.3)), ))) xs.scatterers()[0].flags.set_grad_site(True) connectivity_table = smtbx.utils.connectivity_table(xs) reparametrisation = constraints.reparametrisation( xs, [], connectivity_table) site_0 = reparametrisation.add(constraints.independent_site_parameter, scatterer=xs.scatterers()[0]) g = sgtbx.rt_mx('x,-y,-z') symm_eq = reparametrisation.add( constraints.symmetry_equivalent_site_parameter, site=site_0, motion=g) reparametrisation.finalise() assert approx_equal(symm_eq.original.scatterers[0].site, (0.1, 0.2, 0.3), eps=1e-15) assert str(symm_eq.motion) == 'x,-y,-z' assert symm_eq.is_variable reparametrisation.linearise() assert approx_equal(symm_eq.value, g*site_0.value, eps=1e-15) reparametrisation.store() assert approx_equal(symm_eq.value, (0.1, -0.2, -0.3), eps=1e-15) assert approx_equal(site_0.value, (0.1, 0.2, 0.3), eps=1e-15)
def find_space_group(self): decorated_symmetry_pool = [] denominator = 12**3 for i, (r, d) in enumerate(self.cross_correlation_peaks()): t = sgtbx.tr_vec((d*denominator).as_int(), tr_den=denominator) cb_op = sgtbx.change_of_basis_op(sgtbx.rt_mx(r, t)) phi_sym = self.f_in_p1.symmetry_agreement_factor( cb_op, assert_is_similar_symmetry=False) if phi_sym < self.phi_sym_acceptance_cutoff: status = possible_symmetry.accepted elif phi_sym < self.phi_sym_rejection_cutoff: status = possible_symmetry.unsure else: status = possible_symmetry.rejected decorated_symmetry_pool.append( (-status, i, possible_symmetry(r, d, phi_sym, status))) decorated_symmetry_pool.sort() self.symmetry_pool = [ item[-1] for item in decorated_symmetry_pool ] self.origin = mat.mutable_zeros(3) for symm in self.symmetry_pool: if symm.status != symm.accepted: continue symm.set_components_of_global_origin(self.origin) if self.origin.elems.count(0) == 0: break for symm in self.symmetry_pool: if symm.status != symm.accepted: continue symm.change_origin(self.origin) self.space_group.expand_smx(symm.rt)
def exercise_connectivity_table(): xs = development.sucrose() connectivity = utils.connectivity_table(xs) pair_counts = [ 2, 2, 1, 2, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 1, 2, 4, 1, 4, 1, 4, 1, 4, 1, 1, 4, 1, 4, 1, 4, 4, 1, 1, 4, 1, 4, 1, 4, 1, 4, 1, 1] assert approx_equal(connectivity.pair_asu_table.pair_counts(), pair_counts) connectivity.add_bond(0, 1) assert approx_equal( connectivity.pair_asu_table.pair_counts(), [3, 3, 1, 2, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 1, 2, 4, 1, 4, 1, 4, 1, 4, 1, 1, 4, 1, 4, 1, 4, 4, 1, 1, 4, 1, 4, 1, 4, 1, 4, 1, 1]) connectivity.add_bond(5, 5, rt_mx_ji=sgtbx.rt_mx("x+1,y,z")) assert connectivity.pair_asu_table.pair_counts()[5] == 4 connectivity.remove_bond(0, 1) connectivity.remove_bond(5,5, rt_mx_ji=sgtbx.rt_mx("x+1,y,z")) assert approx_equal(connectivity.pair_asu_table.pair_counts(), pair_counts)
def generate_unimodular_cells(cell):
  Amat = sqr(cell.orthogonalization_matrix()).transpose()
  for m in unimodular_generator(range=1).all():
    c_inv = sgtbx.rt_mx(sgtbx.rot_mx(m))
    orientation_similarity_cb_op = sgtbx.change_of_basis_op(c_inv).inverse()
    new_cell = cell.change_basis(orientation_similarity_cb_op)
    yield new_cell, orientation_similarity_cb_op
def change_origin(self, origin):
  o = self.raw_origin - origin
  t_l = self.one_minus_r*o
  t_l = sgtbx.tr_vec((sg_t_den*t_l).as_int(), tr_den=sg_t_den)
  self.rt = sgtbx.rt_mx(self.r, self.t_i.plus(t_l).new_denominator(sg_t_den))
  tr_info = sgtbx.translation_part_info(self.rt)
  self.origin = tr_info.origin_shift()
  self.t_i = tr_info.intrinsic_part()
def issue_merohedral_twinning(self):
  twin_law = self.instructions['twin'].get('matrix', sgtbx.rt_mx('-x,-y,-z'))
  n = self.instructions['twin'].get('n', 2)
  assert n - 1 == len(self.instructions['basf'])
  self.builder.make_merohedral_twinning(
    fractions=self.instructions['basf'],
    twin_law=twin_law)
def augm_arr(sym_op='x,y,z'):
  """returns augmented array corresponding to the symmetry operation sym_op"""
  mtr = sgtbx.rt_mx(sym_op)
  # constructing augmented matrix of the symmetry operation
  augm_arr = numpy.reshape(
    numpy.array(mtr.as_4x4_rational(), dtype=numpy.float), (4, 4))
  return augm_arr
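# Hypothetical use of augm_arr() above: the augmented matrix of a 2(1) screw
# axis along b.  The layout (rotation in the upper-left 3x3, translation in
# the last column, bottom row 0 0 0 1) is an assumption about
# as_4x4_rational(); the printout below is approximate numpy formatting.
print(augm_arr("-x,y+1/2,-z"))
# [[-1.   0.   0.   0. ]
#  [ 0.   1.   0.   0.5]
#  [ 0.   0.  -1.   0. ]
#  [ 0.   0.   0.   1. ]]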
def get_all_axes(space_group_symbol=None, space_group_info=None, extension=0):
  assert space_group_symbol is None or space_group_info is None
  shift_range = 1 # RWGK Works for the 230 reference settings; it is not
                  # RWGK clear to me (rwgk) what value is needed in general.
  if (space_group_symbol is not None):
    space_group_info = sgtbx.space_group_info(symbol=space_group_symbol)
  #space_group_info.show_summary()
  axes_dict = {}
  for smx in space_group_info.group():
    r = smx.r()
    t = smx.t()
    shift = [0,0,0]
    for shift[0] in range(-shift_range,shift_range+1):
      for shift[1] in range(-shift_range,shift_range+1):
        for shift[2] in range(-shift_range,shift_range+1):
          ts = t.plus(sgtbx.tr_vec(shift, 1)).new_denominator(t.den())
          m = sgtbx.rt_mx(r, ts)
          #print m
          rtmxanal = rlc_RTMxAnalysis(m)
          #print r, t, shift, ts, m
          if rtmxanal:
            #print rtmxanal
            axes_dict[rtmxanal] = 0
  axes_list = axes_dict.keys()
  axes_list.sort()
  # reject nonenantiomorphic space groups
  if len(axes_list) > 0 and not re.compile("[A-z]").search(space_group_symbol[1:]):
    try:
      sgtbx.space_group_info(space_group_symbol).show_summary(),
      #print len(axes_list), space_group_symbol
    except:
      print space_group_info, space_group_symbol
      print
      sys.exit(1)
    axes = []
    for a in axes_list:
      if len(a) == 3 and len(a[1]) == 3 and len(a[2]) == 3:
        tmp_dict = {}
        print "%4s %7.4f %7.4f %7.4f %7.4f %7.4f %7.4f " % (
          a[0], a[1][0], a[1][1], a[1][2], a[2][0], a[2][1], a[2][2])
        tmp_dict['symb'] = a[0]
        start_array = N.asarray(a[1])
        end_array = N.asarray(a[2])
        start_vec = start_array - (end_array - start_array)*extension
        end_vec = end_array + (end_array - start_array)*extension
        tmp_dict['start'] = start_vec
        tmp_dict['end'] = end_vec
        #rlc# tmp_dict['start'] = a[1]
        #rlc# tmp_dict['end'] = a[2]
        axes.append(tmp_dict)
      else:
        print a
  else:
    return None
  return axes
def __call__(self, params, options): ''' Import the integrate.hkl file. ''' from iotbx.xds import integrate_hkl from dials.array_family import flex from dials.util.command_line import Command from cctbx import sgtbx # Get the unit cell to calculate the resolution uc = self._experiment.crystal.get_unit_cell() # Read the INTEGRATE.HKL file Command.start('Reading INTEGRATE.HKL') handle = integrate_hkl.reader() handle.read_file(self._integrate_hkl) hkl = flex.miller_index(handle.hkl) xyzcal = flex.vec3_double(handle.xyzcal) xyzobs = flex.vec3_double(handle.xyzobs) iobs = flex.double(handle.iobs) sigma = flex.double(handle.sigma) rlp = flex.double(handle.rlp) peak = flex.double(handle.peak) * 0.01 Command.end('Read %d reflections from INTEGRATE.HKL file.' % len(hkl)) # Derive the reindex matrix rdx = self.derive_reindex_matrix(handle) print 'Reindex matrix:\n%d %d %d\n%d %d %d\n%d %d %d' % (rdx.elems) # Reindex the reflections Command.start('Reindexing reflections') cb_op = sgtbx.change_of_basis_op(sgtbx.rt_mx(sgtbx.rot_mx(rdx.elems))) hkl = cb_op.apply(hkl) Command.end('Reindexed %d reflections' % len(hkl)) # Create the reflection list Command.start('Creating reflection table') table = flex.reflection_table() table['id'] = flex.int(len(hkl), 0) table['panel'] = flex.size_t(len(hkl), 0) table['miller_index'] = hkl table['xyzcal.px'] = xyzcal table['xyzobs.px.value'] = xyzobs table['intensity.cor.value'] = iobs table['intensity.cor.variance'] = sigma**2 table['intensity.prf.value'] = iobs * peak / rlp table['intensity.prf.variance'] = (sigma * peak / rlp)**2 table['lp'] = 1.0 / rlp table['d'] = flex.double(uc.d(h) for h in hkl) Command.end('Created table with {0} reflections'.format(len(table))) # Output the table to pickle file if params.output.filename is None: params.output.filename = 'integrate_hkl.pickle' Command.start('Saving reflection table to %s' % params.output.filename) table.as_pickle(params.output.filename) Command.end('Saved reflection table to %s' % params.output.filename)
def combined_cb_op(self, other, cb_op):
  sc = self.change_of_basis_op_to_minimum_cell
  oc = other.change_of_basis_op_to_minimum_cell
  cb_op = cb_op.new_denominators(sc)
  best_choice = None
  best_choice_as_hkl = None
  for s_symop in self.minimum_cell_symmetry.space_group():
    s_symop = sgtbx.change_of_basis_op(
      sgtbx.rt_mx(s_symop.r())).new_denominators(sc)
    for o_symop in other.minimum_cell_symmetry.space_group():
      o_symop = sgtbx.change_of_basis_op(
        sgtbx.rt_mx(o_symop.r())).new_denominators(sc)
      possible_choice = sc.inverse() * s_symop * cb_op * o_symop * oc
      possible_choice_as_hkl = possible_choice.as_hkl()
      if (best_choice_as_hkl is None
          or sgtbx.compare_cb_op_as_hkl(
               best_choice_as_hkl, possible_choice_as_hkl) > 0):
        best_choice = possible_choice
        best_choice_as_hkl = possible_choice_as_hkl
  assert best_choice is not None
  return best_choice
def _nnnmmm_operator(record, expected_nnn):
  if (not record.startswith("REMARK 290 ")):
    return None
  flds = record.split()
  if (len(flds) != 4):
    return None
  nnnmmm = flds[2]
  if (len(nnnmmm) < 4):
    return None
  nnn, mmm = nnnmmm[:-3], nnnmmm[-3:]
  if (mmm != "555"):
    return None
  try:
    nnn = int(nnn)
  except ValueError:
    return None
  if (nnn != expected_nnn):
    return None
  try:
    return sgtbx.rt_mx(flds[3])
  except RuntimeError:
    return None
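# Illustration of the record format handled above: a line from the REMARK 290
# symmetry-operator table ("NNNMMM   OPERATOR").  The exact spacing is
# illustrative; only the four whitespace-separated fields matter.
op = _nnnmmm_operator("REMARK 290       2555   1/2-X,-Y,1/2+Z", expected_nnn=2)
assert op == sgtbx.rt_mx("1/2-X,-Y,1/2+Z")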
def exercise_change_of_basis_between_arbitrary_space_groups(): from random import randint from scitbx import matrix as mat g = sgtbx.space_group_info('hall: P 2') h = sgtbx.space_group_info('hall: P 2c') assert g.change_of_basis_op_to(h) is None g = sgtbx.space_group_info('hall: C 2c 2 (x+y,x-y,z)') h = g.change_basis(sgtbx.change_of_basis_op(sgtbx.rt_mx('-y,x,z'))) cb_op = g.change_of_basis_op_to(h) assert cb_op.as_xyz() == 'x,y,z+1/4' assert g.change_basis(cb_op).group() == h.group() g = sgtbx.space_group_info('hall: I 4 2 3') z2p_op = g.change_of_basis_op_to_primitive_setting() h = g.change_basis(z2p_op) cb_op = g.change_of_basis_op_to(h) assert cb_op.c() == z2p_op.c(), (cb_op.as_xyz(), z2p_op.as_xyz()) for i in xrange(1, 231): s = sgtbx.space_group_symbols(space_group_number=i) g = sgtbx.space_group_info(group=sgtbx.space_group(s, t_den=24)) o = tuple([ randint(0, 23) for j in xrange(3) ]) cb_op = sgtbx.change_of_basis_op(sgtbx.rt_mx(sgtbx.tr_vec(o, 24))) h = g.change_basis(cb_op) cb_op_1 = g.change_of_basis_op_to(h) assert cb_op_1.c().r().is_unit_mx() delta = (mat.col(cb_op_1.c().t().as_double()) - mat.col(cb_op.c().t().as_double())) assert h.is_allowed_origin_shift(delta, tolerance=1e-12) z2p_op = g.change_of_basis_op_to_primitive_setting() h = g.change_basis(z2p_op) cb_op = g.change_of_basis_op_to(h) h1 = g.change_basis(cb_op) assert (h.as_reference_setting().group() == h1.as_reference_setting().group())
def remove_bond(self, i_seq, j_seq, rt_mx_ji=sgtbx.rt_mx()):
  space_group = self.pair_asu_table.asu_mappings().space_group()
  r_den, t_den = space_group.r_den(), space_group.t_den()
  if j_seq < i_seq:
    i_seq, j_seq = j_seq, i_seq
    if not rt_mx_ji.is_unit_mx():
      rt_mx_ji = rt_mx_ji.inverse()
  if j_seq not in self.pair_sym_table[i_seq]:
    return
  for i, rt_mx in enumerate(self.pair_sym_table[i_seq][j_seq]):
    if (rt_mx.new_denominators(r_den, t_den)
        == rt_mx_ji.new_denominators(r_den, t_den)):
      del self.pair_sym_table[i_seq][j_seq][i]
      self._pair_asu_table_needs_updating = True
def add_bond(self, i_seq, j_seq, rt_mx_ji=sgtbx.rt_mx()):
  try:
    self.pair_asu_table.add_pair(i_seq, j_seq, rt_mx_ji)
  except RuntimeError:
    sites_frac = self.structure.sites_frac()
    sites = [sites_frac[i_seq], sites_frac[j_seq]]
    if not rt_mx_ji.is_unit_mx():
      sites[-1] = rt_mx_ji * sites[-1]
    d = self.structure.unit_cell().distance(sites[0], sites[1])
    self.pair_sym_table = self.pair_asu_table.extract_pair_sym_table()
    self._pair_asu_table = crystal.pair_asu_table(
      self.structure.asu_mappings(
        buffer_thickness=max(self.buffer_thickness, d)))
    self._pair_asu_table.add_pair_sym_table(self.pair_sym_table)
    self.pair_asu_table.add_pair(i_seq, j_seq, rt_mx_ji)
def verify_definitions_in_paper_zwart_2007(): # Verification of definitions in Peter Zwart's paper for the # CCP4 Study Weekend Jan 2007. # cb_symbol_xyz = "x-y,x+y,z" cb_symbol_abc = "1/2*a-1/2*b,1/2*a+1/2*b,c" # # Verify the claim that cb_symbol_abc is the inverse transpose of # cb_symbol_xyz. cb_mx_xyz = sgtbx.rt_mx(cb_symbol_xyz, r_den=12, t_den=144) assert sgtbx.rt_mx(cb_mx_xyz.r().inverse().transpose()).as_xyz( symbol_letters="abc") == cb_symbol_abc # uhmx = "C 1 2 1 (%s)" % cb_symbol_xyz uhma = "C 1 2 1 (%s)" % cb_symbol_abc sx = sgtbx.space_group_info(symbol=uhmx) sa = sgtbx.space_group_info(symbol=uhma) assert sx.group() == sa.group() # # We trust that the cctbx is self-consistent. structure_unconv = random_structure.xray_structure( space_group_info=sx, elements=["C"], volume_per_atom=100, general_positions_only=True) assert str(structure_unconv.space_group_info()) == uhmx cb_op = structure_unconv.change_of_basis_op_to_reference_setting() structure_reference = structure_unconv.change_basis(cb_op=cb_op) assert str(structure_reference.space_group_info()) == "C 1 2 1" # # Verify the definitions in the paper based on the assumption # that the cctbx is self-consistent. site_reference = structure_reference.scatterers()[0].site site_unconv_direct = cb_mx_xyz * site_reference assert approx_equal( site_unconv_direct, structure_unconv.scatterers()[0].site)
def SetSymop(self, value):
  if type(value) == type(u'abc'):
    value = value.encode("ascii", "ignore")
  if (value is None) or (value is Auto):
    ValidatedTextCtrl.SetValue(self, "")
  elif (isinstance(value, str)):
    try:
      from cctbx import sgtbx
      rt_mx = sgtbx.rt_mx(symbol=value)
    except ValueError:
      raise Sorry("Inappropriate value '%s' for %s." % (value, self.GetName()))
    else:
      ValidatedTextCtrl.SetValue(self, str(value))
  else:
    raise TypeError("Type '%s' not allowed!" % type(value).__name__)
def run():
  while 1:
    line = sys.stdin.readline()[:-1]
    flds = line.split(None, 2)
    if (len(flds) == 0):
      break
    nspgrp = int(flds[0])             # read spacegroup number
    nsym = int(flds[1])               # read nsym
    print(nspgrp, nsym, flds[2])      # print it all
    group = sgtbx.space_group()       # now interpret the symops
    for i in range(nsym):
      line = sys.stdin.readline()[:-1]  # get the i'th symop
      # print line
      group.expand_smx(sgtbx.rt_mx(line))  # and interpret
    info = sgtbx.space_group_info(group=group)
    print(info.type().hall_symbol())  # now produce the sg symbol
    print(info)
def exercise_double_coset_decomposition(crystal_symmetry_ri, lattice_group, miller_array_subs, miller_array_sub_as, miller_array_sub_bs, coset_decompositions, i, j, verbose): group_a = miller_array_subs[i].space_group_info().group() group_b = miller_array_subs[j].space_group_info().group() miller_array_sub_a = miller_array_sub_as[i] miller_array_sub_b = miller_array_sub_bs[j] single_coset_ccs = {} for partition in coset_decompositions[i].partitions: cb_op = sgtbx.change_of_basis_op(partition[0]) cb = miller_array_sub_a.change_basis(cb_op).map_to_asu() cc = cb.correlation(other=miller_array_sub_b).coefficient() key = "%.6f" % cc single_coset_ccs.setdefault(key, []).append(str(partition[0])) double_coset_ccs = {} for c in sgtbx.cosets.double_unique(lattice_group, group_a, group_b): cb_op = sgtbx.change_of_basis_op(c) cb = miller_array_sub_b.change_basis(cb_op).map_to_asu() cc = cb.correlation(other=miller_array_sub_a).coefficient() key = "%.6f" % cc double_coset_ccs.setdefault(key, []).append(str(c)) double_coset_repetitions = [len(v) for v in double_coset_ccs.values()] failure = (max(double_coset_repetitions) > 1) if (failure or verbose): print( [str(sgtbx.space_group_info(group=g)) for g in (group_a, group_b)]) print("single_coset ops:", list(single_coset_ccs.values())) print("double_coset ops:", list(double_coset_ccs.values())) if (failure): for cc, ops in double_coset_ccs.items(): if (len(ops) > 1): print(cc, ops) for op in ops: ri = sgtbx.rt_mx(op).r().info() print(" ", ri.type(), ri.sense(), ri.ev()) raise RuntimeError("max(double_coset_repetitions) > 1") single_coset_ccs = list(single_coset_ccs.keys()) double_coset_ccs = list(double_coset_ccs.keys()) single_coset_ccs.sort() double_coset_ccs.sort() failure = (double_coset_ccs != single_coset_ccs) if (failure or verbose): print("single_coset_ccs:", single_coset_ccs) print("double_coset_ccs:", double_coset_ccs) if (failure): raise RuntimeError("double_coset_ccs != single_coset_ccs")
def space_group_from_remark_symop(self):
  from cctbx import sgtbx
  result = None
  for remark in self.remarks:
    remark = remark.lstrip()[6:].strip().replace(" ", "").lower()
    if (remark.startswith("symop(") and remark.endswith(")")):
      s = remark[6:-1]
      try:
        s = sgtbx.rt_mx(s)
      except RuntimeError:
        pass
      else:
        if (result is None):
          result = sgtbx.space_group()
        result.expand_smx(s)
  return result
def algebraic(self):
  op = self.operation()
  # eliminate minus signs to make the expression look more like
  # what people are used to seeing
  r = op.r().num()
  d = op.r().den()
  signs = [None, None, None]
  for j in xrange(3):
    for i in xrange(3):
      rij = r[i * 3 + j]
      if (signs[i] is None and rij != 0):
        if (rij < 0):
          signs[i] = -1
        else:
          signs[i] = 1
  m = list(sgtbx.rot_mx(d, d).num())
  for i in xrange(3):
    if (signs[i] == -1):
      m[i * 4] *= -1
  return str(sgtbx.rt_mx(op.r().multiply(sgtbx.rot_mx(m, d)), op.t()))
def exercise_comprehensive(args): if ("--verbose" in args): out = sys.stdout else: out = StringIO() if ("--paranoid" in args): cb_range = 2 else: cb_range = 1 for symbol in bravais_types.acentric: print("bravais type:", symbol) sym = sgtbx.space_group_info(symbol=symbol) \ .any_compatible_crystal_symmetry(volume=1000) \ .niggli_cell() abc = list(sym.unit_cell().parameters()[:3]) abc.sort() for cb_elements in flex.nested_loop([-cb_range] * 9, [cb_range + 1] * 9): r = sgtbx.rot_mx(cb_elements) if (r.determinant() != 1): continue cb_op = sgtbx.change_of_basis_op(sgtbx.rt_mx(r)) sym_cb = sym.change_basis(cb_op) abc_cb = list(sym_cb.unit_cell().parameters()[:3]) abc_cb.sort() for x, y in zip(abc, abc_cb): assert y - x > -1.e-6 if (y - x > 1.e-4): break else: print("cb_ob:", cb_op.c(), cb_elements, file=out) assert min(cb_elements) >= -1 assert max(cb_elements) <= 1 for s in sym_cb.space_group(): assert s.r().den() == 1 r_num = s.r().num() print("r:", r_num, file=out) assert min(r_num) >= -1 assert max(r_num) <= 1 for enforce in [False, True]: lattice_group = sgtbx.lattice_symmetry.group( reduced_cell=sym_cb.unit_cell(), max_delta=1.4, enforce_max_delta_for_generated_two_folds=enforce) assert lattice_group == sym_cb.space_group() sys.stdout.flush() print(file=out)
def bond_similarity_as_cif_loops(xray_structure, proxies): space_group_info = sgtbx.space_group_info( group=xray_structure.space_group()) unit_cell = xray_structure.unit_cell() sites_cart = xray_structure.sites_cart() site_labels = xray_structure.scatterers().extract_labels() fmt = "%.4f" loop = model.loop(header=( "_restr_equal_distance_atom_site_label_1", "_restr_equal_distance_atom_site_label_2", "_restr_equal_distance_site_symmetry_2", "_restr_equal_distance_class_id", )) class_loop = model.loop(header=( "_restr_equal_distance_class_class_id", "_restr_equal_distance_class_target_weight_param", "_restr_equal_distance_class_average", "_restr_equal_distance_class_esd", "_restr_equal_distance_class_diff_max", )) class_id = 0 for proxy in proxies: restraint = geometry_restraints.bond_similarity(unit_cell=unit_cell, sites_cart=sites_cart, proxy=proxy) class_id += 1 esd = math.sqrt( flex.sum(flex.pow2(restraint.deltas())) * (1. / proxy.i_seqs.size())) class_loop.add_row(( class_id, fmt % math.sqrt(1 / proxy.weights[0]), # assume equal weights fmt % restraint.mean_distance(), fmt % esd, fmt % flex.max_absolute(restraint.deltas()))) for i in range(proxy.i_seqs.size()): i_seq, j_seq = proxy.i_seqs[i] if proxy.sym_ops is None: sym_op = sgtbx.rt_mx() else: sym_op = proxy.sym_ops[i] loop.add_row( (site_labels[i_seq], site_labels[j_seq], space_group_info.cif_symmetry_code(sym_op), class_id)) return class_loop, loop
def derive_change_of_basis_op(from_hkl, to_hkl):

  # exclude those reflections that we couldn't index
  sel = (to_hkl != (0, 0, 0)) & (from_hkl != (0, 0, 0))
  assert sel.count(True) >= 3  # need minimum of 3 equations ?
  to_hkl = to_hkl.select(sel)
  from_hkl = from_hkl.select(sel)

  # for each miller index, solve a system of linear equations to find the
  # change of basis operator
  h, k, l = to_hkl.as_vec3_double().parts()

  r = []
  from scitbx.lstbx import normal_eqns
  for i in range(3):
    eqns = normal_eqns.linear_ls(3)
    for index, hkl in zip((h, k, l)[i], from_hkl):
      eqns.add_equation(
        right_hand_side=index,
        design_matrix_row=flex.double(hkl),
        weight=1)
    eqns.solve()
    r.extend(eqns.solution())

  from scitbx import matrix
  from scitbx.math import continued_fraction
  denom = 12
  r = [
    int(denom * continued_fraction.from_real(r_, eps=1e-2).as_rational())
    for r_ in r]
  r = matrix.sqr(r).transpose()
  # print (1/denom)*r

  # now convert into a cctbx change_of_basis_op object
  change_of_basis_op = sgtbx.change_of_basis_op(
    sgtbx.rt_mx(sgtbx.rot_mx(r, denominator=denom))).inverse()
  print("discovered change_of_basis_op=%s" % (str(change_of_basis_op)))

  # sanity check that this is the right cb_op
  assert (change_of_basis_op.apply(from_hkl) == to_hkl).count(False) == 0

  return change_of_basis_op
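# The rational "snapping" used above, in isolation: recover 1/3 from a noisy
# least-squares estimate and rescale it onto a denominator of 12 (the value
# 0.3341 is made up for illustration).
from scitbx.math import continued_fraction

r_ = continued_fraction.from_real(0.3341, eps=1e-2).as_rational()
print(r_, int(12 * r_))   # expected: 1/3 4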
def create_shelx_reflection_data_source(self, format,
                                        indices_transform=None,
                                        change_of_basis_op=None,
                                        data_scale=1):
  """ format is one of 3, 4, 5, etc.
      data_scale scales the data and their standard deviations
  """
  assert [indices_transform, change_of_basis_op].count(None) == 1
  if change_of_basis_op is None:
    if indices_transform.is_unit_mx():
      change_of_basis_op = sgtbx.change_of_basis_op()
    else:
      r = sgtbx.rt_mx(indices_transform.new_denominator(24).transpose())
      change_of_basis_op = sgtbx.change_of_basis_op(r).inverse()
  self.reflection_file_format = "hklf%i" % format
  self.data_change_of_basis_op = change_of_basis_op
  self.data_scale = data_scale
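# Sketch of the HKLF matrix handling above, outside the class: an integral
# transformation of the Miller indices corresponds to a change of basis built
# from its transpose and then inverted (restating the else-branch above; the
# example matrix simply swaps h and k and negates l).
from cctbx import sgtbx

indices_transform = sgtbx.rot_mx((0, 1, 0, 1, 0, 0, 0, 0, -1))
r = sgtbx.rt_mx(indices_transform.new_denominator(24).transpose())
cb_op = sgtbx.change_of_basis_op(r).inverse()
print(cb_op.as_hkl())   # expected: k,h,-l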
def compatible_symmetries(point_group):
  """ Primitive setting assumed """
  for op in point_group:
    r = op.r()
    order = r.order()
    if r.info().type() == 1:
      continue
    yield op
    invariants = [matrix.col(u) for u in r.info().basis_of_invariant()]
    if len(invariants) == 2:
      t1, t2 = invariants
      invariants.extend((t1 + t2, t1 - t2))
    translations = []
    for t in invariants:
      t = sgtbx.tr_vec(t, order).mod_short()
      if not t.is_zero() and t not in translations:
        translations.append(t)
    for t in translations:
      yield sgtbx.rt_mx(r, t.new_denominator(sgtbx.sg_t_den))
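# A minimal look at the rotation-part introspection used above (r.info()).
# For a two-fold about c the expected values are type 2 and eigenvector
# (0, 0, 1); marked "expected" since they follow the cctbx conventions.
from cctbx import sgtbx

two_fold = sgtbx.rt_mx('-x,-y,z')
info = two_fold.r().info()
print(info.type())   # expected: 2
print(info.ev())     # expected: (0, 0, 1)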
def _analyse_symmetry(self): sym_ops = [sgtbx.rt_mx(s).new_denominators(1, 12) for s in self.target.sym_ops] if not self.input_space_group: self._symmetry_analysis = SymmetryAnalysis( self.coords, sym_ops, self.subgroups, self.cb_op_inp_min ) logger.info(str(self._symmetry_analysis)) self.best_solution = self._symmetry_analysis.best_solution self.best_subgroup = self.best_solution.subgroup else: self.best_solution = None self._symmetry_analysis = None cosets = sgtbx.cosets.left_decomposition( self.target._lattice_group, self.best_subgroup["subsym"].space_group().build_derived_acentric_group(), ) self.reindexing_ops = self._reindexing_ops(self.coords, sym_ops, cosets)
def _analyse_symmetry(self): if self.input_space_group is not None: self.best_solution = None self._symmetry_analysis = None return sym_ops = [ sgtbx.rt_mx(s).new_denominators(1, 12) for s in self.target.get_sym_ops() ] self._symmetry_analysis = SymmetryAnalysis( self.coords, sym_ops, self.subgroups, self.cb_op_inp_min ) logger.info(str(self._symmetry_analysis)) self.best_solution = self._symmetry_analysis.best_solution self.best_subgroup = self.best_solution.subgroup cosets = sgtbx.cosets.left_decomposition( self.lattice_group, self.best_solution.subgroup["subsym"].space_group() ) self.params.cluster.n_clusters = len(cosets.partitions)
def simple_and_slow(pair_asu_table, max_shell=10): asu_mappings = pair_asu_table.asu_mappings() term_table = [] for i_seq_pivot, pair_asu_dict_pivot in enumerate(pair_asu_table.table()): rt_mx_pivot = asu_mappings.get_rt_mx(i_seq=i_seq_pivot, i_sym=0) if (pair_asu_dict_pivot.size() == 0): term_table.append([]) continue nodes_middle = [] nodes_next = [ node(asu_mappings=asu_mappings, i_seq=i_seq_pivot, rt_mx=sgtbx.rt_mx()) ] terms = [1] for i_shell_minus_1 in range(max_shell): nodes_prev = nodes_middle nodes_middle = nodes_next nodes_next = [] for node_m in nodes_middle: rt_mx_i = asu_mappings.get_rt_mx(i_seq=node_m.i_seq, i_sym=0) rt_mx_ni = node_m.rt_mx.multiply(rt_mx_i.inverse()) for j_seq, j_sym_groups in pair_asu_table.table()[ node_m.i_seq].items(): for j_sym_group in j_sym_groups: for j_sym in j_sym_group: rt_mx_j = asu_mappings.get_rt_mx(i_seq=j_seq, i_sym=j_sym) new_node = node(asu_mappings=asu_mappings, i_seq=j_seq, rt_mx=rt_mx_ni.multiply(rt_mx_j)) if (not find_node(test_node=new_node, node_list=nodes_prev) and not find_node(test_node=new_node, node_list=nodes_middle) and not find_node(test_node=new_node, node_list=nodes_next)): nodes_next.append(new_node) terms.append(len(nodes_next)) term_table.append(terms) return term_table
def chirality_as_cif_loop(xray_structure, proxies): space_group_info = sgtbx.space_group_info(group=xray_structure.space_group()) unit_cell = xray_structure.unit_cell() sites_cart = xray_structure.sites_cart() site_labels = xray_structure.scatterers().extract_labels() fmt = "%.4f" loop = model.loop(header=( "_restr_chirality_atom_site_label_1", "_restr_chirality_atom_site_label_2", "_restr_chirality_atom_site_label_3", "_restr_chirality_atom_site_label_4", "_restr_chirality_site_symmetry_1", "_restr_chirality_site_symmetry_2", "_restr_chirality_site_symmetry_3", "_restr_chirality_site_symmetry_4", "_restr_chirality_volume_target", "_restr_chirality_weight_param", "_restr_chirality_diff", )) unit_mxs = [sgtbx.rt_mx()]*4 for proxy in proxies: restraint = geometry_restraints.chirality( unit_cell=unit_cell, sites_cart=sites_cart, proxy=proxy) sym_ops = proxy.sym_ops if sym_ops is None: sym_ops = unit_mxs i_seqs = proxy.i_seqs loop.add_row((site_labels[i_seqs[0]], site_labels[i_seqs[1]], site_labels[i_seqs[2]], site_labels[i_seqs[3]], space_group_info.cif_symmetry_code(sym_ops[0]), space_group_info.cif_symmetry_code(sym_ops[1]), space_group_info.cif_symmetry_code(sym_ops[2]), space_group_info.cif_symmetry_code(sym_ops[3]), fmt % restraint.volume_ideal, fmt % math.sqrt(1/restraint.weight), fmt % restraint.delta)) return loop
def run(args): assert len(args) == 1 lines = open(args[0]).read().splitlines() title = lines[0] unit_cell = uctbx.unit_cell(lines[1]) n_symops = int(lines[2].split()[0]) space_group = sgtbx.space_group() for line in lines[3:3+n_symops]: coeffs = [float(field) for field in line.split()] space_group.expand_smx(sgtbx.rt_mx(coeffs[:9], coeffs[9:])) crystal_symmetry = crystal.symmetry( unit_cell=unit_cell, space_group=space_group) miller_indices = flex.miller_index() data = flex.double() sigmas = flex.double() for i_line in xrange(3+n_symops,len(lines)): fields = lines[i_line].split() assert len(fields) == 5 miller_indices.append([int(value) for value in fields[:3]]) data.append(float(fields[3])) sigmas.append(float(fields[4])) miller_set=miller.set( crystal_symmetry=crystal_symmetry, indices=miller_indices, anomalous_flag=False) miller_array = miller_set.array( data=data, sigmas=sigmas).set_observation_type_xray_intensity() print "Before merging:" miller_array.show_summary() print merged = miller_array.merge_equivalents() merged.show_summary() print merged_array = merged.array() print "After merging:" merged_array.show_comprehensive_summary() print
def sample(two_folds, fudge_factor, deltas, perturbations): for two_fold in two_folds: group = sgtbx.space_group() group.expand_smx(sgtbx.rt_mx(two_fold)) assert group.order_z() == 2 sym = sgtbx.space_group_info( group=group).any_compatible_crystal_symmetry(volume=1000) for i_trial in xrange(30): while True: uc_fudge = list(sym.unit_cell().parameters()) for i in xrange(6): uc_fudge[i] *= 1 + (random.random() * 2 - 1) * fudge_factor try: uc_fudge = uctbx.unit_cell(uc_fudge) except ValueError: pass else: break deltas.append( two_fold.le_page_1982_delta(reduced_cell=uc_fudge, deg=True)) perturbations.append( two_fold.lebedev_2005_perturbation(reduced_cell=uc_fudge))
def __init__(self, cif_block, strict=False): # The order of priority for determining space group is: # sym_ops, hall symbol, H-M symbol, space group number self.cif_block = cif_block sym_ops = self.get_cif_item('_space_group_symop_operation_xyz') sym_op_ids = self.get_cif_item('_space_group_symop_id') space_group = None if sym_ops is not None: if isinstance(sym_ops, basestring): sym_ops = flex.std_string([sym_ops]) if sym_op_ids is not None: if isinstance(sym_op_ids, basestring): sym_op_ids = flex.std_string([sym_op_ids]) assert len(sym_op_ids) == len(sym_ops) self.sym_ops = {} space_group = sgtbx.space_group() if isinstance(sym_ops, basestring): sym_ops = [sym_ops] for i, op in enumerate(sym_ops): try: s = sgtbx.rt_mx(op) except RuntimeError, e: str_e = str(e) if "Parse error: " in str_e: raise CifBuilderError( "Error interpreting symmetry operator: %s" % (str_e.split("Parse error: ")[-1])) else: raise if sym_op_ids is None: sym_op_id = i + 1 else: try: sym_op_id = int(sym_op_ids[i]) except ValueError, e: raise CifBuilderError( "Error interpreting symmetry operator id: %s" % (str(e))) self.sym_ops[sym_op_id] = s space_group.expand_smx(s)
def __init__(self, lf): self.symbol = None l = lf.next() print l.strip() if (lf.eof): return no, number, symbol, m = l.split() assert no == "NO." number = int(number) assert int(float(m)) == float(m) m = int(float(m)) space_group = sgtbx.space_group() if (m < 0): space_group.expand_inv(sgtbx.tr_vec((0, 0, 0))) space_group.expand_conventional_centring_type(symbol[0]) t_den = space_group.t_den() matrices = [] for i in xrange(abs(m)): l = lf.next() assert not lf.eof flds = l.split() assert len(flds) == 12 r = [int(e) for e in flds[1:4] + flds[5:8] + flds[9:12]] t = [ int(round(float(e) * t_den)) for e in (flds[0], flds[4], flds[8]) ] try: s = sgtbx.rt_mx(sgtbx.rot_mx(r), sgtbx.tr_vec(t)) except RuntimeError, e: print e print l else: try: matrices.append(s) space_group.expand_smx(s) except RuntimeError, e: print e print l print s
def _residue_groups_rt_mx_ij(self, atom1, atom2, unit_cell, symop_str, symop):
  """ Get atoms object and residue group object for H and heavy atom """
  xyzs1 = atom1.parent().parent().atoms().extract_xyz()
  xyzs2 = atom2.parent().parent().atoms().extract_xyz()
  rg1 = atom1.parent().parent().detached_copy()
  rg2 = atom2.parent().parent().detached_copy()
  if symop_str:
    rt_mx_ji = sgtbx.rt_mx(str(symop))
    xyzs2 = unit_cell.fractionalize(xyzs2)
    m3 = rt_mx_ji.r().as_double()
    m3 = scitbx.matrix.sqr(m3)
    t = rt_mx_ji.t().as_double()
    t = scitbx.matrix.col((t[0], t[1], t[2]))
    xyzs2 = unit_cell.orthogonalize(m3.elems*xyzs2+t)
    rg2.atoms().set_xyz(xyzs2)
    for atom in rg2.atoms():
      if atom.name == atom2.name:
        atom2 = atom
        break
  return rg1, rg2, atom1, atom2
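# Minimal sketch of the coordinate transformation applied above: one symmetry
# operation acting on a single fractional site, written out as R*x + t and,
# more compactly, via rt_mx multiplication (the site values are illustrative).
import scitbx.matrix
from cctbx import sgtbx

rt = sgtbx.rt_mx("-x,y+1/2,-z")
site = (0.10, 0.20, 0.30)
r = scitbx.matrix.sqr(rt.r().as_double())
t = scitbx.matrix.col(rt.t().as_double())
print((r * scitbx.matrix.col(site) + t).elems)   # (-0.1, 0.7, -0.3)
print(rt * site)                                 # same result via sgtbx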
def filtered_commands(self): """ Yields those command in self.command_stream that this parser is not concerned with. On the contrary, LATT, SYMM are swallowed (CELL is yielded because it carries the wavelength too). """ unit_cell = None unit_cell_param_sigmas = None space_group = sgtbx.space_group() for command, line in self.command_stream: cmd, args = command[0], command[-1] if cmd == 'CELL': assert unit_cell is None unit_cell = uctbx.unit_cell(args[1:]) yield command, line elif cmd == 'ZERR': assert unit_cell_param_sigmas is None unit_cell_param_sigmas = args[1:] yield command, line elif cmd == 'LATT': assert len(args) == 1 n = int(args[0]) if n > 0: space_group.expand_inv(sgtbx.tr_vec((0, 0, 0))) z = "*PIRFABC"[abs(n)] space_group.expand_conventional_centring_type(z) elif cmd == 'SYMM': assert len(args) == 1 s = sgtbx.rt_mx(args[0]) space_group.expand_smx(s) else: if cmd == 'SFAC': assert unit_cell is not None self.builder.make_crystal_symmetry(unit_cell=unit_cell, space_group=space_group) self.builder.set_unit_cell_parameter_sigmas( unit_cell_param_sigmas) yield command, line
def __init__(self, si_structure, si_si_sym_table): self.structure = si_structure.deep_copy_scatterers() bond_sym_table = crystal.pair_sym_table(si_si_sym_table.size()) sites_frac = si_structure.sites_frac() i_oxygen = count(1) for i_seq,pair_sym_dict in enumerate(si_si_sym_table): site_frac_i = mx.col(sites_frac[i_seq]) for j_seq,sym_ops in pair_sym_dict.items(): assert j_seq >= i_seq for rt_mx_ji in sym_ops: site_frac_ji = mx.col(rt_mx_ji * sites_frac[j_seq]) bond_center = (site_frac_i + site_frac_ji) / 2 i_seq_o = self.structure.scatterers().size() self.structure.add_scatterer(xray.scatterer( label="O%d"%i_oxygen.next(), site=bond_center)) bond_sym_table[i_seq].setdefault(i_seq_o).append( sgtbx.rt_mx(1,1)) bond_sym_table[j_seq].setdefault(i_seq_o).append( rt_mx_ji.inverse_cancel()) bond_sym_table.append(crystal.pair_sym_dict()) self.bond_sym_table = bond_sym_table.tidy( site_symmetry_table=self.structure.site_symmetry_table())
def add_user_vector(self): uc = self.viewer.miller_array.unit_cell() ln = len(self.viewer.all_vectors) label = self.params.viewer.user_label order = 0 try: hklvec = "" abcvec = "" hklop = "" if self.params.viewer.add_user_vector_hkl not in [None, "", "()"]: hklvec = eval(self.params.viewer.add_user_vector_hkl.replace(" ","")) # convert into cartesian space cartvec = list( self.viewer.scene.renderscale*(hklvec * matrix.sqr(uc.fractionalization_matrix()).transpose()) ) elif self.params.viewer.add_user_vector_abc not in [None, "", "()"]: abcvec = eval(self.params.viewer.add_user_vector_abc.replace(" ","")) # convert into cartesian space cartvec = list(abcvec * matrix.sqr(uc.orthogonalization_matrix())) elif self.params.viewer.add_user_vector_hkl_op not in [None, ""]: hklop = self.params.viewer.add_user_vector_hkl_op.replace(" ","") rt = sgtbx.rt_mx(symbol=hklop, r_den=12, t_den=144) rt.r().as_double() self.viewer.symops.append( rt ) # (cartvec, a, label, order) = self.viewer.GetVectorAndAngleFromRotationMx( rt.r() ) if label: label = "%s-fold_%s" %(str(int(roundoff(2*math.pi/a, 0))), self.params.viewer.user_label) self.mprint("Rotation axis, %s, added" %label) if (self.params.viewer.add_user_vector_hkl in [None, "", "()"] \ and self.params.viewer.add_user_vector_abc in [None, "", "()"] \ and self.params.viewer.add_user_vector_hkl_op) in [None, ""]: self.mprint("No vector was specified") self.uservectors.append( (ln, label, order, cartvec, hklop, str(hklvec), str(abcvec) )) self.list_vectors() except Exception as e: raise Sorry( str(e)) self.params.viewer.add_user_vector_hkl_op = "" self.params.viewer.add_user_vector_hkl = "" self.params.viewer.add_user_vector_abc = ""
def process_nonbonded_for_links( self, bond_params_table, bond_asu_table, geometry_proxy_registries, link_metals=True, link_residues=True, link_carbohydrates=True, link_amino_acid_rna_dna=False, link_ligands=False, link_small_molecules=False, max_bonded_cutoff=None, metal_coordination_cutoff=3., amino_acid_bond_cutoff=2., inter_residue_bond_cutoff=2., second_row_buffer=0.5, carbohydrate_bond_cutoff=2., ligand_bond_cutoff=2., small_molecule_bond_cutoff=2., include_selections=None, exclude_selections=None, log=None, verbose=False, ): assert hasattr(self, "_cif") if max_bonded_cutoff is None: max_bonded_cutoff = max( metal_coordination_cutoff, amino_acid_bond_cutoff, carbohydrate_bond_cutoff, ligand_bond_cutoff, small_molecule_bond_cutoff, inter_residue_bond_cutoff + second_row_buffer, ) max_bonded_cutoff_standard = max_bonded_cutoff if include_selections: for selection_1, selection_2, cutoff in include_selections: max_bonded_cutoff = max(max_bonded_cutoff, cutoff) # check that linking required ## has_checks = [] ## for i, link_boolean in enumerate([link_carbohydrates, ## ] ## ): ## if i==0: ct = "common_saccharide" ## rc = False ## if link_boolean: ## rc = check_all_classes(self.pdb_hierarchy, ct) ## has_checks.append(rc) ## has_checks.append(link_amino_acid_rna_dna) ## if not filter(None, has_checks): return # if max_bonded_cutoff > 15: raise Sorry( "One of the following parameters: \nmetal_coordination_" + "cutoff, amino_acid_bond_cutoff," + "inter_residue_bond_cutoff, \ncarbohydrate_bond_cutoff," "bonds.bond_distance_cutoff \nis greater than 15A. Please check and" + " correct these parameters.") if verbose and log is not None: print(""" metal_coordination_cutoff %s amino_acid_bond_cutoff %s carbohydrate_bond_cutoff %s inter_residue_bond_cutoff %s second_row_buffer %s """ % ( metal_coordination_cutoff, amino_acid_bond_cutoff, carbohydrate_bond_cutoff, inter_residue_bond_cutoff, second_row_buffer, ), file=log) from cctbx import crystal from cctbx.array_family import flex # def _nonbonded_pair_objects( max_bonded_cutoff=3., i_seqs=None, ): if i_seqs is None: atoms = self.pdb_hierarchy.atoms() i_seqs = flex.size_t() for atom in atoms: i_seqs.append(atom.i_seq) if (self.model_indices is not None): model_indices = self.model_indices.select(i_seqs) conformer_indices = self.conformer_indices.select(i_seqs) sym_excl_indices = self.sym_excl_indices.select(i_seqs) donor_acceptor_excl_groups = self.donor_acceptor_excl_groups.select( i_seqs) asu_mappings = self.special_position_settings.asu_mappings( buffer_thickness=max_bonded_cutoff) sites_cart = self.sites_cart.select(i_seqs) asu_mappings.process_sites_cart( original_sites=sites_cart, site_symmetry_table=self.site_symmetry_table().select(i_seqs)) pair_asu_table = crystal.pair_asu_table(asu_mappings=asu_mappings) nonbonded_proxies = geometry_restraints.nonbonded_sorted_asu_proxies( model_indices=model_indices, conformer_indices=conformer_indices, sym_excl_indices=sym_excl_indices, donor_acceptor_excl_groups=donor_acceptor_excl_groups, nonbonded_params=geometry_restraints.nonbonded_params( default_distance=1), nonbonded_types=flex.std_string(conformer_indices.size()), nonbonded_charges=flex.int(conformer_indices.size(), 0), nonbonded_distance_cutoff_plus_buffer=max_bonded_cutoff, min_cubicle_edge=5, shell_asu_tables=[pair_asu_table]) return nonbonded_proxies, sites_cart, pair_asu_table, asu_mappings, i_seqs # def _nonbonded_pair_generator_geometry_restraints_sort( nonbonded_proxies, max_bonded_cutoff=3.): rc = nonbonded_proxies.get_sorted( 
by_value="delta", sites_cart=sites_cart, include_proxy=True, ) if rc is None: return rc, junk = rc for item in rc: yield item # if (log is not None): print(""" Automatic linking Parameters for automatic linking Linking & cutoffs Metal : %-5s - %0.2f Amimo acid : %-5s - %0.2f Carbohydrate : %-5s - %0.2f Ligands : %-5s - %0.2f Small molecules : %-5s - %0.2f Amino acid - RNA/DNA : %-5s """ % ( link_metals, metal_coordination_cutoff, link_residues, amino_acid_bond_cutoff, link_carbohydrates, carbohydrate_bond_cutoff, link_ligands, ligand_bond_cutoff, link_small_molecules, small_molecule_bond_cutoff, link_amino_acid_rna_dna, ), file=log) t0 = time.time() atoms = self.pdb_hierarchy.atoms() bond_data = [] bond_data_i_seqs = {} simple_bonds = 0 sym_bonds = 0 link_data = [] simple_links = 0 sym_links = 0 done = ResidueLinkClass() links = {} custom_links = {} exclude_out_lines = {} # main loop nonbonded_proxies, sites_cart, pair_asu_table, asu_mappings, nonbonded_i_seqs = \ _nonbonded_pair_objects(max_bonded_cutoff=max_bonded_cutoff, ) initial_pair_asu_table_table = bond_asu_table.table().deep_copy() for ii, item in enumerate( _nonbonded_pair_generator_geometry_restraints_sort( nonbonded_proxies=nonbonded_proxies, max_bonded_cutoff=max_bonded_cutoff, )): labels, i_seq, j_seq, distance, vdw_distance, sym_op, rt_mx_ji, proxy = item # # include & exclude selection # origin_id = None if (include_selections and distance >= max_bonded_cutoff_standard): for selection_1, selection_2, bond_cutoff in include_selections: if ((i_seq in selection_1 and j_seq in selection_2) or (i_seq in selection_2 and j_seq in selection_1)): metal_coordination_cutoff = bond_cutoff amino_acid_bond_cutoff = bond_cutoff carbohydrate_bond_cutoff = bond_cutoff ligand_bond_cutoff = bond_cutoff small_molecule_bond_cutoff = bond_cutoff inter_residue_bond_cutoff = bond_cutoff saccharide_bond_cutoff = bond_cutoff link_residues = True break else: continue # exclude this nonbond from consideration exclude_this_nonbonded = False if exclude_selections: for selection_1, selection_2 in exclude_selections: if selection_2: # check both if ((i_seq in selection_1 and j_seq in selection_2) or (i_seq in selection_2 and j_seq in selection_1)): exclude_this_nonbonded = True break else: if i_seq in selection_1 or j_seq in selection_1: exclude_this_nonbonded = True # XXX this is a poor job!!! if bond_asu_table.contains(i_seq, j_seq, 0): continue if bond_asu_table.contains(i_seq, j_seq, 1): continue atom1 = atoms[i_seq] atom2 = atoms[j_seq] if exclude_this_nonbonded: key = (selection_1, selection_2) if key not in exclude_out_lines: exclude_out_lines[key] = \ ' bond %s\n%s%s\n%smodel %.2f' % ( atom1.id_str(), ' '*9, atom2.id_str(), ' '*6, distance) continue # # moving sections of this to outside the loop # - SF4 # if link_metals: moved = ['SF4', 'F3S'] if (atom1.parent().resname in moved or atom2.parent().resname in moved): continue if verbose: print(i_seq, j_seq, atom1.quote(), end=' ') print(atom2.quote(), end=' ') print("Distance: %0.2f" % distance, rt_mx_ji, sym_op) # don't link atoms not in the same conformer (works for models also)... 
    if not atom1.is_in_same_conformer_as(atom2):
      assert 0
      continue
    # don't link atoms in same residue group
    if atom1.parent().parent() == atom2.parent().parent(): continue
    atom_group1 = atom1.parent()
    atom_group2 = atom2.parent()
    # don't link atom groups in different altlocs, except blank (" ")
    if atom_group1.altloc.strip() == atom_group2.altloc.strip(): pass
    elif atom_group1.altloc.strip() == "": pass
    elif atom_group2.altloc.strip() == "": pass
    else: continue
    # don't link some classes
    classes1 = linking_utils.get_classes(atom1)
    classes2 = linking_utils.get_classes(atom2)
    use_only_bond_cutoff = False
    if verbose:
      print("""
Residue classes
%s
%s
""" % (classes1, classes2))
    # why was this commented out???
    if not link_ligands and (classes1.other or classes2.other): continue
    if (not link_small_molecules and
        (classes1.common_small_molecule or classes2.common_small_molecule)):
      continue
    # is_proxy_set between any of the atoms ????????
    if classes1.common_amino_acid and classes2.common_amino_acid:
      if not link_residues:
        continue
      # special amino acid linking
      #  - cyclic
      #  - beta, delta ???
      if possible_cyclic_peptide(atom1, atom2): # first & last peptide
        use_only_bond_cutoff = True
    if sym_op:
      if classes1.common_amino_acid and classes2.common_saccharide: continue
      if classes2.common_amino_acid and classes1.common_saccharide: continue
    #
    # bonded atoms can't link to same atom, eg MG-PG and MG-O1P
    #
    if bond_data:
      bonded = False
      if i_seq in bond_data_i_seqs:
        for t_i in bond_data_i_seqs[i_seq]:
          if bond_asu_table.contains(j_seq, t_i, 0): bonded = True
      if j_seq in bond_data_i_seqs:
        for t_i in bond_data_i_seqs[j_seq]:
          if bond_asu_table.contains(i_seq, t_i, 0): bonded = True
      if bonded: continue
    #
    key = [
      atom1.id_str()[9:-1],
      atom2.id_str()[9:-1],
      ]
    key.sort()
    if sym_op:
      key.append(str(rt_mx_ji))
    key = tuple(key)
    # hydrogens
    if atom1.element.strip() in hydrogens:
      done[atom2.id_str()] = atom1.id_str()
    if atom2.element.strip() in hydrogens:
      done[atom1.id_str()] = atom2.id_str()
    # bond length cutoff & some logic
    if not linking_utils.is_atom_pair_linked(
        atom1,
        atom2,
        distance=distance,
        max_bonded_cutoff=max_bonded_cutoff,
        amino_acid_bond_cutoff=amino_acid_bond_cutoff,
        inter_residue_bond_cutoff=inter_residue_bond_cutoff,
        second_row_buffer=second_row_buffer,
        saccharide_bond_cutoff=carbohydrate_bond_cutoff,
        metal_coordination_cutoff=metal_coordination_cutoff,
        use_only_bond_cutoff=use_only_bond_cutoff,
        link_metals=link_metals,
        verbose=verbose,
        ):
      if verbose:
        print("is not linked", atom1.quote(), atom2.quote(), key)
        print('link_metals', link_metals)
      if (atom1.element.strip().upper() in hydrogens or
          atom2.element.strip().upper() in hydrogens):
        pass
      else:
        done.setdefault(key, [])
        done[key].append([atom1.name, atom2.name])
      continue
    # check some valences...
    if not (classes1.common_element or classes2.common_element):
      if not linking_utils.check_valence(self.pdb_hierarchy, atom1):
        print(" Atom %s rejected from bonding due to valence issues." % atom1.quote(),
              file=log)
        continue
      if not linking_utils.check_valence(self.pdb_hierarchy, atom2):
        print(" Atom %s rejected from bonding due to valence issues." % atom2.quote(),
              file=log)
        continue
    # got a link....
class1 = linking_utils.get_classes( atom1, #_group1.resname, important_only=True, ) class2 = linking_utils.get_classes( atom2, #_group2.resname, important_only=True, ) class_key = [class1, class2] class_key.sort() class_key = tuple(class_key) if verbose: print('class_key', class_key) # if not link_metals and "metal" in class_key: continue #atoms_must_be = {} if not link_residues: if class_key in [ ("common_amino_acid", "common_amino_acid"), #("common_amino_acid", "other"), ]: continue #else: # atoms_must_be.setdefault(("common_amino_acid", # "common_amino_acid"),["C", "N"]) # atoms_must_be.setdefault(("common_amino_acid", "other"),["C", "N"]) if not link_carbohydrates and "common_saccharide" in class_key: continue if not link_amino_acid_rna_dna: if "common_amino_acid" in class_key and "common_rna_dna" in class_key: continue # names = [atom1.name, atom2.name] if verbose: print('names', names) names.sort() atom1_key = None atom2_key = None if class1 in linking_setup.maximum_per_atom_links: # is one atom1_key = atom1.id_str() if class2 in linking_setup.maximum_per_atom_links: # is one atom2_key = atom2.id_str() if verbose: print('-' * 80) print('class_key', class_key) print('done') for k, item in done.items(): print("> %s : %s" % (k, item)) print('key', key) print('atom keys', atom1_key, atom2_key) # exclude duplicate symmetry op. if key in done: if names in done[key]: continue if atom1.parent().altloc == atom2.parent().altloc: if atom1_key: if atom1_key in done: continue done[atom1_key] = key if atom2_key: if atom2_key in done: continue done[atom2_key] = key # current_number_of_links = len(done.setdefault(key, [])) if (current_number_of_links >= linking_setup.maximum_inter_residue_links.get( class_key, 1)): if verbose: print( "too many links:", current_number_of_links, linking_setup.maximum_inter_residue_links.get( class_key, 1), class_key) continue # done[key].append(names) done_key = key # get all possible links i_seqs = [] for atom in atom_group1.atoms(): i_seqs.append(atom.i_seq) j_seqs = [] for atom in atom_group2.atoms(): j_seqs.append(atom.i_seq) ij_seqs = [] for i in i_seqs: for j in j_seqs: tmp = [i, j] tmp.sort() ij_seqs.append(tuple(tmp)) # check that a link not already made link_found = False if verbose: print('len simple bond proxies', len(geometry_proxy_registries.bond_simple.proxies)) # Consistency check - debugging only # for bsp in geometry_proxy_registries.bond_simple.proxies: # if bsp.i_seqs[1] not in initial_pair_asu_table_table[bsp.i_seqs[0]].keys(): # print "ERROR!!!", bsp.i_seqs # STOP() # Proposed fast loop: Time building additional restraints for # ribosome went from 5272 to 204 seconds. for p in ij_seqs: if p[1] in initial_pair_asu_table_table[p[0]].keys(): link_found = True break # VERY SLOW !!! 
- original loop # for bond_simple_proxy in geometry_proxy_registries.bond_simple.proxies: # if bond_simple_proxy.i_seqs in ij_seqs: # link_found = True # break if link_found: continue # check for any link between atom groups based on residue name, eg ASN-NAG # get predefined link link, swap, key = linking_utils.is_atom_group_pair_linked( atom_group1, atom_group2, self.mon_lib_srv, ) if verbose: print('link', link) print('swap', swap) print('key', key) if swap: tmp = atom_group2 atom_group2 = atom_group1 atom_group1 = tmp space_group = self.special_position_settings.space_group() # if len(done_key) == 2: link_rt_mx_ji = sgtbx.rt_mx(symbol="x,y,z", t_den=space_group.t_den()) else: link_rt_mx_ji = sgtbx.rt_mx(symbol=done_key[2], t_den=space_group.t_den()) # if link: # apply a standard link origin_id = origin_ids.get_origin_id( 'link_%s' % key, return_none_if_absent=True, ) if origin_id is None: # user defined links should not be applied here continue count, bond_i_seqs = _apply_link_using_proxies( link, atom_group1, atom_group2, bond_params_table, bond_asu_table, geometry_proxy_registries, rt_mx_ji=link_rt_mx_ji, origin_id=origin_id, ) origin_id = None if len(bond_i_seqs) == 0: if verbose: print('failed to link using %s' % key) continue links.setdefault(key, []) links[key].append([atom_group1, atom_group2]) links[key][-1] += bond_i_seqs[0] # odd? if verbose: print("predefined residue named link", key) continue # #if atoms_must_be: # # this could be fancier... # # link_residues is peptide and SG links # atoms_must_be_key = [atom1.element.strip(), atom2.element.strip()] # #atoms_must_be_key = [atom1.name.strip(), atom2.name.strip()] # atoms_must_be_key.sort() # if class_key in atoms_must_be and "S" not in atoms_must_be_key: # if atoms_must_be[class_key]!=atoms_must_be_key: # continue rc = linking_utils.process_atom_groups_for_linking_single_link( self.pdb_hierarchy, atom1, atom2, verbose=verbose, ) if not rc: done.remove_link(done_key, names) continue pdbres, link_key, link_atoms = rc assert len(link_key) == 1 key = link_key[0] link = self.mon_lib_srv.link_link_id_dict.get(key, None) if verbose: print('pdbres', pdbres) print('link', link) print('link_key', link_key) print('link_atoms', link_atoms) if key.find("ALPHA1") > -1 or key.find( "BETA1") > -1: # is handled in elif key, cif, bond_i_seqs = \ glyco_utils.apply_glyco_link_using_proxies_and_atoms( atom_group2, atom_group1, bond_params_table, bond_asu_table, geometry_proxy_registries, rt_mx_ji=link_rt_mx_ji, link_carbon_dist=carbohydrate_bond_cutoff, origin_id=origin_ids['glycosidic custom'], ) links.setdefault(key, []) links[key].append([atom_group1, atom_group2]) links[key][-1] += bond_i_seqs continue elif link: origin_id = origin_ids['link_%s' % key] count, bond_i_seqs = _apply_link_using_proxies( link, atom_group1, atom_group2, bond_params_table, bond_asu_table, geometry_proxy_registries, rt_mx_ji=link_rt_mx_ji, origin_id=origin_id, ) origin_id = None links.setdefault(key, []) links[key].append([atom_group1, atom_group2]) links[key][-1] += bond_i_seqs[0] continue else: # possible peptide or rna/dna link rc = check_for_peptide_links(atom1, atom2) # no peptide links across symmetry if len(done_key) == 3: rc = None if rc: key, swap = rc link = self.mon_lib_srv.link_link_id_dict.get(key) if swap: tmp = atom_group2 atom_group2 = atom_group1 atom_group1 = tmp origin_id = origin_ids['link_%s' % key] rc = _apply_link_using_proxies( link, atom_group1, atom_group2, bond_params_table, bond_asu_table, geometry_proxy_registries, 
rt_mx_ji=link_rt_mx_ji, origin_id=origin_id, ) if not rc: tmp = atom_group2 atom_group2 = atom_group1 atom_group1 = tmp rc = _apply_link_using_proxies( link, atom_group1, atom_group2, bond_params_table, bond_asu_table, geometry_proxy_registries, rt_mx_ji=link_rt_mx_ji, origin_id=origin_id, ) origin_id = None # not added to links so not LINK record if sym_op: sym_links += 1 link_data.append(( atoms[i_seq].id_str(), atoms[j_seq].id_str(), rt_mx_ji, key, )) else: simple_links += 1 link_data.append(( atoms[i_seq].id_str(), atoms[j_seq].id_str(), None, #rt_mx_ji, key, )) continue # custom_links.setdefault(ii, []) custom_links[ii].append([atom_group1, atom_group2, atom1, atom2]) # simple origin_id = origin_ids['Misc. bond'] if ((classes1.common_rna_dna or classes1.ccp4_mon_lib_rna_dna) and (classes2.common_rna_dna or classes2.ccp4_mon_lib_rna_dna)): bond_name = "h-dna" assert 0 elif (linking_utils.get_classes(atom1, important_only=True) == "metal" or linking_utils.get_classes( atom2, important_only=True) == "metal"): origin_id = origin_ids['metal coordination'] if sym_op: sym_bonds += 1 bond_data.append(( atoms[i_seq].id_str(), atoms[j_seq].id_str(), rt_mx_ji, origin_id, )) bond_data_i_seqs.setdefault(i_seq, []) bond_data_i_seqs.setdefault(j_seq, []) bond_data_i_seqs[i_seq].append(j_seq) bond_data_i_seqs[j_seq].append(i_seq) pair_asu_table.add_pair(proxy) else: simple_bonds += 1 bond_data.append(( atoms[i_seq].id_str(), atoms[j_seq].id_str(), None, #rt_mx, origin_id, )) bond_data_i_seqs.setdefault(i_seq, []) bond_data_i_seqs.setdefault(j_seq, []) bond_data_i_seqs[i_seq].append(j_seq) bond_data_i_seqs[j_seq].append(i_seq) pair_asu_table.add_pair(proxy.i_seqs) # END MAIN LOOP for ii, item in enumerate(nonbonded?) # # if verbose: for key in sorted(custom_links): print('-' * 80) print(key) for pair in custom_links[key]: for atom in pair: try: print(atom.quote()) except Exception: print(atom) pair_sym_table = pair_asu_table.extract_pair_sym_table() n_simple, n_symmetry = 0, 0 self.pdb_link_records.setdefault("LINK", []) retain = [] for ijk, sym_pair in enumerate(pair_sym_table.iterator()): i_seq, j_seq = sym_pair.i_seqs() origin_id = bond_data[ijk][-1] assert i_seq == nonbonded_i_seqs[i_seq] assert j_seq == nonbonded_i_seqs[j_seq] atom1 = atoms[i_seq] atom2 = atoms[j_seq] # check for NA linkage classes1 = linking_utils.get_classes(atom1) classes2 = linking_utils.get_classes(atom2) ans = bondlength_defaults.run(atom1, atom2) equil = 2.3 weight = 0.02 slack = 0. if len(ans) > 0: equil = ans[0] if len(ans) > 1: weight = ans[1] if len(ans) > 2: slack = ans[2] if equil is None: equil = 2.3 added_to_asu_table = False try: #bond_asu_table.add_pair([i_seq, j_seq]) bond_asu_table.add_pair(i_seq=i_seq, j_seq=j_seq, rt_mx_ji=sym_pair.rt_mx_ji) added_to_asu_table = True except RuntimeError as e: error = """ Difficulties linking atoms %s %s Suggestions include providing restraints for any unknown residues. 
""" % (atom1.quote(), atom2.quote()) print(error, file=log) if added_to_asu_table: retain.append(ijk) if (sym_pair.rt_mx_ji.is_unit_mx()): n_simple += 1 else: n_symmetry += 1 assert origin_id bond_params_table.update( i_seq=i_seq, j_seq=j_seq, params=geometry_restraints.bond_params( distance_ideal=equil, weight=1.0 / weight**2, slack=slack, origin_id=origin_id, )) # adding link to PDB self.pdb_link_records["LINK"].append([ self.pdb_atoms[i_seq], self.pdb_atoms[j_seq], sym_pair.rt_mx_ji ]) # output if link_data: print(" Number of additional links: simple=%d, symmetry=%d" % ( simple_links, sym_bonds, ), file=log) for label1, label2, sym_op, link_name in sorted(link_data): if sym_op is None: print(" Simple link: %s - %s" % (label1, label2), file=log) for label1, label2, sym_op, bond_type in sorted(bond_data): if sym_op: print(" Symmetry link: %s - %s sym. op: %s" % ( label1, label2, sym_op, ), file=log) if (log is not None): print(" Number of custom bonds: simple=%d, symmetry=%d" % (n_simple, n_symmetry), file=log) if (n_symmetry == 0): blanks = "" else: blanks = " " # def _sort_on_id(ag1, ag2): ag1 = ag1[0] ag2 = ag2[0] if ag1.id_str()[4:] == ag2.id_str()[4:]: if ag1.altloc < ag2.altloc: return -1 return 1 elif ag1.id_str()[4:] < ag2.id_str()[4:]: return -1 return 1 # if exclude_out_lines: print( " Excluded links - shortest distance candidate listed for each exclusion", file=log) for key, item in exclude_out_lines.items(): print(item, file=log) if links: explained = [] print(" Links applied", file=log) for key in sorted(links): print(" %s" % key, file=log) links[key].sort(key=cmp_to_key(_sort_on_id)) for ag1, ag2, i_seq, j_seq in links[key]: self.pdb_link_records["LINK"].append([ self.pdb_atoms[i_seq], self.pdb_atoms[j_seq], "x,y,z", #link_rt_mx_ji, ]) if ag1.altloc or ag2.altloc: print(' "%s" - "%s" : altloc "%s" - "%s"' % ( ag1.id_str(), ag2.id_str(), ag1.altloc, ag2.altloc, ), file=log) else: print(' "%s" - "%s"' % ( ag1.id_str(), ag2.id_str(), ), file=log) explain = "" if key.find("ALPHA") == 0 or key.find("BETA") == 0: true_alpha_beta = glyco_utils.get_alpha_beta( ag2.resname, fake=False, ) if true_alpha_beta and key.find( true_alpha_beta.upper()) == -1: one = "a beta" two = "an alpha" if true_alpha_beta == "alpha": one = "an alpha" two = "a beta" explain = "%s~> Even though %s is %s isomer," % ( ' ' * 7, ag2.resname, one) explain += " %s linkage is required..." % (two) if explain not in explained: print(explain, file=log) explained.append(explain) if bond_data: print(" Number of additional bonds: simple=%d, symmetry=%d" % ( simple_bonds, sym_bonds, ), file=log) for caption, bond_type in [ ("Coordination", 'metal coordination'), ("Other bonds", 'bond'), ]: print(" %s:" % caption, file=log) for ijk, (label1, label2, sym_op, bt) in enumerate(sorted(bond_data)): if sym_op is None and bt == bond_type and ijk in retain: print(" Simple bond: %s - %s" % (label1, label2), file=log) for ijk, (label1, label2, sym_op, bt) in enumerate(sorted(bond_data)): if sym_op and bt == bond_type and ijk in retain: print(" Symmetry bond: %s - %s sym. op: %s" % ( label1, label2, sym_op, ), file=log) if (log is not None): print(' Time building additional restraints: %0.2f' % (time.time() - t0), file=log)
def CheckFormat(self, value):
  # Coerce unicode input to a plain ascii string before parsing.
  if type(value) == type(u'abc'):
    value = value.encode("ascii", "ignore")
  from cctbx import sgtbx
  # Constructing the rt_mx is the actual format check: sgtbx raises if the
  # string cannot be parsed as a symmetry operator, so an invalid value
  # never reaches the return statement.
  rt_mx = sgtbx.rt_mx(symbol=value)
  return value
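# Hedged sketch, not part of the original module: CheckFormat above relies on
# sgtbx.rt_mx raising an exception for strings that are not valid symmetry
# operators, so constructing the rt_mx is itself the format validation. The
# helper name example_rt_mx_parse_validation is arbitrary.
def example_rt_mx_parse_validation():
  from cctbx import sgtbx
  sgtbx.rt_mx(symbol="-x,y+1/2,z")  # parses cleanly
  try:
    sgtbx.rt_mx(symbol="not a symmetry operator")
  except Exception:
    pass  # invalid strings are rejected at parse time
  else:
    raise AssertionError("expected the invalid symbol to be rejected")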
def mat_to_symop(mat):
  # Convert a 3x3 matrix (9 elements, row-major) into a reciprocal-space
  # symmetry-operator string such as "h,k,l". Assumes scitbx.matrix and
  # cctbx.sgtbx are imported at module level.
  return sgtbx.change_of_basis_op(
    sgtbx.rt_mx(matrix.sqr(mat), (0, 0, 0), r_den=12, t_den=144)).as_hkl()
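# Hedged usage sketch, not part of the original module: with the identity
# matrix mat_to_symop should return the identity operation on Miller indices,
# and a two-fold rotation about c should map to "-h,-k,l". The helper name
# example_mat_to_symop is arbitrary.
def example_mat_to_symop():
  assert mat_to_symop((1, 0, 0, 0, 1, 0, 0, 0, 1)) == "h,k,l"
  assert mat_to_symop((-1, 0, 0, 0, -1, 0, 0, 0, 1)) == "-h,-k,l"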