def ensure_correct_element_symbol():
  """
  Check that the element symbol parsed out of every IT1992 label agrees
  with the periodic-table lookup, independent of the case of the label.
  """
  from cctbx.eltbx import tiny_pse
  for entry in xray_scattering.it1992_iterator():
    label = entry.label()
    element, charge = xray_scattering.get_element_and_charge_symbols(
      scattering_type=label, exact=False)
    # tiny_pse lookups are case-insensitive; all three must agree.
    assert tiny_pse.table(label).symbol() == element
    assert tiny_pse.table(label.lower()).symbol() == element
    assert tiny_pse.table(label.upper()).symbol() == element
def __init__(self, unit_cell, target_map, residue, vector = None, selection=None):
  # Caches per-atom atomic weights and occupancies for the residue and,
  # when a rotation vector is given, precomputes a real-space map target.
  adopt_init_args(self, locals())
  self.target = None
  self.sites_cart = None
  self.i_seqs = []
  self.weights = flex.double()
  # Per-atom atomic weight, looked up via the standardized scattering label.
  for el in self.residue.atoms().extract_element():
    std_lbl = eltbx.xray_scattering.get_standard_label(
      label=el, exact=True, optional=True)
    self.weights.append(tiny_pse.table(std_lbl).weight())
  self.occ = self.residue.atoms().extract_occ()
  self.vector_flat = None
  if(vector is not None):
    self.vector_flat = flex.size_t(flatten(self.vector))
    self.sites_cart = self.residue.atoms().extract_xyz()
    if(selection is None): selection = self.vector_flat
    # NOTE(review): target/sites_cart are only populated when a vector is
    # supplied (they otherwise stay None, as initialized) — confirm against
    # callers.
    self.target = maptbx.real_space_target_simple(
      unit_cell   = self.unit_cell,
      density_map = self.target_map,
      sites_cart  = self.sites_cart,
      selection   = selection)
def weights(self, normal_eqns, jacobian_transpose_matching_grad_fc, params):
  """
  Return a uniform weight per parameter: the squared atomic number of the
  heaviest element among the scatterers referenced by params.
  """
  z_max = max(
    tiny_pse.table(p.scatterer.element_symbol()).atomic_number()
    for p in params)
  return flex.double(params.size(), z_max**2)
def __init__(self, unit_cell, target_map, residue, vector=None, selection=None):
    # Store constructor arguments as attributes, collect per-atom atomic
    # weights and occupancies, and (when a vector is given) set up the
    # real-space map target for the residue.
    adopt_init_args(self, locals())
    self.target = None
    self.sites_cart = None
    self.i_seqs = []
    self.weights = flex.double()
    # Atomic weight of each atom via its standardized scattering-type label.
    for el in self.residue.atoms().extract_element():
        std_lbl = eltbx.xray_scattering.get_standard_label(label=el,
                                                           exact=True,
                                                           optional=True)
        self.weights.append(tiny_pse.table(std_lbl).weight())
    self.occ = self.residue.atoms().extract_occ()
    self.vector_flat = None
    if (vector is not None):
        self.vector_flat = flex.size_t(flatten(self.vector))
        self.sites_cart = self.residue.atoms().extract_xyz()
        if (selection is None): selection = self.vector_flat
        # NOTE(review): read as "target only computed when vector given";
        # the None initializers above support this — confirm with callers.
        self.target = maptbx.real_space_target_simple(
            unit_cell=self.unit_cell,
            density_map=self.target_map,
            sites_cart=self.sites_cart,
            selection=selection)
def exercise_basic():
  """
  Exercise standard-label lookup (exact and fuzzy matching, the optional
  flag, the error message for unknown labels) and element/charge parsing.
  """
  std_labels = xray_scattering.standard_labels_list()
  assert len(std_labels) == 217
  assert std_labels[:5] == ["H", "D", "T", "Hiso", "He"]
  assert std_labels[-1] == "Pu6+"
  # Every standard label must round-trip through an exact lookup.
  for l in std_labels:
    assert xray_scattering.get_standard_label(
      label=l, exact=True, optional=False) == l
  # Fix: this assertion was duplicated verbatim; one copy removed.
  assert xray_scattering.get_standard_label(label="na+") == "Na1+"
  assert xray_scattering.get_standard_label(label="o-") == "O1-"
  assert xray_scattering.get_standard_label(label="SI4+A") == "Si4+"
  assert xray_scattering.get_standard_label(label="SI1+") == "Si"
  assert xray_scattering.get_standard_label(
    label="SI1+", exact=True, optional=True) is None
  try:
    xray_scattering.get_standard_label(label="SI1+", exact=True, optional=False)
  except ValueError as e:
    assert str(e) == 'Unknown scattering type label: "SI1+"'
  else:
    raise Exception_expected
  #
  from cctbx.eltbx import tiny_pse
  for sl in std_labels:
    e, c = xray_scattering.get_element_and_charge_symbols(
      scattering_type=sl)
    # "T" (tritium) has no tiny_pse entry of its own.
    assert e == "T" or tiny_pse.table(e, True).symbol() == e
    if (c != ""):
      # Charge symbol: a digit 1-9 followed by the sign.
      assert len(c) == 2
      assert "123456789".find(c[0]) >= 0
      assert c[1] in ["+", "-"]
def exercise():
  """
  Spot-check tiny_pse lookups for silicon, then iterate the whole table
  (104 entries) verifying the first/last entries and symbol round-trips.
  """
  si = tiny_pse.table("SI")
  assert si.atomic_number() == 14
  assert si.symbol() == "Si"
  assert si.name() == "silicon"
  assert approx_equal(si.weight(), 28.086)
  count = 0
  for entry in tiny_pse.table_iterator():
    count += 1
    if (count == 1):
      assert entry.symbol() == "H"
    elif (count == 104):
      assert entry.atomic_number() == 103
      assert entry.symbol() == "Lr"
    # Looking up an entry again by its own symbol must be stable.
    again = tiny_pse.table(entry.symbol())
    assert again.symbol() == entry.symbol()
  assert count == 104
def expected_labels(kissel_dir):
  """
  Labels to process: the wk1995 label set when no Kissel directory is
  given, otherwise element symbols for atomic numbers 1..99.
  """
  if (kissel_dir is None):
    return [wk.label() for wk in xray_scattering.wk1995_iterator()]
  return [tiny_pse.table(z).symbol() for z in xrange(1, 100)]
def expected_labels(kissel_dir):
  """Return the list of scattering-type labels expected for this run."""
  labels = []
  if (kissel_dir is None):
    # No Kissel tables available: use the wk1995 label set.
    for entry in xray_scattering.wk1995_iterator():
      labels.append(entry.label())
  else:
    # Kissel tables are indexed by atomic number 1..99.
    for z in xrange(1, 100):
      labels.append(tiny_pse.table(z).symbol())
  return labels
def sorted_as_c_h_then_by_increasing_atomic_number(self):
  """
  Order element counts as C first, then H, then all remaining elements by
  increasing atomic number (Hill-style ordering). Returns self.
  """
  pairs = []
  for symbol in ('C', 'H'):
    count = self.count_of_element.get(symbol)
    if count:
      pairs.append((symbol, count))
  # Decorate remaining elements with Z, sort, then strip the decoration.
  decorated = sorted(
    (tiny_pse.table(symbol).atomic_number(), (symbol, count))
    for symbol, count in self.count_of_element.iteritems()
    if symbol not in ('C', 'H'))
  self.element_count_pairs = pairs + [pair for _, pair in decorated]
  return self
def GetDensity(self):
  """Return the crystal density in kg/m^3."""
  self.CalcMultiplicity()
  # Unit-cell volume: A^3 -> m^3.
  volume_m3 = self.uc.volume() * 1e-30
  total = 0
  for scatterer in self.scatt:
    #site symmetry
    #TODO : calculate real multiplicity for special positions !
    #ssym=s.apply_symmetry(self.uc,self.spg.group())
    entry = tiny_pse.table(scatterer.element_symbol())
    # weight()/1000: g/mol -> kg/mol; divide by N_A times the volume.
    total += (scatterer.occupancy * scatterer.multiplicity()
              * entry.weight() / 1000 / (volume_m3 * 6.0221353e23))
  return total
def GetDensity(self):
    """Density of the structure, in kg/m^3."""
    vol = self.uc.volume() * 1e-30  # unit-cell volume in m^3
    self.CalcMultiplicity()
    density = 0
    for s in self.scatt:
        #site symmetry
        #TODO : calculate real multiplicity for special positions !
        #ssym=s.apply_symmetry(self.uc,self.spg.group())
        atomic_weight = tiny_pse.table(s.element_symbol()).weight()
        # Sum occupancy-weighted masses over all sites, convert g/mol to
        # kg/mol and divide by Avogadro's number times the volume.
        density += (s.occupancy * s.multiplicity() * atomic_weight
                    / 1000 / (vol * 6.0221353e23))
    return density
def __init__(self, unit_cell, target_map, residue, rotamer_eval = None, vector = None):
  """
  Collect per-atom atomic weights, occupancies and the flattened rotation
  vector for the given residue.
  """
  adopt_init_args(self, locals())
  self.target = None
  self.sites_cart = None
  self.i_seqs = []
  # Atomic weight per atom, via the standardized scattering-type label.
  self.weights = flex.double()
  for element in self.residue.atoms().extract_element():
    standard_label = eltbx.xray_scattering.get_standard_label(
      label=element, exact=True, optional=True)
    self.weights.append(tiny_pse.table(standard_label).weight())
  self.occ = self.residue.atoms().extract_occ()
  self.vector_flat = flatten(self.vector)
def cross_check(args):
  # Compare two pickled quick-summary dicts (label -> max fitting error)
  # and report, per label common to both, which run has the smaller error.
  quick_summaries = []
  for file_name in args:
    quick_summaries.append(easy_pickle.load(file_name))
  assert len(quick_summaries) == 2
  lines = []
  max_of_errors = flex.double()
  atomic_numbers = flex.double()
  n_less = 0
  n_greater = 0
  n_equal = 0
  for label_1, error_1 in quick_summaries[0].items():
    error_2 = quick_summaries[1].get(label_1, None)
    if (error_2 is not None):
      line = "%-10s %7.4f %7.4f" % (label_1, error_1, error_2)
      if (error_1 < error_2):
        line += " less %7.4f" % (error_2 / error_1)
        n_less += 1
      elif (error_1 > error_2):
        line += " greater %7.4f" % (error_1 / error_2)
        n_greater += 1
      else:
        line += " equal"
        n_equal += 1
      lines.append(line)
      max_of_errors.append(max(error_1, error_2))
      # Labels look like "<element>_<n_terms>"; sort key is the element's Z.
      atomic_numbers.append(
        tiny_pse.table(label_1.split("_")[0]).atomic_number())
  # Print the comparison twice: by decreasing max error, then by increasing Z.
  for sort_key, reverse in [(max_of_errors, True), (atomic_numbers, False)]:
    perm = flex.sort_permutation(data=sort_key, reverse=reverse)
    perm_lines = flex.select(lines, perm)
    for line in perm_lines:
      print line
    print
  print "n_less:", n_less
  print "n_greater:", n_greater
  print "n_equal:", n_equal
  print "total:", n_less + n_greater + n_equal
def cross_check(args):
  # Side-by-side comparison of exactly two pickled summaries mapping
  # scattering-type label -> max error; prints per-label ratios and totals.
  quick_summaries = []
  for file_name in args:
    quick_summaries.append(easy_pickle.load(file_name))
  assert len(quick_summaries) == 2
  lines = []
  max_of_errors = flex.double()
  atomic_numbers = flex.double()
  n_less = 0
  n_greater = 0
  n_equal = 0
  for label_1,error_1 in quick_summaries[0].items():
    # Only labels present in both summaries are compared.
    error_2 = quick_summaries[1].get(label_1, None)
    if (error_2 is not None):
      line = "%-10s %7.4f %7.4f" % (label_1, error_1, error_2)
      if (error_1 < error_2):
        line += " less %7.4f" % (error_2/error_1)
        n_less += 1
      elif (error_1 > error_2):
        line += " greater %7.4f" % (error_1/error_2)
        n_greater += 1
      else:
        line += " equal"
        n_equal += 1
      lines.append(line)
      max_of_errors.append(max(error_1, error_2))
      # The element symbol before "_" drives the atomic-number sort key.
      atomic_numbers.append(
        tiny_pse.table(label_1.split("_")[0]).atomic_number())
  # Two listings: sorted by worst error (descending), then by Z (ascending).
  for sort_key,reverse in [(max_of_errors,True), (atomic_numbers,False)]:
    perm = flex.sort_permutation(data=sort_key, reverse=reverse)
    perm_lines = flex.select(lines, perm)
    for line in perm_lines:
      print line
    print
  print "n_less:", n_less
  print "n_greater:", n_greater
  print "n_equal:", n_equal
  print "total:", n_less + n_greater + n_equal
def need_sidechain_fit(
      residue,
      rotamer_evaluator,
      mon_lib_srv,
      unit_cell,
      f_map,
      fdiff_map=None,
      small_f_map=0.9):
  """
  Decide whether the residue side chain should be re-fit into the density.
  Important: maps assumed to be sigma-scaled!
  """
  get_class = iotbx.pdb.common_residue_names_get_class
  assert get_class(residue.resname) == "common_amino_acid"
  # ALA/GLY have no rotatable side chain.
  if(residue.resname.strip().upper() in ["ALA", "GLY"]): return False
  cl = mmtbx.refinement.real_space.aa_residue_axes_and_clusters(
    residue         = residue,
    mon_lib_srv     = mon_lib_srv,
    backbone_sample = False).clusters
  if(len(cl)==0): return False
  # service functions
  def anal(x):
    # True if any value is negative or consecutive values differ by >3x.
    for i,e in enumerate(x):
      if(e<0): return True
      r=None
      if(i+1<len(x)):
        e1=abs(x[i])
        e2=abs(x[i+1])
        if(e1>e2):
          if(e2!=0): r = e1/e2
        else:
          if(e1!=0): r = e2/e1
      if(r is not None and r>3): return True
    return False
  def anal2(x):
    # True if any difference-map value is strongly negative (< -3 sigma).
    for i,e in enumerate(x):
      if(e<-3.0): return True
    return False
  def anal3(x):
    # True if all map values reach the small_f_map threshold.
    return (flex.double(x)>=small_f_map).count(True)==len(x)
  #
  last = cl[0].vector[len(cl[0].vector)-1]
  vector = flatten(cl[0].vector)
  bs = residue.atoms().extract_b()
  #
  # Per-atom atomic weights via standardized scattering labels.
  weights = []
  for el in residue.atoms().extract_element():
    std_lbl = eltbx.xray_scattering.get_standard_label(
      label=el, exact=True, optional=True)
    weights.append(tiny_pse.table(std_lbl).weight())
  #
  side_chain_sel = flex.size_t()
  main_chain_sel = flex.size_t()
  for i_seq, a in enumerate(list(residue.atoms())):
    if(a.name.strip().upper() not in ["N","CA","C","O","CB"]):
      side_chain_sel.append(i_seq)
    elif(a.name.strip().upper() in ["N","CA","C"]):
      main_chain_sel.append(i_seq)
  #
  sites_frac = unit_cell.fractionalize(residue.atoms().extract_xyz())
  ### If it is rotamer OUTLIER
  # An outlier is acceptable only if all side-chain atoms sit in density
  # above 1 sigma; otherwise request a re-fit.
  if(rotamer_evaluator.evaluate_residue(residue)=="OUTLIER"):
    map_values = flex.double()
    for i in side_chain_sel:
      map_values.append(f_map.value_at_closest_grid_point(sites_frac[i]))
    valid_outlier = map_values.all_gt(1.0)
    return not valid_outlier
  ###
  mv = []
  mv_orig = []
  if(fdiff_map is not None): diff_mv = []
  mv2 = flex.double()
  for v_ in vector:
    sf = sites_frac[v_]
    f_map_epi = f_map.eight_point_interpolation(sf)
    # Weight- (and B-) normalized map values along the rotatable vector.
    mv.append(     f_map_epi/weights[v_]*bs[v_])
    mv2.append(    f_map_epi/weights[v_])
    mv_orig.append(f_map_epi)
    if(fdiff_map is not None):
      diff_mv.append(fdiff_map.value_at_closest_grid_point(sf))
  f = anal(mv)
  if(fdiff_map is not None): f2 = anal2(diff_mv)
  f3 = anal3(mv_orig)
  # main vs side chain
  mvbb = flex.double()
  mvbb_orig = flex.double()
  for mcs in main_chain_sel:
    sf = sites_frac[mcs]
    f_map_epi = f_map.eight_point_interpolation(sf)
    mvbb.append(     f_map_epi/weights[mcs])
    mvbb_orig.append(f_map_epi)
  f4 = flex.min(mvbb_orig)<small_f_map or flex.mean(mvbb)<flex.mean(mv2)
  c_id = "none"
  if(residue.parent() is not None):
    c_id = residue.parent().parent().id.strip()
  # id_str appears unused below — presumably kept for debugging output.
  id_str = "%s_%s_%s"%(c_id, residue.resname.strip(), residue.resid().strip())
  #
  result = False
  if(fdiff_map is not None):
    if((f or f2 and not f3) and not f4): result = True
    else: result = False
  else:
    if((f and not f3) and not f4): result = True
    else: result = False
  return result
def need_sidechain_fit(residue,
                       rotamer_evaluator,
                       mon_lib_srv,
                       unit_cell,
                       f_map,
                       fdiff_map=None,
                       small_f_map=0.9):
    """
    Heuristic decision: does this amino-acid side chain need re-fitting?
    Important: maps assumed to be sigma-scaled!
    """
    get_class = iotbx.pdb.common_residue_names_get_class
    assert get_class(residue.resname) == "common_amino_acid"
    # Nothing to fit for ALA/GLY.
    if (residue.resname.strip().upper() in ["ALA", "GLY"]): return False
    cl = mmtbx.refinement.real_space.aa_residue_axes_and_clusters(
        residue=residue, mon_lib_srv=mon_lib_srv,
        backbone_sample=False).clusters
    if (len(cl) == 0): return False
    # service functions
    def anal(x):
        # Flags a negative value or a >3-fold jump between neighbors.
        for i, e in enumerate(x):
            if (e < 0): return True
            r = None
            if (i + 1 < len(x)):
                e1 = abs(x[i])
                e2 = abs(x[i + 1])
                if (e1 > e2):
                    if (e2 != 0): r = e1 / e2
                else:
                    if (e1 != 0): r = e2 / e1
            if (r is not None and r > 3): return True
        return False
    def anal2(x):
        # Flags strongly negative difference-map values.
        for i, e in enumerate(x):
            if (e < -3.0): return True
        return False
    def anal3(x):
        # All values at or above the small_f_map threshold?
        return (flex.double(x) >= small_f_map).count(True) == len(x)
    #
    last = cl[0].vector[len(cl[0].vector) - 1]
    vector = flatten(cl[0].vector)
    bs = residue.atoms().extract_b()
    #
    # Atomic weights for normalizing map values.
    weights = []
    for el in residue.atoms().extract_element():
        std_lbl = eltbx.xray_scattering.get_standard_label(label=el,
                                                           exact=True,
                                                           optional=True)
        weights.append(tiny_pse.table(std_lbl).weight())
    #
    side_chain_sel = flex.size_t()
    main_chain_sel = flex.size_t()
    for i_seq, a in enumerate(list(residue.atoms())):
        if (a.name.strip().upper() not in ["N", "CA", "C", "O", "CB"]):
            side_chain_sel.append(i_seq)
        elif (a.name.strip().upper() in ["N", "CA", "C"]):
            main_chain_sel.append(i_seq)
    #
    sites_frac = unit_cell.fractionalize(residue.atoms().extract_xyz())
    ### If it is rotamer OUTLIER
    # An outlier is tolerated only when every side-chain atom sits in
    # density above 1 sigma; otherwise a re-fit is requested.
    if (rotamer_evaluator.evaluate_residue(residue) == "OUTLIER"):
        map_values = flex.double()
        for i in side_chain_sel:
            map_values.append(
                f_map.value_at_closest_grid_point(sites_frac[i]))
        valid_outlier = map_values.all_gt(1.0)
        return not valid_outlier
    ###
    mv = []
    mv_orig = []
    if (fdiff_map is not None): diff_mv = []
    mv2 = flex.double()
    for v_ in vector:
        sf = sites_frac[v_]
        f_map_epi = f_map.eight_point_interpolation(sf)
        # Weight- (and B-factor-) normalized map values along the vector.
        mv.append(f_map_epi / weights[v_] * bs[v_])
        mv2.append(f_map_epi / weights[v_])
        mv_orig.append(f_map_epi)
        if (fdiff_map is not None):
            diff_mv.append(fdiff_map.value_at_closest_grid_point(sf))
    f = anal(mv)
    if (fdiff_map is not None): f2 = anal2(diff_mv)
    f3 = anal3(mv_orig)
    # main vs side chain
    mvbb = flex.double()
    mvbb_orig = flex.double()
    for mcs in main_chain_sel:
        sf = sites_frac[mcs]
        f_map_epi = f_map.eight_point_interpolation(sf)
        mvbb.append(f_map_epi / weights[mcs])
        mvbb_orig.append(f_map_epi)
    f4 = flex.min(mvbb_orig) < small_f_map or \
        flex.mean(mvbb) < flex.mean(mv2)
    c_id = "none"
    if (residue.parent() is not None):
        c_id = residue.parent().parent().id.strip()
    # id_str appears unused below — presumably a debugging leftover.
    id_str = "%s_%s_%s" % (c_id, residue.resname.strip(),
                           residue.resid().strip())
    #
    result = False
    if (fdiff_map is not None):
        if ((f or f2 and not f3) and not f4): result = True
        else: result = False
    else:
        if ((f and not f3) and not f4): result = True
        else: result = False
    return result
def __init__(self, atomic_number, x, y, sigmas):
  """Store the raw data and resolve the element symbol for atomic_number."""
  adopt_init_args(self, locals())
  self.element = tiny_pse.table(atomic_number).symbol()
def __init__(self, residue, mon_lib_srv, backbone_sample):
  # Build the rotatable-bond clusters of an amino-acid residue plus
  # several atom selections (clash evaluation, real-space refinement,
  # hydrogens) and per-atom atomic weights.
  self.clusters = []
  atoms = residue.atoms()
  atoms_as_list = list(atoms)
  atom_names = atoms.extract_name()
  self.weights = flex.double()
  self.clash_eval_selection = flex.size_t()
  self.clash_eval_h_selection = flex.bool(len(atoms_as_list), False)
  self.rsr_eval_selection = flex.size_t()
  # Backbone sample
  backrub_axis = []
  backrub_atoms_to_rotate = []
  backrub_atoms_to_evaluate = []
  counter = 0 # XXX DOES THIS RELY ON ORDER?
  for atom in atoms:
    an = atom.name.strip().upper()
    ae = atom.element.strip().upper()
    if(ae in ["H","D"]): self.clash_eval_h_selection[counter]=True
    # N and C define the backrub axis; everything else rotates.
    if(an in ["N", "C"]): backrub_axis.append(counter)
    else: backrub_atoms_to_rotate.append(counter)
    if(an in ["CA", "O", "CB"]): backrub_atoms_to_evaluate.append(counter)
    if(not an in ["CA", "O", "CB", "C", "N", "HA", "H"]):
      self.clash_eval_selection.append(counter)
    if(not ae in ["H","D"]):
      self.rsr_eval_selection.append(counter)
    std_lbl = eltbx.xray_scattering.get_standard_label(
      label=ae, exact=True, optional=True)
    self.weights.append(tiny_pse.table(std_lbl).weight())
    #
    counter += 1
  #
  if(backbone_sample):
    if(len(backrub_axis)==2 and len(backrub_atoms_to_evaluate)>0):
      self.clusters.append(cluster(
        axis            = flex.size_t(backrub_axis),
        atom_names      = atom_names,
        atoms_to_rotate = flex.size_t(backrub_atoms_to_rotate),
        selection       = flex.size_t(backrub_atoms_to_evaluate)))
  self.axes_and_atoms_aa_specific = \
    rotatable_bonds.axes_and_atoms_aa_specific(
      residue = residue, mon_lib_srv = mon_lib_srv)
  if(self.axes_and_atoms_aa_specific is not None):
    for i_aa, aa in enumerate(self.axes_and_atoms_aa_specific):
      # Last axis: evaluate all rotated atoms; otherwise only the first.
      if(i_aa == len(self.axes_and_atoms_aa_specific)-1):
        selection = flex.size_t(aa[1])
      else:
        selection = flex.size_t([aa[1][0]])
      # Exclude pure H or D rotatable groups
      elements_to_rotate = flex.std_string()
      for etr in aa[1]:
        elements_to_rotate.append(atoms_as_list[etr].element.strip())
      c_H = elements_to_rotate.count("H")
      c_D = elements_to_rotate.count("D")
      etr_sz = elements_to_rotate.size()
      if(c_H==etr_sz or c_D==etr_sz or c_H+c_D==etr_sz): continue
      #
      self.clusters.append(cluster(
        axis            = flex.size_t(aa[0]),
        atom_names      = atom_names,
        atoms_to_rotate = flex.size_t(aa[1]),
        selection       = flex.size_t(selection)))
    # Collect all axis atoms (deduplicated) plus the final cluster's
    # rotated atoms; shared by every cluster as its "vector".
    vector_selections = []
    if(len(self.clusters)>0):
      for i_aa, aa in enumerate(self.axes_and_atoms_aa_specific):
        for aa_ in aa[0]:
          if(not aa_ in vector_selections):
            vector_selections.append(aa_)
      vector_selections.append(
        self.clusters[len(self.clusters)-1].atoms_to_rotate)
      for cl in self.clusters:
        cl.vector = vector_selections
def generator(xray_structure,
              data_are_intensities=True,
              title=None,
              wavelength=None,
              temperature=None,
              full_matrix_least_squares_cycles=None,
              conjugate_gradient_least_squares_cycles=None,
              overall_scale_factor=None,
              weighting_scheme_params=None,
              sort_scatterers=True,
              unit_cell_dims=None,
              unit_cell_esds=None):
  """
  Yield the lines of a SHELX instruction (.ins) file for xray_structure:
  TITL/CELL/ZERR/LATT/SYMM header, SFAC/DISP/UNIT element records,
  optional TEMP/L.S./CGLS/WGHT/FVAR cards, one atom line per scatterer,
  and a final HKLF card (4 for intensities, 3 for amplitudes).
  """
  space_group = xray_structure.space_group()
  assert not space_group.is_centric() or space_group.is_origin_centric(),\
         centric_implies_centrosymmetric_error_msg
  # At most one refinement-cycle style may be specified.
  assert [full_matrix_least_squares_cycles,
          conjugate_gradient_least_squares_cycles].count(None) in (0, 1)
  if title is None: title = '????'
  if wavelength is None:
    wavelength = wavelengths.characteristic('Mo').as_angstrom()
  sgi = xray_structure.space_group_info()
  uc = xray_structure.unit_cell()
  yield 'TITL %s in %s\n' % (title, sgi.type().lookup_symbol())
  if unit_cell_dims is None:
    unit_cell_dims = uc.parameters()
  yield 'CELL %.5f %s\n' % (
    wavelength,
    ' '.join(('%.4f ',)*3 + ('%.3f',)*3) % unit_cell_dims)
  if unit_cell_esds:
    yield 'ZERR %i %f %f %f %f %f %f\n' % (
      (sgi.group().order_z(),) + unit_cell_esds)
  else:
    yield 'ZERR %i 0. 0. 0. 0. 0. 0.\n' % sgi.group().order_z()
  # SHELX LATT code; negative means non-centrosymmetric.
  latt = 1 + 'PIRFABC'.find(sgi.group().conventional_centring_type_symbol())
  if not space_group.is_origin_centric(): latt = -latt
  yield 'LATT %i\n' % latt
  for i in xrange(space_group.n_smx()):
    rt_mx = space_group(0, 0, i)
    # The identity is implicit in SHELX.
    if rt_mx.is_unit_mx(): continue
    yield 'SYMM %s\n' % rt_mx
  yield '\n'
  uc_content = xray_structure.unit_cell_content()
  for e in uc_content: uc_content[e] = "%.1f" % uc_content[e]
  # SFAC order: C, H first (if present), then by atomic number.
  sfac = []
  unit = []
  prior = ('C', 'H')
  for e in prior:
    if e in uc_content:
      sfac.append(e)
      unit.append(uc_content[e])
  dsu = [ (tiny_pse.table(e).atomic_number(), e) for e in uc_content ]
  dsu.sort()
  sorted = [ item[-1] for item in dsu ]
  for e in sorted:
    if (e not in prior):
      sfac.append(e)
      unit.append(uc_content[e])
  yield 'SFAC %s\n' % ' '.join(sfac)
  for e in sfac:
    yield 'DISP %s 0 0 0\n' % e
  yield 'UNIT %s\n' % ' '.join(unit)
  # SHELX scattering-factor indices are 1-based.
  sf_idx = dict([ (e, i + 1) for i, e in enumerate(sfac) ])
  yield '\n'
  if temperature:
    yield 'TEMP %.0f\n' % temperature
  if full_matrix_least_squares_cycles:
    yield 'L.S. %i\n' % full_matrix_least_squares_cycles
  if conjugate_gradient_least_squares_cycles:
    yield 'CGLS %i\n' % conjugate_gradient_least_squares_cycles
  yield '\n'
  if weighting_scheme_params is not None:
    if (isinstance(weighting_scheme_params, str)):
      yield 'WGHT %s\n' % weighting_scheme_params
    else:
      a, b = weighting_scheme_params
      if b is None:
        yield 'WGHT %.6f\n' % a
      else:
        yield 'WGHT %.6f %.6f\n' % (a, b)
  if overall_scale_factor is not None:
    yield 'FVAR %.8f\n' % overall_scale_factor
  # Atom line formats: name, sfac index, x y z, occupancy, then U(s).
  fmt_tmpl = ('%-4s', '%2i') + ('%11.6f',)*3 + ('%11.5f',)
  fmt_iso = ' '.join(fmt_tmpl + ('%10.5f',))
  fmt_aniso = ' '.join(fmt_tmpl + ('%.5f',)*2 + ('=\n ',) + ('%.5f',)*4)
  if sort_scatterers:
    # Heaviest elements first.
    dsu = [ (tiny_pse.table(sc.scattering_type).atomic_number(), sc)
            for sc in xray_structure.scatterers() ]
    dsu.sort(reverse=True)
    scatterers = flex.xray_scatterer([ item[-1] for item in dsu ])
  else:
    scatterers = xray_structure.scatterers()
  atomname_set = set()
  for sc in scatterers:
    assert sc.fp == 0 # not implemented
    assert sc.fdp == 0 # not implemented
    assert sc.flags.use_u_iso() ^ sc.flags.use_u_aniso(),\
           both_iso_and_aniso_in_use_error_msg
    atomname = sc.label.strip()
    assert len(atomname) != 0
    assert len(atomname) <= 4
    if (atomname in atomname_set):
      raise RuntimeError('Duplicate atom name: "%s"' % atomname)
    atomname_set.add(atomname)
    params = (atomname, sf_idx[sc.scattering_type]) + sc.site
    occ = sc.weight()
    # SHELX convention: occupancy + 10 fixes the parameter.
    if not sc.flags.grad_occupancy(): occ += 10
    params += (occ, )
    if sc.flags.use_u_iso():
      yield fmt_iso % (params + (sc.u_iso,)) + "\n"
    else:
      # SHELX expects U11 U22 U33 U23 U13 U12 order.
      u11, u22, u33, u12, u13, u23 = adptbx.u_star_as_u_cif(uc, sc.u_star)
      yield fmt_aniso % (params + (u11, u22, u33, u23, u13, u12)) + "\n"
  if data_are_intensities: hklf = 4
  else: hklf = 3
  yield 'HKLF %i\n' % hklf
def generator(xray_structure,
              data_are_intensities=True,
              title=None,
              wavelength=None,
              temperature=None,
              full_matrix_least_squares_cycles=None,
              conjugate_gradient_least_squares_cycles=None,
              overall_scale_factor=None,
              weighting_scheme_params=None,
              sort_scatterers=True,
              unit_cell_dims=None,
              unit_cell_esds=None):
    """
    Generator producing a SHELX .ins file line by line for the given
    structure: header cards, element (SFAC/UNIT) records, optional
    refinement cards, per-scatterer atom lines and the closing HKLF card.
    """
    space_group = xray_structure.space_group()
    assert not space_group.is_centric() or space_group.is_origin_centric(),\
        centric_implies_centrosymmetric_error_msg
    # Specify at most one of the two refinement-cycle counts.
    assert [
        full_matrix_least_squares_cycles,
        conjugate_gradient_least_squares_cycles
    ].count(None) in (0, 1)
    if title is None: title = '????'
    if wavelength is None:
        wavelength = wavelengths.characteristic('Mo').as_angstrom()
    sgi = xray_structure.space_group_info()
    uc = xray_structure.unit_cell()
    yield 'TITL %s in %s\n' % (title, sgi.type().lookup_symbol())
    if unit_cell_dims is None:
        unit_cell_dims = uc.parameters()
    yield 'CELL %.5f %s\n' % (wavelength, ' '.join(
        ('%.4f ', ) * 3 + ('%.3f', ) * 3) % unit_cell_dims)
    if unit_cell_esds:
        yield 'ZERR %i %f %f %f %f %f %f\n' % (
            (sgi.group().order_z(), ) + unit_cell_esds)
    else:
        yield 'ZERR %i 0. 0. 0. 0. 0. 0.\n' % sgi.group().order_z()
    # LATT code per SHELX; negated for non-centrosymmetric groups.
    latt = 1 + 'PIRFABC'.find(sgi.group().conventional_centring_type_symbol())
    if not space_group.is_origin_centric(): latt = -latt
    yield 'LATT %i\n' % latt
    for i in xrange(space_group.n_smx()):
        rt_mx = space_group(0, 0, i)
        # Identity operation is implicit.
        if rt_mx.is_unit_mx(): continue
        yield 'SYMM %s\n' % rt_mx
    yield '\n'
    uc_content = xray_structure.unit_cell_content()
    for e in uc_content: uc_content[e] = "%.1f" % uc_content[e]
    # Element order: C then H (when present), then increasing Z.
    sfac = []
    unit = []
    prior = ('C', 'H')
    for e in prior:
        if e in uc_content:
            sfac.append(e)
            unit.append(uc_content[e])
    dsu = [(tiny_pse.table(e).atomic_number(), e) for e in uc_content]
    dsu.sort()
    sorted = [item[-1] for item in dsu]
    for e in sorted:
        if (e not in prior):
            sfac.append(e)
            unit.append(uc_content[e])
    yield 'SFAC %s\n' % ' '.join(sfac)
    for e in sfac:
        yield 'DISP %s 0 0 0\n' % e
    yield 'UNIT %s\n' % ' '.join(unit)
    # 1-based scattering-factor index per element symbol.
    sf_idx = dict([(e, i + 1) for i, e in enumerate(sfac)])
    yield '\n'
    if temperature:
        yield 'TEMP %.0f\n' % temperature
    if full_matrix_least_squares_cycles:
        yield 'L.S. %i\n' % full_matrix_least_squares_cycles
    if conjugate_gradient_least_squares_cycles:
        yield 'CGLS %i\n' % conjugate_gradient_least_squares_cycles
    yield '\n'
    if weighting_scheme_params is not None:
        if (isinstance(weighting_scheme_params, str)):
            yield 'WGHT %s\n' % weighting_scheme_params
        else:
            a, b = weighting_scheme_params
            if b is None:
                yield 'WGHT %.6f\n' % a
            else:
                yield 'WGHT %.6f %.6f\n' % (a, b)
    if overall_scale_factor is not None:
        yield 'FVAR %.8f\n' % overall_scale_factor
    # Atom record formats: label, sfac index, fractional x y z, occ, U(s).
    fmt_tmpl = ('%-4s', '%2i') + ('%11.6f', ) * 3 + ('%11.5f', )
    fmt_iso = ' '.join(fmt_tmpl + ('%10.5f', ))
    fmt_aniso = ' '.join(fmt_tmpl + ('%.5f', ) * 2 + ('=\n ', ) +
                         ('%.5f', ) * 4)
    if sort_scatterers:
        # Sort heaviest elements first.
        dsu = [(tiny_pse.table(sc.scattering_type).atomic_number(), sc)
               for sc in xray_structure.scatterers()]
        dsu.sort(reverse=True)
        scatterers = flex.xray_scatterer([item[-1] for item in dsu])
    else:
        scatterers = xray_structure.scatterers()
    atomname_set = set()
    for sc in scatterers:
        assert sc.fp == 0 # not implemented
        assert sc.fdp == 0 # not implemented
        assert sc.flags.use_u_iso() ^ sc.flags.use_u_aniso(),\
            both_iso_and_aniso_in_use_error_msg
        atomname = sc.label.strip()
        assert len(atomname) != 0
        assert len(atomname) <= 4
        if (atomname in atomname_set):
            raise RuntimeError('Duplicate atom name: "%s"' % atomname)
        atomname_set.add(atomname)
        params = (atomname, sf_idx[sc.scattering_type]) + sc.site
        occ = sc.weight()
        # Adding 10 to the occupancy fixes it in SHELX refinement.
        if not sc.flags.grad_occupancy(): occ += 10
        params += (occ, )
        if sc.flags.use_u_iso():
            yield fmt_iso % (params + (sc.u_iso, )) + "\n"
        else:
            # Reordered to the U11 U22 U33 U23 U13 U12 SHELX convention.
            u11, u22, u33, u12, u13, u23 = adptbx.u_star_as_u_cif(
                uc, sc.u_star)
            yield fmt_aniso % (params +
                               (u11, u22, u33, u23, u13, u12)) + "\n"
    if data_are_intensities: hklf = 4
    else: hklf = 3
    yield 'HKLF %i\n' % hklf
def run(gaussian_fit_pickle_file_names, itvc_file_name, kissel_dir):
  # Load pickled Gaussian fits, re-evaluate each against reference data
  # (ITVC table 6.1.1.1 or Kissel tables) and print a sorted error summary.
  itvc_tab = None
  if (itvc_file_name is not None):
    itvc_tab = itvc_section61_io.read_table6111(itvc_file_name)
  fits = read_pickled_fits(gaussian_fit_pickle_file_names)
  #easy_pickle.dump("all_fits.pickle", fits)
  for k,v in fits.parameters.items():
    print "# %s:" % k, v
  print
  max_errors = flex.double()
  labeled_fits = []
  n_processed = 0
  for label in expected_labels(kissel_dir):
    try:
      fit_group = fits.all[label]
    except Exception:
      print "# Warning: Missing scattering_type:", label
    else:
      print "scattering_type:", label
      prev_fit = None
      for fit in fit_group:
        # Sanity checks on the ordering of fits within a group.
        if (prev_fit is not None):
          if (fit.stol > prev_fit.stol):
            print "# Warning: decreasing stol"
          elif (fit.stol == prev_fit.stol):
            if (fit.max_error < prev_fit.max_error):
              print "# Warning: same stol but previous has larger error"
        prev_fit = fit
        fit.sort().show()
        gaussian_fit = None
        if (itvc_tab is not None and label != "O2-"):
          entry = itvc_tab.entries[label]
          sel = international_tables_stols <= fit.stol + 1.e-6
          gaussian_fit = scitbx.math.gaussian.fit(
            international_tables_stols.select(sel),
            entry.table_y.select(sel),
            entry.table_sigmas.select(sel),
            fit)
        elif (kissel_dir is not None):
          # Kissel files are named "<Z:02d>_<symbol>_rf".
          file_name = os.path.join(kissel_dir, "%02d_%s_rf" % (
            tiny_pse.table(label).atomic_number(), label))
          tab = kissel_io.read_table(file_name)
          sel = tab.itvc_sampling_selection() & (tab.x <= fit.stol + 1.e-6)
          gaussian_fit = scitbx.math.gaussian.fit(
            tab.x.select(sel),
            tab.y.select(sel),
            tab.sigmas.select(sel),
            fit)
        if (gaussian_fit is not None):
          max_errors.append(
            flex.max(gaussian_fit.significant_relative_errors()))
          labeled_fits.append(labeled_fit(label, gaussian_fit))
      n_processed += 1
      print
  if (n_processed != len(fits.all)):
    print "# Warning: %d fits were not processed." % (
      len(fits.all) - n_processed)
    print
  if (max_errors.size() > 0):
    print "Summary:"
    # Worst fits first.
    perm = flex.sort_permutation(data=max_errors, reverse=True)
    max_errors = max_errors.select(perm)
    labeled_fits = flex.select(labeled_fits, perm)
    quick_summary = {}
    for me,lf in zip(max_errors, labeled_fits):
      print lf.label, "n_terms=%d max_error: %.4f" % (
        lf.gaussian_fit.n_terms(), me)
      quick_summary[lf.label + "_" + str(lf.gaussian_fit.n_terms())] = me
      if (me > 0.01):
        # Detail table for fits with large errors.
        fit = lf.gaussian_fit
        re = fit.significant_relative_errors()
        for s,y,a,r in zip(fit.table_x(),fit.table_y(),fit.fitted_values(),re):
          comment = ""
          if (r > 0.01): comment = " large error"
          print "%4.2f %7.4f %7.4f %7.4f %7.4f%s" % (s,y,a,a-y,r,comment)
        print
    print
assert xray_scattering.get_standard_label( label="SI1+", exact=True, optional=True) is None try: xray_scattering.get_standard_label(label="SI1+", exact=True, optional=False) except ValueError, e: assert str(e) == 'Unknown scattering type label: "SI1+"' else: raise Exception_expected # from cctbx.eltbx import tiny_pse for sl in std_labels: e, c = xray_scattering.get_element_and_charge_symbols( scattering_type=sl) assert e == "T" or tiny_pse.table(e, True).symbol() == e if (c != ""): assert len(c) == 2 assert "123456789".find(c[0]) >= 0 assert c[1] in ["+", "-"] def exercise_gaussian(): g = xray_scattering.gaussian(0) assert g.n_terms() == 0 assert approx_equal(g.c(), 0) assert g.use_c() assert g.n_parameters() == 1 g = xray_scattering.gaussian(0, False) assert g.n_terms() == 0 assert approx_equal(g.c(), 0)
assert xray_scattering.get_standard_label(label="o-") == "O1-" assert xray_scattering.get_standard_label(label="SI4+A") == "Si4+" assert xray_scattering.get_standard_label(label="SI1+") == "Si" assert xray_scattering.get_standard_label(label="SI1+", exact=True, optional=True) is None try: xray_scattering.get_standard_label(label="SI1+", exact=True, optional=False) except ValueError, e: assert str(e) == 'Unknown scattering type label: "SI1+"' else: raise Exception_expected # from cctbx.eltbx import tiny_pse for sl in std_labels: e, c = xray_scattering.get_element_and_charge_symbols(scattering_type=sl) assert e == "T" or tiny_pse.table(e, True).symbol() == e if (c != ""): assert len(c) == 2 assert "123456789".find(c[0]) >= 0 assert c[1] in ["+", "-"] def exercise_gaussian(): g = xray_scattering.gaussian(0) assert g.n_terms() == 0 assert approx_equal(g.c(), 0) assert g.use_c() assert g.n_parameters() == 1 g = xray_scattering.gaussian(0, False) assert g.n_terms() == 0 assert approx_equal(g.c(), 0) assert not g.use_c()
def run(gaussian_fit_pickle_file_names, itvc_file_name, kissel_dir):
    # Re-evaluate pickled Gaussian fits against reference scattering data
    # (ITVC section 6.1 tables or per-element Kissel files) and print a
    # summary sorted by decreasing maximum relative error.
    itvc_tab = None
    if (itvc_file_name is not None):
        itvc_tab = itvc_section61_io.read_table6111(itvc_file_name)
    fits = read_pickled_fits(gaussian_fit_pickle_file_names)
    #easy_pickle.dump("all_fits.pickle", fits)
    for k, v in fits.parameters.items():
        print "# %s:" % k, v
    print
    max_errors = flex.double()
    labeled_fits = []
    n_processed = 0
    for label in expected_labels(kissel_dir):
        try:
            fit_group = fits.all[label]
        except Exception:
            print "# Warning: Missing scattering_type:", label
        else:
            print "scattering_type:", label
            prev_fit = None
            for fit in fit_group:
                # Warn about suspicious ordering within the fit group.
                if (prev_fit is not None):
                    if (fit.stol > prev_fit.stol):
                        print "# Warning: decreasing stol"
                    elif (fit.stol == prev_fit.stol):
                        if (fit.max_error < prev_fit.max_error):
                            print "# Warning: same stol but previous has larger error"
                prev_fit = fit
                fit.sort().show()
                gaussian_fit = None
                if (itvc_tab is not None and label != "O2-"):
                    entry = itvc_tab.entries[label]
                    sel = international_tables_stols <= fit.stol + 1.e-6
                    gaussian_fit = scitbx.math.gaussian.fit(
                        international_tables_stols.select(sel),
                        entry.table_y.select(sel),
                        entry.table_sigmas.select(sel), fit)
                elif (kissel_dir is not None):
                    # Kissel data files are named "<Z:02d>_<symbol>_rf".
                    file_name = os.path.join(
                        kissel_dir, "%02d_%s_rf" %
                        (tiny_pse.table(label).atomic_number(), label))
                    tab = kissel_io.read_table(file_name)
                    sel = tab.itvc_sampling_selection() & (
                        tab.x <= fit.stol + 1.e-6)
                    gaussian_fit = scitbx.math.gaussian.fit(
                        tab.x.select(sel), tab.y.select(sel),
                        tab.sigmas.select(sel), fit)
                if (gaussian_fit is not None):
                    max_errors.append(
                        flex.max(gaussian_fit.significant_relative_errors()))
                    labeled_fits.append(labeled_fit(label, gaussian_fit))
            n_processed += 1
            print
    if (n_processed != len(fits.all)):
        print "# Warning: %d fits were not processed." % (
            len(fits.all) - n_processed)
        print
    if (max_errors.size() > 0):
        print "Summary:"
        # Sort so the worst fits come first.
        perm = flex.sort_permutation(data=max_errors, reverse=True)
        max_errors = max_errors.select(perm)
        labeled_fits = flex.select(labeled_fits, perm)
        quick_summary = {}
        for me, lf in zip(max_errors, labeled_fits):
            print lf.label, "n_terms=%d max_error: %.4f" % (
                lf.gaussian_fit.n_terms(), me)
            quick_summary[lf.label + "_" +
                          str(lf.gaussian_fit.n_terms())] = me
            if (me > 0.01):
                # Detailed listing for poorly fitting entries.
                fit = lf.gaussian_fit
                re = fit.significant_relative_errors()
                for s, y, a, r in zip(fit.table_x(), fit.table_y(),
                                      fit.fitted_values(), re):
                    comment = ""
                    if (r > 0.01): comment = " large error"
                    print "%4.2f %7.4f %7.4f %7.4f %7.4f%s" % (
                        s, y, a, a - y, r, comment)
                print
        print