def write_xml(self, file, command_line=''):
    """Write an ISPyB AutoProcContainer XML document describing every
    scaled crystal to *file*.

    For each crystal this records the likely spacegroup and refined cell,
    the merging statistics per shell, per-sweep integration results
    (wedge, distance, beam centre) and the program attachments (scaled
    reflection files, merging-statistics graphs and xia2.txt).

    :param file: path of the XML file to (over)write.
    :param command_line: processing command line to record; when empty it
        is recovered from the xia2 CommandLine handler.
    """
    # Statistic labels to export, in presentation order; hoisted out of
    # the loop (the original rebuilt this list for every statistics key).
    keys = [
        'High resolution limit', 'Low resolution limit', 'Completeness',
        'Multiplicity', 'I/sigma', 'Rmerge(I+/-)', 'CC half',
        'Anomalous completeness', 'Anomalous correlation',
        'Anomalous multiplicity', 'Total observations', 'Total unique',
        'Rmeas(I)', 'Rmeas(I+/-)', 'Rpim(I)', 'Rpim(I+/-)', 'Partial Bias'
    ]

    # 'with' guarantees the handle is closed even if a getter below
    # raises; the original open()/close() pair leaked the file object on
    # error.
    with open(file, 'w') as fout:
        fout.write('<?xml version="1.0"?>')
        fout.write('<AutoProcContainer>\n')

        for crystal in sorted(self._crystals):
            xcrystal = self._crystals[crystal]

            cell = xcrystal.get_cell()
            spacegroup = xcrystal.get_likely_spacegroups()[0]

            fout.write('<AutoProc><spaceGroup>%s</spaceGroup>' % spacegroup)
            self.write_refined_cell(fout, cell)
            fout.write('</AutoProc>')

            fout.write('<AutoProcScalingContainer>')
            fout.write('<AutoProcScaling>')
            self.write_date(fout)
            fout.write('</AutoProcScaling>')

            statistics_all = xcrystal.get_statistics()
            reflection_files = xcrystal.get_scaled_merged_reflections()

            for key in statistics_all.keys():
                pname, xname, dname = key

                # FIXME should assert that the dname is a
                # valid wavelength name

                stats = [k for k in keys if k in statistics_all[key]]

                xwavelength = xcrystal.get_xwavelength(dname)
                sweeps = xwavelength.get_sweeps()

                # Per-shell statistics are stored as 3-element
                # lists/tuples indexed overall / inner / outer.
                for j, name in enumerate(
                        ['overall', 'innerShell', 'outerShell']):
                    statistics_cache = {}
                    for s in stats:
                        # Original had identical list and tuple branches;
                        # merged into one isinstance check.
                        if isinstance(statistics_all[key][s], (list, tuple)):
                            statistics_cache[s] = statistics_all[key][s][j]
                    # send these to be written out
                    self.write_scaling_statistics(fout, name,
                                                  statistics_cache)

                for sweep in sweeps:
                    fout.write('<AutoProcIntegrationContainer>\n')
                    if '#' in sweep.get_template():
                        image_name = sweep.get_image_name(0)
                    else:
                        image_name = os.path.join(sweep.get_directory(),
                                                  sweep.get_template())
                    fout.write('<Image><fileName>%s</fileName>' %
                               os.path.split(image_name)[-1])
                    fout.write('<fileLocation>%s</fileLocation></Image>' %
                               sanitize(os.path.split(image_name)[0]))
                    fout.write('<AutoProcIntegration>\n')
                    cell = sweep.get_integrater_cell()
                    self.write_cell(fout, cell)

                    # FIXME this is naughty - reaching through the sweep's
                    # private accessors.
                    intgr = sweep._get_integrater()
                    start, end = intgr.get_integrater_wedge()
                    fout.write('<startImageNumber>%d</startImageNumber>' %
                               start)
                    fout.write('<endImageNumber>%d</endImageNumber>' % end)

                    # FIXME this is naughty
                    indxr = sweep._get_indexer()
                    fout.write(
                        '<refinedDetectorDistance>%f'
                        '</refinedDetectorDistance>' %
                        indxr.get_indexer_distance())
                    beam = indxr.get_indexer_beam_centre()
                    fout.write('<refinedXBeam>%f</refinedXBeam>' % beam[0])
                    fout.write('<refinedYBeam>%f</refinedYBeam>' % beam[1])
                    fout.write('</AutoProcIntegration>\n')
                    fout.write('</AutoProcIntegrationContainer>\n')

            fout.write('</AutoProcScalingContainer>')

            # file unpacking nonsense
            if not command_line:
                from xia2.Handlers.CommandLine import CommandLine
                command_line = CommandLine.get_command_line()

            fout.write('<AutoProcProgramContainer><AutoProcProgram>')
            fout.write('<processingCommandLine>%s</processingCommandLine>' %
                       sanitize(command_line))
            fout.write('<processingPrograms>xia2</processingPrograms>')
            fout.write('</AutoProcProgram>')

            from xia2.Handlers.Environment import Environment
            data_directory = Environment.generate_directory('DataFiles')
            log_directory = Environment.generate_directory('LogFiles')

            for k in reflection_files:
                reflection_file = reflection_files[k]
                if not isinstance(reflection_file, str):
                    continue
                reflection_file = FileHandler.get_data_file(reflection_file)
                basename = os.path.basename(reflection_file)
                if os.path.isfile(os.path.join(data_directory, basename)):
                    # Use file in DataFiles directory in preference (if it exists)
                    reflection_file = os.path.join(data_directory, basename)

                fout.write('<AutoProcProgramAttachment><fileType>Result')
                fout.write('</fileType><fileName>%s</fileName>' %
                           os.path.split(reflection_file)[-1])
                fout.write('<filePath>%s</filePath>' %
                           sanitize(os.path.split(reflection_file)[0]))
                fout.write('</AutoProcProgramAttachment>\n')

            # attach any merging-statistics graphs found in LogFiles
            import glob
            for merging_stats_json in glob.glob(
                    os.path.join(log_directory, '*merging-statistics.json')):
                fout.write('<AutoProcProgramAttachment><fileType>Graph')
                fout.write('</fileType><fileName>%s</fileName>' %
                           os.path.split(merging_stats_json)[-1])
                fout.write('<filePath>%s</filePath>' %
                           sanitize(log_directory))
                fout.write('</AutoProcProgramAttachment>\n')

            # add the xia2.txt file...
            fout.write('<AutoProcProgramAttachment><fileType>Log')
            fout.write('</fileType><fileName>xia2.txt</fileName>')
            fout.write('<filePath>%s</filePath>' % sanitize(os.getcwd()))
            fout.write('</AutoProcProgramAttachment>\n')
            fout.write('</AutoProcProgramContainer>')

        fout.write('</AutoProcContainer>\n')
def json_object(self, command_line=''):
    """Return the ISPyB AutoProc description of the scaled crystals as a
    plain, JSON-serialisable dict (same content as the XML writer).

    :param command_line: command line to record; when empty it is
        recovered from the xia2 CommandLine handler.
    :returns: dict with 'AutoProc', 'AutoProcScalingContainer' and
        'AutoProcProgramContainer' entries.
        NOTE(review): these top-level keys are overwritten on every pass
        of the crystal loop, so only the last crystal survives --
        presumably single-crystal input is assumed; confirm with callers.
    """
    # Statistic labels to export, in presentation order; hoisted out of
    # the loop (the original rebuilt this list per statistics key and
    # also fetched an unused wavelength-name list).
    keys = [
        'High resolution limit', 'Low resolution limit', 'Completeness',
        'Multiplicity', 'I/sigma', 'Rmerge(I+/-)', 'CC half',
        'Anomalous completeness', 'Anomalous correlation',
        'Anomalous multiplicity', 'Total observations', 'Total unique',
        'Rmeas(I)', 'Rmeas(I+/-)', 'Rpim(I)', 'Rpim(I+/-)', 'Partial Bias'
    ]

    result = {}

    for crystal in sorted(self._crystals):
        xcrystal = self._crystals[crystal]

        cell = xcrystal.get_cell()
        spacegroup = xcrystal.get_likely_spacegroups()[0]

        result['AutoProc'] = {}
        tmp = result['AutoProc']
        tmp['spaceGroup'] = spacegroup
        for name, value in zip(['a', 'b', 'c', 'alpha', 'beta', 'gamma'],
                               cell):
            tmp['refinedCell_%s' % name] = value

        result['AutoProcScalingContainer'] = {}
        tmp = result['AutoProcScalingContainer']
        tmp['AutoProcScaling'] = {
            'recordTimeStamp': time.strftime('%Y-%m-%d %H:%M:%S',
                                             time.localtime())
        }

        statistics_all = xcrystal.get_statistics()
        reflection_files = xcrystal.get_scaled_merged_reflections()

        for key in statistics_all.keys():
            pname, xname, dname = key

            # FIXME should assert that the dname is a
            # valid wavelength name

            stats = [k for k in keys if k in statistics_all[key]]

            xwavelength = xcrystal.get_xwavelength(dname)
            sweeps = xwavelength.get_sweeps()

            tmp['AutoProcScalingStatistics'] = []
            tmp2 = tmp['AutoProcScalingStatistics']

            # Per-shell statistics are 3-element lists/tuples indexed
            # overall / inner / outer.
            for j, name in enumerate(
                    ['overall', 'innerShell', 'outerShell']):
                statistics_cache = {'scalingStatisticsType': name}
                for s in stats:
                    # Only statistics with an ISPyB column mapping are
                    # exported.
                    if s not in self._name_map:
                        continue
                    n = self._name_map[s]
                    # Original had identical list and tuple branches;
                    # merged into one isinstance check.
                    if isinstance(statistics_all[key][s], (list, tuple)):
                        statistics_cache[n] = statistics_all[key][s][j]
                tmp2.append(statistics_cache)

            tmp['AutoProcIntegrationContainer'] = []
            tmp2 = tmp['AutoProcIntegrationContainer']
            for sweep in sweeps:
                if '#' in sweep.get_template():
                    image_name = sweep.get_image_name(0)
                else:
                    image_name = os.path.join(sweep.get_directory(),
                                              sweep.get_template())
                cell = sweep.get_integrater_cell()
                intgr_tmp = {}
                for name, value in zip(
                        ['a', 'b', 'c', 'alpha', 'beta', 'gamma'], cell):
                    intgr_tmp['cell_%s' % name] = value

                # FIXME this is naughty - private accessors
                indxr = sweep._get_indexer()
                intgr = sweep._get_integrater()

                start, end = intgr.get_integrater_wedge()
                intgr_tmp['startImageNumber'] = start
                intgr_tmp['endImageNumber'] = end
                intgr_tmp['refinedDetectorDistance'] = \
                    indxr.get_indexer_distance()
                beam = indxr.get_indexer_beam_centre()
                intgr_tmp['refinedXBeam'] = beam[0]
                intgr_tmp['refinedYBeam'] = beam[1]

                tmp2.append(
                    {'Image': {
                        'fileName': os.path.split(image_name)[-1],
                        'fileLocation': sanitize(
                            os.path.split(image_name)[0])},
                     'AutoProcIntegration': intgr_tmp})

        # file unpacking nonsense
        result['AutoProcProgramContainer'] = {}
        tmp = result['AutoProcProgramContainer']
        tmp2 = {}
        if not command_line:
            from xia2.Handlers.CommandLine import CommandLine
            command_line = CommandLine.get_command_line()
        tmp2['processingCommandLine'] = sanitize(command_line)
        tmp2['processingProgram'] = 'xia2'
        tmp['AutoProcProgram'] = tmp2

        tmp['AutoProcProgramAttachment'] = []
        tmp2 = tmp['AutoProcProgramAttachment']
        from xia2.Handlers.Environment import Environment
        data_directory = Environment.generate_directory('DataFiles')
        for k in reflection_files:
            reflection_file = reflection_files[k]
            if not isinstance(reflection_file, str):
                continue
            reflection_file = FileHandler.get_data_file(reflection_file)
            basename = os.path.basename(reflection_file)
            if os.path.isfile(os.path.join(data_directory, basename)):
                # Use file in DataFiles directory in preference (if it exists)
                reflection_file = os.path.join(data_directory, basename)
            tmp2.append({
                'fileType': 'Result',
                'fileName': os.path.split(reflection_file)[-1],
                'filePath': sanitize(os.path.split(reflection_file)[0]),
            })

        tmp2.append({'fileType': 'Log',
                     'fileName': 'xia2.txt',
                     'filePath': sanitize(os.getcwd())})

    return result
def get_output(self):
    """Return a human-readable multi-line report for this crystal.

    Always includes the crystal name, optional sequence and the output of
    every wavelength; once the scaler reports finish-done it appends the
    autoindexing / integration / scaling summaries, resolution limits,
    spacegroup and unit cell (with esds when available), optional
    Matthews-coefficient estimates, and the scaled reflection file
    locations.  Also pushes spacegroup/wavelength data into the CIF and
    mmCIF handlers and writes both CIF files as a side effect.
    """
    # NOTE(review): iteritems()/itervalues() below are Python 2 only —
    # this block will not run under Python 3 as written.
    result = "Crystal: %s\n" % self._name
    if self._aa_sequence:
        result += "Sequence: %s\n" % self._aa_sequence.get_sequence()
    for wavelength in self._wavelengths.keys():
        result += self._wavelengths[wavelength].get_output()

    scaler = self._get_scaler()
    # The detailed report is only meaningful after scaling has finished.
    if scaler.get_scaler_finish_done():
        # Per-sweep autoindexing and integration summaries.
        for wname, xwav in self._wavelengths.iteritems():
            for xsweep in xwav.get_sweeps():
                idxr = xsweep._get_indexer()
                if PhilIndex.params.xia2.settings.show_template:
                    result += "%s\n" % banner(
                        "Autoindexing %s (%s)" %
                        (idxr.get_indexer_sweep_name(), idxr.get_template()))
                else:
                    result += "%s\n" % banner(
                        "Autoindexing %s" % idxr.get_indexer_sweep_name())
                result += "%s\n" % idxr.show_indexer_solutions()

                intgr = xsweep._get_integrater()
                if PhilIndex.params.xia2.settings.show_template:
                    result += "%s\n" % banner(
                        "Integrating %s (%s)" %
                        (intgr.get_integrater_sweep_name(),
                         intgr.get_template()))
                else:
                    result += "%s\n" % banner(
                        "Integrating %s" % intgr.get_integrater_sweep_name())
                result += "%s\n" % intgr.show_per_image_statistics()

        result += "%s\n" % banner("Scaling %s" % self.get_name())

        # Resolution limits per (wavelength, sweep), flagging when the
        # applied limit differs from the suggested one.
        for (
                (dname, sname),
                (limit, suggested),
        ) in scaler.get_scaler_resolution_limits().iteritems():
            if suggested is None or limit == suggested:
                result += "Resolution limit for %s/%s: %5.2f\n" % (
                    dname,
                    sname,
                    limit,
                )
            else:
                result += (
                    "Resolution limit for %s/%s: %5.2f (%5.2f suggested)\n"
                    % (dname, sname, limit, suggested))

        # this is now deprecated - be explicit in what you are
        # asking for...
        reflections_all = self.get_scaled_merged_reflections()
        statistics_all = self._get_scaler().get_scaler_statistics()

        # print some of these statistics, perhaps?
        for key in statistics_all.keys():
            result += format_statistics(statistics_all[key],
                                        caption="For %s/%s/%s" % key)

        # then print out some "derived" information based on the
        # scaling - this is presented through the Scaler interface
        # explicitly...
        cell = self._get_scaler().get_scaler_cell()
        cell_esd = self._get_scaler().get_scaler_cell_esd()
        spacegroups = self._get_scaler().get_scaler_likely_spacegroups()
        spacegroup = spacegroups[0]
        resolution = self._get_scaler().get_scaler_highest_resolution()

        # Normalise the spacegroup to its canonical lookup symbol and
        # record it in both CIF outputs.
        from cctbx import sgtbx
        sg = sgtbx.space_group_type(str(spacegroup))
        spacegroup = sg.lookup_symbol()
        CIF.set_spacegroup(sg)
        mmCIF.set_spacegroup(sg)

        if len(self._wavelengths) == 1:
            CIF.set_wavelengths(
                [w.get_wavelength() for w in self._wavelengths.itervalues()])
            mmCIF.set_wavelengths(
                [w.get_wavelength() for w in self._wavelengths.itervalues()])
        else:
            # Multi-wavelength: one CIF block per wavelength, named
            # project_crystal_wavelength.
            for wavelength in self._wavelengths.keys():
                full_wave_name = "%s_%s_%s" % (
                    self._project._name,
                    self._name,
                    wavelength,
                )
                CIF.get_block(full_wave_name)[
                    "_diffrn_radiation_wavelength"] = self._wavelengths[
                        wavelength].get_wavelength()
                mmCIF.get_block(full_wave_name)[
                    "_diffrn_radiation_wavelength"] = self._wavelengths[
                        wavelength].get_wavelength()
            CIF.set_wavelengths({
                name: wave.get_wavelength()
                for name, wave in self._wavelengths.iteritems()
            })
            mmCIF.set_wavelengths({
                name: wave.get_wavelength()
                for name, wave in self._wavelengths.iteritems()
            })

        result += "Assuming spacegroup: %s\n" % spacegroup
        if len(spacegroups) > 1:
            result += "Other likely alternatives are:\n"
            for sg in spacegroups[1:]:
                result += "%s\n" % sg

        if cell_esd:
            from libtbx.utils import format_float_with_standard_uncertainty

            # Align the a/b/c row with the alpha/beta/gamma row by
            # padding each value around its decimal point.
            def match_formatting(dimA, dimB):
                def conditional_split(s):
                    return ((s[:s.index(".")], s[s.index("."):]) if "."
                            in s else (s, ""))

                A, B = conditional_split(dimA), conditional_split(dimB)
                maxlen = (max(len(A[0]), len(B[0])),
                          max(len(A[1]), len(B[1])))
                return (
                    A[0].rjust(maxlen[0]) + A[1].ljust(maxlen[1]),
                    B[0].rjust(maxlen[0]) + B[1].ljust(maxlen[1]),
                )

            formatted_cell_esds = tuple(
                format_float_with_standard_uncertainty(v, sd)
                for v, sd in zip(cell, cell_esd))
            formatted_rows = (formatted_cell_esds[0:3],
                              formatted_cell_esds[3:6])
            # NOTE(review): formatted_rows is indexed below, which fails
            # on a Python 3 zip object — Python 2 only as written.
            formatted_rows = zip(*(match_formatting(l, a)
                                   for l, a in zip(*formatted_rows)))
            result += "Unit cell (with estimated std devs):\n"
            result += "%s %s %s\n%s %s %s\n" % (formatted_rows[0] +
                                                formatted_rows[1])
        else:
            result += "Unit cell:\n"
            result += "%7.3f %7.3f %7.3f\n%7.3f %7.3f %7.3f\n" % tuple(cell)

        # now, use this information and the sequence (if provided)
        # and also matthews_coef (should I be using this directly, here?)
        # to compute a likely number of molecules in the ASU and also
        # the solvent content...
        if self._aa_sequence:
            residues = self._aa_sequence.get_sequence()
            if residues:
                nres = len(residues)

                # first compute the number of molecules using the K&R
                # method
                nmol = compute_nmol(
                    cell[0],
                    cell[1],
                    cell[2],
                    cell[3],
                    cell[4],
                    cell[5],
                    spacegroup,
                    resolution,
                    nres,
                )

                # then compute the solvent fraction
                solvent = compute_solvent(
                    cell[0],
                    cell[1],
                    cell[2],
                    cell[3],
                    cell[4],
                    cell[5],
                    spacegroup,
                    nmol,
                    nres,
                )

                result += "Likely number of molecules in ASU: %d\n" % nmol
                result += "Giving solvent fraction: %4.2f\n" % solvent

                self._nmol = nmol

        # Report where the scaled reflection files ended up, per format
        # and (when split) per wavelength.
        if isinstance(reflections_all, type({})):
            for format in reflections_all.keys():
                result += "%s format:\n" % format
                reflections = reflections_all[format]

                if isinstance(reflections, type({})):
                    for wavelength in reflections.keys():
                        target = FileHandler.get_data_file(
                            reflections[wavelength])
                        result += "Scaled reflections (%s): %s\n" % (
                            wavelength, target)
                else:
                    target = FileHandler.get_data_file(reflections)
                    result += "Scaled reflections: %s\n" % target

        CIF.write_cif()
        mmCIF.write_cif()

    return result
def json_object(self, command_line=""):
    """Return the ISPyB AutoProc description of the scaled crystals as a
    plain, JSON-serialisable dict (mirrors write_xml).

    :param command_line: command line to record; when empty it is
        recovered from the xia2 CommandLine handler.
    :returns: dict with "AutoProc", "AutoProcScalingContainer" and
        "AutoProcProgramContainer" entries.
        NOTE(review): the top-level keys are overwritten on every pass of
        the crystal loop, so only the last crystal survives -- presumably
        single-crystal input is assumed; confirm with callers.
    """
    # Statistic labels to export, in presentation order; hoisted out of
    # the loop (the original rebuilt this list per statistics key).
    keys = [
        "High resolution limit", "Low resolution limit", "Completeness",
        "Multiplicity", "I/sigma", "Rmerge(I+/-)", "CC half",
        "Anomalous completeness", "Anomalous correlation",
        "Anomalous multiplicity", "Total observations", "Total unique",
        "Rmeas(I)", "Rmeas(I+/-)", "Rpim(I)", "Rpim(I+/-)", "Partial Bias",
    ]

    result = {}

    for crystal in sorted(self._crystals):
        xcrystal = self._crystals[crystal]

        cell = xcrystal.get_cell()
        spacegroup = xcrystal.get_likely_spacegroups()[0]

        result["AutoProc"] = {}
        tmp = result["AutoProc"]
        tmp["spaceGroup"] = spacegroup
        for name, value in zip(["a", "b", "c", "alpha", "beta", "gamma"],
                               cell):
            tmp["refinedCell_%s" % name] = value

        result["AutoProcScalingContainer"] = {}
        tmp = result["AutoProcScalingContainer"]
        tmp["AutoProcScaling"] = {
            "recordTimeStamp":
            time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        }

        statistics_all = xcrystal.get_statistics()
        reflection_files = xcrystal.get_scaled_merged_reflections()

        for key in list(statistics_all.keys()):
            pname, xname, dname = key

            # FIXME should assert that the dname is a
            # valid wavelength name

            stats = [k for k in keys if k in statistics_all[key]]

            xwavelength = xcrystal.get_xwavelength(dname)
            sweeps = xwavelength.get_sweeps()

            tmp["AutoProcScalingStatistics"] = []
            tmp2 = tmp["AutoProcScalingStatistics"]

            # Per-shell statistics are 3-element lists/tuples indexed
            # overall / inner / outer.
            for j, name in enumerate(
                    ["overall", "innerShell", "outerShell"]):
                statistics_cache = {"scalingStatisticsType": name}
                for s in stats:
                    # Only statistics with an ISPyB column mapping are
                    # exported.
                    if s not in self._name_map:
                        continue
                    n = self._name_map[s]
                    # Original had identical list and tuple branches;
                    # merged into one isinstance check.
                    if isinstance(statistics_all[key][s], (list, tuple)):
                        statistics_cache[n] = statistics_all[key][s][j]
                tmp2.append(statistics_cache)

            tmp["AutoProcIntegrationContainer"] = []
            tmp2 = tmp["AutoProcIntegrationContainer"]
            for sweep in sweeps:
                if "#" in sweep.get_template():
                    image_name = sweep.get_image_name(0)
                else:
                    image_name = os.path.join(sweep.get_directory(),
                                              sweep.get_template())
                cell = sweep.get_integrater_cell()
                intgr_tmp = {}
                for name, value in zip(
                        ["a", "b", "c", "alpha", "beta", "gamma"], cell):
                    intgr_tmp["cell_%s" % name] = value

                # FIXME this is naughty - private accessors
                indxr = sweep._get_indexer()
                intgr = sweep._get_integrater()

                start, end = intgr.get_integrater_wedge()
                intgr_tmp["startImageNumber"] = start
                intgr_tmp["endImageNumber"] = end
                intgr_tmp["refinedDetectorDistance"] = \
                    indxr.get_indexer_distance()
                beam = indxr.get_indexer_beam_centre_raw_image()
                intgr_tmp["refinedXBeam"] = beam[0]
                intgr_tmp["refinedYBeam"] = beam[1]

                tmp2.append({
                    "Image": {
                        "fileName": os.path.split(image_name)[-1],
                        "fileLocation":
                        sanitize(os.path.split(image_name)[0]),
                    },
                    "AutoProcIntegration": intgr_tmp,
                })

        # file unpacking nonsense
        result["AutoProcProgramContainer"] = {}
        tmp = result["AutoProcProgramContainer"]
        tmp2 = {}
        if not command_line:
            from xia2.Handlers.CommandLine import CommandLine
            command_line = CommandLine.get_command_line()
        tmp2["processingCommandLine"] = sanitize(command_line)
        tmp2["processingProgram"] = "xia2"
        tmp["AutoProcProgram"] = tmp2

        tmp["AutoProcProgramAttachment"] = []
        tmp2 = tmp["AutoProcProgramAttachment"]
        data_directory = self._project.path / "DataFiles"
        for k in reflection_files:
            reflection_file = reflection_files[k]
            if not isinstance(reflection_file, str):
                continue
            reflection_file = FileHandler.get_data_file(
                self._project.path, reflection_file)
            basename = os.path.basename(reflection_file)
            if data_directory.joinpath(basename).exists():
                # Use file in DataFiles directory in preference (if it exists)
                reflection_file = str(data_directory.joinpath(basename))
            tmp2.append({
                "fileType": "Result",
                "fileName": os.path.split(reflection_file)[-1],
                "filePath": sanitize(os.path.split(reflection_file)[0]),
            })

        tmp2.append({
            "fileType": "Log",
            "fileName": "xia2.txt",
            "filePath": sanitize(os.getcwd()),
        })

    return result
def write_xml(self, file, command_line="", working_phil=None):
    """Write an ISPyB AutoProcContainer XML document describing every
    scaled crystal to *file*.

    :param file: path of the XML file to (over)write.
    :param command_line: command line to record; when empty it is
        recovered from the xia2 CommandLine handler.
    :param working_phil: optional phil scope merged into PhilIndex before
        the settings (pipeline name) are read.
    """
    if working_phil is not None:
        PhilIndex.merge_phil(working_phil)
    params = PhilIndex.get_python_object()

    # Statistic labels to export, in presentation order; hoisted out of
    # the loop (the original rebuilt this list per statistics key).
    keys = [
        "High resolution limit", "Low resolution limit", "Completeness",
        "Multiplicity", "I/sigma", "Rmerge(I+/-)", "CC half",
        "Anomalous completeness", "Anomalous correlation",
        "Anomalous multiplicity", "Total observations", "Total unique",
        "Rmeas(I)", "Rmeas(I+/-)", "Rpim(I)", "Rpim(I+/-)", "Partial Bias",
    ]

    # 'with' closes the handle even if a getter below raises; the
    # original open()/close() pair leaked the file object on error.
    with open(file, "w") as fout:
        fout.write('<?xml version="1.0"?>')
        fout.write("<AutoProcContainer>\n")

        for crystal in sorted(self._crystals):
            xcrystal = self._crystals[crystal]

            cell = xcrystal.get_cell()
            spacegroup = xcrystal.get_likely_spacegroups()[0]

            fout.write("<AutoProc><spaceGroup>%s</spaceGroup>" % spacegroup)
            self.write_refined_cell(fout, cell)
            fout.write("</AutoProc>")

            fout.write("<AutoProcScalingContainer>")
            fout.write("<AutoProcScaling>")
            self.write_date(fout)
            fout.write("</AutoProcScaling>")

            statistics_all = xcrystal.get_statistics()
            reflection_files = xcrystal.get_scaled_merged_reflections()

            for key in statistics_all:
                pname, xname, dname = key

                # FIXME should assert that the dname is a
                # valid wavelength name

                stats = [k for k in keys if k in statistics_all[key]]

                xwavelength = xcrystal.get_xwavelength(dname)
                sweeps = xwavelength.get_sweeps()

                # Per-shell statistics are 3-element lists/tuples indexed
                # overall / inner / outer.
                for j, name in enumerate(
                        ["overall", "innerShell", "outerShell"]):
                    statistics_cache = {}
                    for s in stats:
                        # Original had identical list and tuple branches;
                        # merged into one isinstance check.
                        if isinstance(statistics_all[key][s], (list, tuple)):
                            statistics_cache[s] = statistics_all[key][s][j]
                    # send these to be written out
                    self.write_scaling_statistics(fout, name,
                                                  statistics_cache)

                for sweep in sweeps:
                    fout.write("<AutoProcIntegrationContainer>\n")
                    if "#" in sweep.get_template():
                        image_name = sweep.get_image_name(0)
                    else:
                        image_name = os.path.join(sweep.get_directory(),
                                                  sweep.get_template())
                    fout.write("<Image><fileName>%s</fileName>" %
                               os.path.split(image_name)[-1])
                    fout.write("<fileLocation>%s</fileLocation></Image>" %
                               sanitize(os.path.split(image_name)[0]))
                    fout.write("<AutoProcIntegration>\n")
                    cell = sweep.get_integrater_cell()
                    self.write_cell(fout, cell)

                    # FIXME this is naughty - reaching through the sweep's
                    # private accessors.
                    intgr = sweep._get_integrater()
                    start, end = intgr.get_integrater_wedge()
                    fout.write("<startImageNumber>%d</startImageNumber>" %
                               start)
                    fout.write("<endImageNumber>%d</endImageNumber>" % end)

                    # FIXME this is naughty
                    indxr = sweep._get_indexer()
                    fout.write(
                        "<refinedDetectorDistance>%f"
                        "</refinedDetectorDistance>" %
                        indxr.get_indexer_distance())
                    beam = indxr.get_indexer_beam_centre_raw_image()
                    fout.write("<refinedXBeam>%f</refinedXBeam>" % beam[0])
                    fout.write("<refinedYBeam>%f</refinedYBeam>" % beam[1])
                    fout.write("</AutoProcIntegration>\n")
                    fout.write("</AutoProcIntegrationContainer>\n")

            fout.write("</AutoProcScalingContainer>")

            # file unpacking nonsense
            if not command_line:
                from xia2.Handlers.CommandLine import CommandLine
                command_line = CommandLine.get_command_line()

            pipeline = params.xia2.settings.pipeline
            fout.write("<AutoProcProgramContainer><AutoProcProgram>")
            fout.write("<processingCommandLine>%s</processingCommandLine>" %
                       sanitize(command_line))
            fout.write("<processingPrograms>xia2 %s</processingPrograms>" %
                       pipeline)
            fout.write("</AutoProcProgram>")

            data_directory = self._project.path / "DataFiles"
            log_directory = self._project.path / "LogFiles"

            for k in reflection_files:
                reflection_file = reflection_files[k]
                if not isinstance(reflection_file, str):
                    continue
                reflection_file = FileHandler.get_data_file(
                    self._project.path, reflection_file)
                basename = os.path.basename(reflection_file)
                if data_directory.joinpath(basename).exists():
                    # Use file in DataFiles directory in preference (if it exists)
                    reflection_file = str(data_directory.joinpath(basename))

                fout.write("<AutoProcProgramAttachment><fileType>Result")
                fout.write("</fileType><fileName>%s</fileName>" %
                           os.path.split(reflection_file)[-1])
                fout.write("<filePath>%s</filePath>" %
                           sanitize(os.path.split(reflection_file)[0]))
                fout.write("</AutoProcProgramAttachment>\n")

            # attach any merging-statistics graphs found in LogFiles
            for merging_stats_json in log_directory.glob(
                    "*merging-statistics.json"):
                fout.write("<AutoProcProgramAttachment><fileType>Graph")
                fout.write("</fileType><fileName>%s</fileName>" %
                           os.path.split(str(merging_stats_json))[-1])
                fout.write("<filePath>%s</filePath>" %
                           sanitize(str(log_directory)))
                fout.write("</AutoProcProgramAttachment>\n")

            # add the xia2.txt file...
            fout.write("<AutoProcProgramAttachment><fileType>Log")
            fout.write("</fileType><fileName>xia2.txt</fileName>")
            fout.write("<filePath>%s</filePath>" % sanitize(os.getcwd()))
            fout.write("</AutoProcProgramAttachment>\n")
            fout.write("</AutoProcProgramContainer>")

        fout.write("</AutoProcContainer>\n")
def write_xml(self, file):
    """Write an ISPyB AutoProcContainer XML document describing every
    scaled crystal to *file*.

    Oldest variant: takes the first image name from the sweep's full
    image list and always records the command line from the CommandLine
    handler.

    :param file: path of the XML file to (over)write.
    """
    # Statistic labels to export, in presentation order; hoisted out of
    # the loop (the original rebuilt this list per statistics key and
    # also fetched an unused wavelength-name list).
    keys = [
        'High resolution limit', 'Low resolution limit', 'Completeness',
        'Multiplicity', 'I/sigma', 'Rmerge(I+/I-)', 'CC half',
        'Anomalous completeness', 'Anomalous correlation',
        'Anomalous multiplicity', 'Total observations', 'Total unique',
        'Rmeas(I)', 'Rmeas(I+,-)', 'Rpim(I)', 'Rpim(I+/-)', 'Partial Bias'
    ]

    # 'with' closes the handle even if a getter below raises; the
    # original open()/close() pair leaked the file object on error.
    with open(file, 'w') as fout:
        fout.write('<?xml version="1.0"?>')
        fout.write('<AutoProcContainer>\n')

        for crystal in sorted(self._crystals):
            xcrystal = self._crystals[crystal]

            cell = xcrystal.get_cell()
            spacegroup = xcrystal.get_likely_spacegroups()[0]

            fout.write('<AutoProc><spaceGroup>%s</spaceGroup>' % spacegroup)
            self.write_refined_cell(fout, cell)
            fout.write('</AutoProc>')

            fout.write('<AutoProcScalingContainer>')
            fout.write('<AutoProcScaling>')
            self.write_date(fout)
            fout.write('</AutoProcScaling>')

            statistics_all = xcrystal.get_statistics()
            reflection_files = xcrystal.get_scaled_merged_reflections()

            for key in statistics_all.keys():
                pname, xname, dname = key

                # FIXME should assert that the dname is a
                # valid wavelength name

                stats = [k for k in keys if k in statistics_all[key]]

                xwavelength = xcrystal.get_xwavelength(dname)
                sweeps = xwavelength.get_sweeps()

                # Per-shell statistics are 3-element lists/tuples indexed
                # overall / inner / outer.
                for j, name in enumerate(
                        ['overall', 'innerShell', 'outerShell']):
                    statistics_cache = {}
                    for s in stats:
                        # Original compared type() objects with identical
                        # list and tuple branches; merged into one
                        # isinstance check.
                        if isinstance(statistics_all[key][s], (list, tuple)):
                            statistics_cache[s] = statistics_all[key][s][j]
                    # send these to be written out
                    self.write_scaling_statistics(fout, name,
                                                  statistics_cache)

                for sweep in sweeps:
                    fout.write('<AutoProcIntegrationContainer>\n')
                    image_name = sweep.get_all_image_names()[0]
                    fout.write('<Image><fileName>%s</fileName>' %
                               os.path.split(image_name)[-1])
                    fout.write('<fileLocation>%s</fileLocation></Image>' %
                               sanitize(os.path.split(image_name)[0]))
                    fout.write('<AutoProcIntegration>\n')
                    cell = sweep.get_integrater_cell()
                    self.write_cell(fout, cell)

                    # FIXME this is naughty - reaching through the sweep's
                    # private accessors.
                    intgr = sweep._get_integrater()
                    start, end = intgr.get_integrater_wedge()
                    fout.write('<startImageNumber>%d</startImageNumber>' %
                               start)
                    fout.write('<endImageNumber>%d</endImageNumber>' % end)

                    # FIXME this is naughty
                    indxr = sweep._get_indexer()
                    fout.write(
                        '<refinedDetectorDistance>%f'
                        '</refinedDetectorDistance>' %
                        indxr.get_indexer_distance())
                    beam = indxr.get_indexer_beam_centre()
                    fout.write('<refinedXBeam>%f</refinedXBeam>' % beam[0])
                    fout.write('<refinedYBeam>%f</refinedYBeam>' % beam[1])
                    fout.write('</AutoProcIntegration>\n')
                    fout.write('</AutoProcIntegrationContainer>\n')

            fout.write('</AutoProcScalingContainer>')

            # file unpacking nonsense
            from xia2.Handlers.CommandLine import CommandLine
            fout.write('<AutoProcProgramContainer><AutoProcProgram>')
            fout.write('<processingCommandLine>%s</processingCommandLine>' %
                       sanitize(CommandLine.get_command_line()))
            fout.write('<processingPrograms>xia2</processingPrograms>')
            fout.write('</AutoProcProgram>')

            for k in reflection_files:
                reflection_file = reflection_files[k]
                if not isinstance(reflection_file, str):
                    continue
                reflection_file = FileHandler.get_data_file(reflection_file)
                fout.write('<AutoProcProgramAttachment><fileType>Result')
                fout.write('</fileType><fileName>%s</fileName>' %
                           os.path.split(reflection_file)[-1])
                fout.write('<filePath>%s</filePath>' %
                           sanitize(os.path.split(reflection_file)[0]))
                fout.write('</AutoProcProgramAttachment>\n')

            # add the xia2.txt file...
            fout.write('<AutoProcProgramAttachment><fileType>Log')
            fout.write('</fileType><fileName>xia2.txt</fileName>')
            fout.write('<filePath>%s</filePath>' % sanitize(os.getcwd()))
            fout.write('</AutoProcProgramAttachment>\n')
            fout.write('</AutoProcProgramContainer>')

        fout.write('</AutoProcContainer>\n')
def get_output(self):
    """Return a human-readable multi-line report for this crystal.

    Always includes the crystal name, optional sequence and the output of
    every wavelength; once the scaler reports finish-done it appends the
    autoindexing / integration / scaling summaries, resolution limits,
    spacegroup and unit cell (with esds when available), optional
    Matthews-coefficient estimates, and the scaled reflection file
    locations.  Also pushes spacegroup/wavelength data into the CIF and
    mmCIF handlers and writes both CIF files as a side effect.
    """
    # NOTE(review): iteritems()/itervalues() below are Python 2 only —
    # this block will not run under Python 3 as written.
    result = 'Crystal: %s\n' % self._name
    if self._aa_sequence:
        result += 'Sequence: %s\n' % self._aa_sequence.get_sequence()
    for wavelength in self._wavelengths.keys():
        result += self._wavelengths[wavelength].get_output()

    scaler = self._get_scaler()
    # The detailed report is only meaningful after scaling has finished.
    if scaler.get_scaler_finish_done():
        # Per-sweep autoindexing and integration summaries.
        for wname, xwav in self._wavelengths.iteritems():
            for xsweep in xwav.get_sweeps():
                idxr = xsweep._get_indexer()
                if PhilIndex.params.xia2.settings.show_template:
                    result += '%s\n' % banner('Autoindexing %s (%s)' % (
                        idxr.get_indexer_sweep_name(), idxr.get_template()))
                else:
                    result += '%s\n' % banner(
                        'Autoindexing %s' % idxr.get_indexer_sweep_name())
                result += '%s\n' % idxr.show_indexer_solutions()

                intgr = xsweep._get_integrater()
                if PhilIndex.params.xia2.settings.show_template:
                    result += '%s\n' % banner('Integrating %s (%s)' % (
                        intgr.get_integrater_sweep_name(),
                        intgr.get_template()))
                else:
                    result += '%s\n' % banner(
                        'Integrating %s' % intgr.get_integrater_sweep_name())
                result += '%s\n' % intgr.show_per_image_statistics()

        result += '%s\n' % banner('Scaling %s' % self.get_name())

        # Resolution limits per (wavelength, sweep), flagging when the
        # applied limit differs from the suggested one.
        for (dname, sname), (limit, suggested) in \
                scaler.get_scaler_resolution_limits().iteritems():
            if suggested is None or limit == suggested:
                result += 'Resolution limit for %s/%s: %5.2f\n' % (
                    dname, sname, limit)
            else:
                result += \
                    'Resolution limit for %s/%s: %5.2f (%5.2f suggested)\n' \
                    % (dname, sname, limit, suggested)

        # this is now deprecated - be explicit in what you are
        # asking for...
        reflections_all = self.get_scaled_merged_reflections()
        statistics_all = self._get_scaler().get_scaler_statistics()

        # print some of these statistics, perhaps?
        for key in statistics_all.keys():
            result += format_statistics(statistics_all[key],
                                        caption='For %s/%s/%s' % key)

        # then print out some "derived" information based on the
        # scaling - this is presented through the Scaler interface
        # explicitly...
        cell = self._get_scaler().get_scaler_cell()
        cell_esd = self._get_scaler().get_scaler_cell_esd()
        spacegroups = self._get_scaler().get_scaler_likely_spacegroups()
        spacegroup = spacegroups[0]
        resolution = self._get_scaler().get_scaler_highest_resolution()

        # Normalise the spacegroup to its canonical lookup symbol and
        # record it in both CIF outputs.
        from cctbx import sgtbx
        sg = sgtbx.space_group_type(str(spacegroup))
        spacegroup = sg.lookup_symbol()
        CIF.set_spacegroup(sg)
        mmCIF.set_spacegroup(sg)

        if len(self._wavelengths) == 1:
            CIF.set_wavelengths([w.get_wavelength()
                                 for w in self._wavelengths.itervalues()])
            mmCIF.set_wavelengths([w.get_wavelength()
                                   for w in self._wavelengths.itervalues()])
        else:
            # Multi-wavelength: one CIF block per wavelength, named
            # project_crystal_wavelength.
            for wavelength in self._wavelengths.keys():
                full_wave_name = '%s_%s_%s' % (self._project._name,
                                               self._name, wavelength)
                CIF.get_block(full_wave_name)['_diffrn_radiation_wavelength'] = \
                    self._wavelengths[wavelength].get_wavelength()
                mmCIF.get_block(full_wave_name)['_diffrn_radiation_wavelength'] = \
                    self._wavelengths[wavelength].get_wavelength()
            CIF.set_wavelengths({name: wave.get_wavelength()
                                 for name, wave
                                 in self._wavelengths.iteritems()})
            mmCIF.set_wavelengths({name: wave.get_wavelength()
                                   for name, wave
                                   in self._wavelengths.iteritems()})

        result += 'Assuming spacegroup: %s\n' % spacegroup
        if len(spacegroups) > 1:
            result += 'Other likely alternatives are:\n'
            for sg in spacegroups[1:]:
                result += '%s\n' % sg

        if cell_esd:
            from libtbx.utils import format_float_with_standard_uncertainty

            # Align the a/b/c row with the alpha/beta/gamma row by
            # padding each value around its decimal point.
            def match_formatting(dimA, dimB):
                def conditional_split(s):
                    return (s[:s.index('.')], s[s.index('.'):]) if '.' \
                        in s else (s, '')
                A, B = conditional_split(dimA), conditional_split(dimB)
                maxlen = (max(len(A[0]), len(B[0])),
                          max(len(A[1]), len(B[1])))
                return (
                    A[0].rjust(maxlen[0]) + A[1].ljust(maxlen[1]),
                    B[0].rjust(maxlen[0]) + B[1].ljust(maxlen[1])
                )

            formatted_cell_esds = tuple(
                format_float_with_standard_uncertainty(v, sd)
                for v, sd in zip(cell, cell_esd))
            formatted_rows = (formatted_cell_esds[0:3],
                              formatted_cell_esds[3:6])
            # NOTE(review): formatted_rows is indexed below, which fails
            # on a Python 3 zip object — Python 2 only as written.
            formatted_rows = zip(*(match_formatting(l, a)
                                   for l, a in zip(*formatted_rows)))
            result += 'Unit cell (with estimated std devs):\n'
            result += '%s %s %s\n%s %s %s\n' % (formatted_rows[0] +
                                                formatted_rows[1])
        else:
            result += 'Unit cell:\n'
            result += '%7.3f %7.3f %7.3f\n%7.3f %7.3f %7.3f\n' % tuple(cell)

        # now, use this information and the sequence (if provided)
        # and also matthews_coef (should I be using this directly, here?)
        # to compute a likely number of molecules in the ASU and also
        # the solvent content...
        if self._aa_sequence:
            residues = self._aa_sequence.get_sequence()
            if residues:
                nres = len(residues)

                # first compute the number of molecules using the K&R
                # method
                nmol = compute_nmol(cell[0], cell[1], cell[2],
                                    cell[3], cell[4], cell[5],
                                    spacegroup, resolution, nres)

                # then compute the solvent fraction
                solvent = compute_solvent(cell[0], cell[1], cell[2],
                                          cell[3], cell[4], cell[5],
                                          spacegroup, nmol, nres)

                result += 'Likely number of molecules in ASU: %d\n' % nmol
                result += 'Giving solvent fraction: %4.2f\n' % solvent

                self._nmol = nmol

        # Report where the scaled reflection files ended up, per format
        # and (when split) per wavelength.
        if type(reflections_all) == type({}):
            for format in reflections_all.keys():
                result += '%s format:\n' % format
                reflections = reflections_all[format]

                if type(reflections) == type({}):
                    for wavelength in reflections.keys():
                        target = FileHandler.get_data_file(
                            reflections[wavelength])
                        result += 'Scaled reflections (%s): %s\n' % \
                            (wavelength, target)
                else:
                    target = FileHandler.get_data_file(reflections)
                    result += 'Scaled reflections: %s\n' % target

        CIF.write_cif()
        mmCIF.write_cif()

    return result