def import_geometry(xds_inp=None, dials_json=None):
    """Extract detector/goniometer geometry keywords as XDS.INP-style lines.

    Exactly one of `xds_inp` (path to an XDS.INP) or `dials_json` (path to a
    DIALS experiment json) must be given.  Returns a list of strings of the
    form "KEYWORD= value", restricted to the geometry keywords below.
    """
    assert (xds_inp, dials_json).count(None) == 1
    # Keywords considered part of the geometry description.
    geom_kwds = set([
        "DIRECTION_OF_DETECTOR_X-AXIS", "DIRECTION_OF_DETECTOR_Y-AXIS",
        "DETECTOR_DISTANCE", "ORGX", "ORGY", "ROTATION_AXIS",
        # "X-RAY_WAVELENGTH",
        "INCIDENT_BEAM_DIRECTION",
        "SEGMENT", "DIRECTION_OF_SEGMENT_X-AXIS", "DIRECTION_OF_SEGMENT_Y-AXIS",
        "SEGMENT_DISTANCE", "SEGMENT_ORGX", "SEGMENT_ORGY"
    ])
    # FIXME in case of multi-segment detector..

    if xds_inp:
        # Simple path: parse the XDS.INP and keep only geometry keywords.
        inp = get_xdsinp_keyword(xds_inp)
        inp = filter(lambda x: x[0] in geom_kwds, inp)
        return map(lambda x: "%s= %s" % x, inp)
    elif dials_json:
        # Round-trip through dxtbx: rebuild models from the json, then let
        # dxtbx serialize them back to XDS.INP format and filter that text.
        import dxtbx.imageset
        from dxtbx.serialize.load import _decode_dict
        from dxtbx.model import BeamFactory
        from dxtbx.model import DetectorFactory
        from dxtbx.model import GoniometerFactory
        from dxtbx.model import ScanFactory
        from dxtbx.serialize.xds import to_xds
        j = json.loads(open(dials_json).read(), object_hook=_decode_dict)
        # dummy sweep: only the models attached below matter for to_xds().
        sweep = dxtbx.imageset.ImageSetFactory.from_template(
            "####", image_range=[1, 1], check_format=False)[0]
        sweep.set_detector(DetectorFactory.from_dict(j["detector"][0]))
        sweep.set_beam(BeamFactory.from_dict(j["beam"][0]))
        sweep.set_goniometer(GoniometerFactory.from_dict(j["goniometer"][0]))
        sweep.set_scan(
            ScanFactory.make_scan(image_range=[1, 1], exposure_times=[1],
                                  oscillation=[1, 2], epochs=[0]))  # dummy
        sio = cStringIO.StringIO()
        to_xds(sweep).XDS_INP(sio)
        inp = get_xdsinp_keyword(inp_str=sio.getvalue())
        inp = filter(lambda x: x[0] in geom_kwds, inp)
        return map(lambda x: "%s= %s" % x, inp)

    return []
def set_info_from_xdsinp(self, xdsinp):
    """Parse geometry/crystal parameters from an XDS.INP and store them as attributes."""
    # XXX x, y, z axes
    to_vec = lambda s: numpy.array([float(v) for v in s.split()])
    conversions = (
        ("STARTING_FRAME", "starting_frame", int),
        ("STARTING_ANGLE", "starting_angle", float),
        ("OSCILLATION_RANGE", "osc_range", float),
        ("ROTATION_AXIS", "rotation_axis", to_vec),
        ("X-RAY_WAVELENGTH", "wavelength", float),
        ("INCIDENT_BEAM_DIRECTION", "incident_beam", to_vec),
        ("NX", "nx", int),
        ("NY", "ny", int),
        ("QX", "qx", float),
        ("QY", "qy", float),
        ("DETECTOR_DISTANCE", "distance", float),
        ("SPACE_GROUP_NUMBER", "spacegroup", int),
        ("UNIT_CELL_CONSTANTS", "unit_cell", to_vec),
        ("UNIT_CELL_A-AXIS", "a_axis", to_vec),
        ("UNIT_CELL_B-AXIS", "b_axis", to_vec),
        ("UNIT_CELL_C-AXIS", "c_axis", to_vec),
    )

    # dict() keeps only the last occurrence of a duplicated keyword.
    parsed = dict(get_xdsinp_keyword(xdsinp))

    for keyword, attr, convert in conversions:
        raw = parsed.get(keyword, "")
        if raw.strip() != "":
            setattr(self, attr, convert(raw))

    # Beam center is stored component-wise in self.origin.
    if "ORGX" in parsed:
        self.origin[0] = float(parsed["ORGX"])
    if "ORGY" in parsed:
        self.origin[1] = float(parsed["ORGY"])
def run(xscale_inp): inp_dir = os.path.dirname(xscale_inp) files = map(lambda y: y[1].replace("*",""), filter(lambda x: x[0]=="INPUT_FILE", get_xdsinp_keyword(xscale_inp))) files = map(lambda x: os.path.join(inp_dir, x) if not os.path.isabs(x) else x, files) symms = map(lambda x: XDS_ASCII(x,read_data=False).symm, files) cells = numpy.array(map(lambda x: x.unit_cell().parameters(), symms)) sgs = map(lambda x: str(x.space_group_info()), symms) laues = map(lambda x: str(x.space_group().build_derived_reflection_intensity_group(False).info()), symms) median_cell = map(lambda i: numpy.median(cells[:,i]), xrange(6)) mean_cell = map(lambda i: cells[:,i].mean(), xrange(6)) cell_sd = map(lambda i: numpy.std(cells[:,i]), xrange(6)) print "%4d files loaded" % len(files) print "Space groups:", ", ".join(map(lambda x: "%s (%d files)"%(x,sgs.count(x)), set(sgs))) print " Laue groups:", ", ".join(map(lambda x: "%s (%d files)"%(x,laues.count(x)), set(laues))) print " Median cell:", " ".join(map(lambda x: "%7.3f"%x, median_cell)) print " Mean cell:", " ".join(map(lambda x: "%7.3f"%x, mean_cell)) print " SD:", " ".join(map(lambda x: "%7.1e"%x, cell_sd)) # for BLEND $CCP4/share/blend/R/blend0.R # names(macropar) <- c("cn","a","b","c","alpha","beta","gamma","mosa","ctoddist","wlength") ofs = open("forR_macropar.dat", "w") for i, cell in enumerate(cells): print >>ofs, "%4d" % (i+1), print >>ofs, " ".join(map(lambda x: "%7.3f"%x, cell)), print >>ofs, " 0 0 0" ofs.close() shutil.copyfile("forR_macropar.dat", "forR_macropar.dat.bak") print print "Run BLEND?" print "Rscript $CCP4/share/blend/R/blend0.R"
def set_info_from_xdsinp(self, xdsinp):
    """Read geometry/crystal parameters from an XDS.INP and set them on self."""
    # XXX x, y, z axes
    vec = lambda text: numpy.array(map(float, text.split()))
    spec = [("STARTING_FRAME", "starting_frame", int),
            ("STARTING_ANGLE", "starting_angle", float),
            ("OSCILLATION_RANGE", "osc_range", float),
            ("ROTATION_AXIS", "rotation_axis", vec),
            ("X-RAY_WAVELENGTH", "wavelength", float),
            ("INCIDENT_BEAM_DIRECTION", "incident_beam", vec),
            ("NX", "nx", int),
            ("NY", "ny", int),
            ("QX", "qx", float),
            ("QY", "qy", float),
            ("DETECTOR_DISTANCE", "distance", float),
            ("SPACE_GROUP_NUMBER", "spacegroup", int),
            ("UNIT_CELL_CONSTANTS", "unit_cell", vec),
            ("UNIT_CELL_A-AXIS", "a_axis", vec),
            ("UNIT_CELL_B-AXIS", "b_axis", vec),
            ("UNIT_CELL_C-AXIS", "c_axis", vec)]

    # Duplicated keywords: dict() keeps the last value seen.
    params = dict(get_xdsinp_keyword(xdsinp))
    for key, attr, conv in spec:
        raw = params.get(key)
        if raw is not None and raw.strip() != "":
            setattr(self, attr, conv(raw))

    if "ORGX" in params:
        self.origin[0] = float(params["ORGX"])
    if "ORGY" in params:
        self.origin[1] = float(params["ORGY"])
def set_xdsinp(self, xdsinp):
    """Build self.calc_d, a function mapping detector pixel (x, y) to d-spacing."""
    params = dict(get_xdsinp_keyword(xdsinp))
    lam = float(params["X-RAY_WAVELENGTH"])
    beam_x = float(params["ORGX"])
    beam_y = float(params["ORGY"])
    pixel_size = float(params["QX"])
    dist = abs(float(params["DETECTOR_DISTANCE"]))

    def calc_d(px, py):
        # Bragg's law: d = lambda / (2 sin(theta)), with 2theta taken from the
        # radial offset of the pixel from the beam center on the detector.
        radial = math.sqrt((px - beam_x)**2 + (py - beam_y)**2) * pixel_size
        return lam / 2. / math.sin(0.5 * math.atan(radial / dist))

    self.calc_d = calc_d
def set_info_from_xdsinp_or_inpstr(self, xdsinp=None, inpstr=None):
    """Set geometry/crystal attributes from an XDS.INP file or an INP-format string.

    Exactly one of `xdsinp` (path) or `inpstr` (text, e.g. the INTEGRATE.LP
    header) must be given.  Also derives the detector Z axis and collects
    multi-segment detector definitions into self.segments.
    """
    assert (xdsinp, inpstr).count(None) == 1
    t1 = lambda x: x.split()[0]  # may have units that should be removed (if read from INTEGRATE.LP header)
    table = [("STARTING_FRAME", "starting_frame", lambda x: int(t1(x))),
             ("STARTING_ANGLE", "starting_angle", lambda x: float(t1(x))),
             ("OSCILLATION_RANGE", "osc_range", lambda x: float(t1(x))),
             ("ROTATION_AXIS", "rotation_axis", lambda x: numpy.array(map(lambda y: float(y), x.split()[:3]))),
             ("DIRECTION_OF_DETECTOR_X-AXIS", "X_axis", lambda x: numpy.array(map(lambda y: float(y), x.split()[:3]))),
             ("DIRECTION_OF_DETECTOR_Y-AXIS", "Y_axis", lambda x: numpy.array(map(lambda y: float(y), x.split()[:3]))),
             ("X-RAY_WAVELENGTH", "wavelength", lambda x: float(t1(x))),
             ("INCIDENT_BEAM_DIRECTION", "incident_beam", lambda x: numpy.array(map(lambda y: float(y), x.split()[:3]))),
             ("NX", "nx", lambda x: int(t1(x))),
             ("NY", "ny", lambda x: int(t1(x))),
             ("QX", "qx", lambda x: float(t1(x))),
             ("QY", "qy", lambda x: float(t1(x))),
             ("DETECTOR_DISTANCE", "distance", lambda x: float(t1(x))),
             ("SPACE_GROUP_NUMBER", "spacegroup", lambda x: int(t1(x))),
             ("UNIT_CELL_CONSTANTS", "unit_cell", lambda x: numpy.array(map(lambda y: float(y), x.split()[:6]))),
             ("UNIT_CELL_A-AXIS", "a_axis", lambda x: numpy.array(map(lambda y: float(y), x.split()[:3]))),
             ("UNIT_CELL_B-AXIS", "b_axis", lambda x: numpy.array(map(lambda y: float(y), x.split()[:3]))),
             ("UNIT_CELL_C-AXIS", "c_axis", lambda x: numpy.array(map(lambda y: float(y), x.split()[:3])))
             ]
    # Keep the raw (ordered) keyword list: segment parsing below relies on
    # SEGMENT_* lines following their SEGMENT line in file order.
    inp_raw = get_xdsinp_keyword(xdsinp=xdsinp, inp_str=inpstr)
    inp = dict(inp_raw)  # I believe dict() removes duplicated parameters and keeps last.
    for k, at, f in table:
        if k in inp and inp[k].strip() != "":
            setattr(self, at, f(inp[k]))
    if "ORGX" in inp:
        self.origin[0] = float(inp["ORGX"])
    if "ORGY" in inp:
        self.origin[1] = float(inp["ORGY"])
    if "DIRECTION_OF_DETECTOR_X-AXIS" in inp:
        # Z axis is the (normalized) cross product of the detector X and Y axes.
        self.Z_axis = numpy.cross(self.X_axis, self.Y_axis)
        self.Z_axis /= numpy.linalg.norm(self.Z_axis)

    # Segment: each SEGMENT line starts a new segment; the SEGMENT_* keywords
    # that follow apply to the most recently started segment (order matters).
    for k, v in inp_raw:
        if k == "SEGMENT":
            sp = map(int, v.split())
            self.segments.append(Segment())
            self.segments[-1].x1 = sp[0]
            self.segments[-1].x2 = sp[1]
            self.segments[-1].y1 = sp[2]
            self.segments[-1].y2 = sp[3]
        elif k == "SEGMENT_ORGX":
            self.segments[-1].orgxs = float(v)
        elif k == "SEGMENT_ORGY":
            self.segments[-1].orgys = float(v)
        elif k == "SEGMENT_DISTANCE":
            self.segments[-1].fs = float(v)
        elif k == "DIRECTION_OF_SEGMENT_X-AXIS":
            self.segments[-1].eds_x = numpy.array(map(float, v.split()))
        elif k == "DIRECTION_OF_SEGMENT_Y-AXIS":
            self.segments[-1].eds_y = numpy.array(map(float, v.split()))
def set_xdsinp(self, xdsinp):
    """Define self.calc_d(x, y): resolution (d-spacing) at detector pixel (x, y).

    XXX no support for non-normal incident beam or multipanel detector
    """
    kv = dict(get_xdsinp_keyword(xdsinp))
    wl = float(kv["X-RAY_WAVELENGTH"])
    cx, cy = float(kv["ORGX"]), float(kv["ORGY"])
    px = float(kv["QX"])
    f = abs(float(kv["DETECTOR_DISTANCE"]))
    # d = lambda / (2 sin(theta)); theta = atan(r/f)/2 with r in mm.
    self.calc_d = lambda x, y: wl / 2. / math.sin(
        0.5 * math.atan(math.sqrt((x - cx)**2 + (y - cy)**2) * px / f))
def run_xds_sequence(root, params):
    """Run xds_sequence for `root`, optionally staging files through a local temp dir.

    When params.use_tmpdir_if_available and a sufficiently large temp dir is
    available, all files are copied there, XDS.INP is pointed at a symlink to
    the original data directory, processing runs in the temp dir, and results
    newer than the originals are copied back before the temp dir is removed.
    Falls back to processing in place when no temp dir is available.
    """
    tmpdir = None
    if params.use_tmpdir_if_available:
        tmpdir = util.get_temp_local_dir("xdskamo", min_gb=2)  # TODO guess required tempdir size
        if tmpdir is None:
            print "Can't get temp dir with sufficient size."

    # If tmpdir is not used
    if tmpdir is None:
        return xds_sequence(root, params)

    print "Using %s as temp dir.." % tmpdir

    # If tempdir is used: copy everything over first.
    for f in glob.glob(os.path.join(root, "*")):
        shutil.copy2(f, tmpdir)

    xdsinp = os.path.join(tmpdir, "XDS.INP")
    xdsinp_dict = dict(get_xdsinp_keyword(xdsinp))

    # Make a link to data dir (resolved relative to the original root).
    org_data_template = xdsinp_dict["NAME_TEMPLATE_OF_DATA_FRAMES"]
    ord_data_dir = os.path.dirname(org_data_template)
    if not os.path.isabs(ord_data_dir):
        ord_data_dir = os.path.join(root, ord_data_dir)
    datadir_lns = os.path.join(tmpdir, "data_loc")
    os.symlink(ord_data_dir, datadir_lns)

    # Modify XDS.INP so frames are read through the symlink.
    modify_xdsinp(xdsinp, inp_params=[("NAME_TEMPLATE_OF_DATA_FRAMES",
                                       os.path.join("data_loc", os.path.basename(org_data_template)))])

    try:
        ret = xds_sequence(tmpdir, params)
    finally:
        # Revert XDS.INP (so the copied-back file refers to the real data path)
        modify_xdsinp(xdsinp, inp_params=[("NAME_TEMPLATE_OF_DATA_FRAMES", org_data_template)])
        # Remove link
        os.remove(datadir_lns)
        # Move to original directory
        for f in glob.glob(os.path.join(tmpdir, "*")):
            f_dest = os.path.join(root, os.path.relpath(f, tmpdir))
            if os.path.isfile(f_dest):
                # Copy only if newer
                if os.stat(f).st_mtime > os.stat(f_dest).st_mtime:
                    shutil.copy2(f, root)
                else:
                    print "%s already exists and not modified. skip." % f
                os.remove(f)
            else:
                shutil.move(f, root)
        # Remove tmpdir
        shutil.rmtree(tmpdir)

    return ret
def set_xdsinp(self, xdsinp):
    """Install self.calc_d, computing the d-spacing for a detector pixel (x, y).

    XXX no support for non-normal incident beam or multipanel detector
    """
    keywords = dict(get_xdsinp_keyword(xdsinp))
    wavelength = float(keywords["X-RAY_WAVELENGTH"])
    origin_x = float(keywords["ORGX"])
    origin_y = float(keywords["ORGY"])
    qx = float(keywords["QX"])
    distance = abs(float(keywords["DETECTOR_DISTANCE"]))

    def _d_spacing(x, y):
        # Radial distance from beam center in mm, then Bragg's law.
        radius = math.sqrt((x - origin_x)**2 + (y - origin_y)**2) * qx
        return wavelength / 2. / math.sin(0.5 * math.atan(radius / distance))

    self.calc_d = _d_spacing
def run(xscale_inp): inp_dir = os.path.dirname(xscale_inp) files = map( lambda y: y[1].replace("*", ""), filter(lambda x: x[0] == "INPUT_FILE", get_xdsinp_keyword(xscale_inp))) files = map( lambda x: os.path.join(inp_dir, x) if not os.path.isabs(x) else x, files) symms = map(lambda x: XDS_ASCII(x, read_data=False).symm, files) cells = numpy.array(map(lambda x: x.unit_cell().parameters(), symms)) sgs = map(lambda x: str(x.space_group_info()), symms) laues = map( lambda x: str(x.space_group().build_derived_reflection_intensity_group( False).info()), symms) median_cell = map(lambda i: numpy.median(cells[:, i]), xrange(6)) mean_cell = map(lambda i: cells[:, i].mean(), xrange(6)) cell_sd = map(lambda i: numpy.std(cells[:, i]), xrange(6)) print "%4d files loaded" % len(files) print "Space groups:", ", ".join( map(lambda x: "%s (%d files)" % (x, sgs.count(x)), set(sgs))) print " Laue groups:", ", ".join( map(lambda x: "%s (%d files)" % (x, laues.count(x)), set(laues))) print " Median cell:", " ".join(map(lambda x: "%7.3f" % x, median_cell)) print " Mean cell:", " ".join(map(lambda x: "%7.3f" % x, mean_cell)) print " SD:", " ".join(map(lambda x: "%7.1e" % x, cell_sd)) # for BLEND $CCP4/share/blend/R/blend0.R # names(macropar) <- c("cn","a","b","c","alpha","beta","gamma","mosa","ctoddist","wlength") ofs = open("forR_macropar.dat", "w") for i, cell in enumerate(cells): print >> ofs, "%4d" % (i + 1), print >> ofs, " ".join(map(lambda x: "%7.3f" % x, cell)), print >> ofs, " 0 0 0" ofs.close() shutil.copyfile("forR_macropar.dat", "forR_macropar.dat.bak") print print "Run BLEND?" print "Rscript $CCP4/share/blend/R/blend0.R"
def read_geometry_using_dxtbx(img_file):
    """Read experiment geometry from an image file header via dxtbx.

    Builds a dxtbx datablock from `img_file`, serializes its first sweep to
    XDS.INP format, and returns (to_xds_object, lines) where `lines` are
    " KEYWORD= value" strings limited to the geometry keywords below.
    """
    import dxtbx.datablock
    import dxtbx.serialize.xds
    # Keywords to keep from the dxtbx-generated XDS.INP text.
    geom_kwds = set([
        "DIRECTION_OF_DETECTOR_X-AXIS", "DIRECTION_OF_DETECTOR_Y-AXIS",
        "DETECTOR_DISTANCE", "ORGX", "ORGY", "ROTATION_AXIS",
        "X-RAY_WAVELENGTH", "DETECTOR", "MINIMUM_VALID_PIXEL_VALUE",
        "OVERLOAD", "SENSOR_THICKNESS", "NX", "NY", "QX", "QY",
        "STARTING_ANGLE", "OSCILLATION_RANGE", "FRACTION_OF_POLARIZATION",
        "POLARIZATION_PLANE_NORMAL", "INCIDENT_BEAM_DIRECTION", "SEGMENT",
        "DIRECTION_OF_SEGMENT_X-AXIS", "DIRECTION_OF_SEGMENT_Y-AXIS",
        "SEGMENT_DISTANCE", "SEGMENT_ORGX", "SEGMENT_ORGY"
    ])
    datablocks = dxtbx.datablock.DataBlockFactory.from_filenames([img_file])
    to_xds = dxtbx.serialize.xds.to_xds(datablocks[0].extract_sweeps()[0])
    inp = get_xdsinp_keyword(inp_str=to_xds.XDS_INP())
    inp = filter(lambda x: x[0] in geom_kwds, inp)
    return to_xds, map(lambda x: " %s= %s" % x, inp)
def run(params):
    """Interactively display indexed/unindexed spots from SPOT.XDS in Adxv.

    Reads the image template from XDS.INP, starts an Adxv process, then loops:
    shows the requested frame with indexed spots in blue and unindexed in red,
    and prompts for the next frame number (loop exits only via EOF/interrupt
    or a non-integer answer raising ValueError).
    """
    xds_inp = os.path.join(params.xds_dir, "XDS.INP")
    spot_xds = os.path.join(params.xds_dir, "SPOT.XDS")

    # Spots keyed by frame number, split into "indexed"/"unindexed".
    spots = idxreflp.SpotXds(spot_xds).indexed_and_unindexed_by_frame_on_detector()

    inputs = get_xdsinp_keyword(xds_inp)
    filename_template = dict(inputs).get("NAME_TEMPLATE_OF_DATA_FRAMES", "")
    if filename_template == "":
        print "Error! Can't find filename from XDS.INP"
        return

    # Start adxv
    adxv = Adxv(params.adxv_bin)
    adxv.start(params.xds_dir)
    type_indexed = adxv.define_spot("blue")
    type_unindexed = adxv.define_spot("red")

    num = params.image

    while True:
        print "Showing image %d" % num

        img_file = dataset.template_to_filenames(filename_template, num, num)[0]
        adxv.open_image(img_file)

        uninds = map(lambda x: [x[0], x[1], type_unindexed], spots["unindexed"].get(num, []))
        inds = map(lambda x: [x[0], x[1], type_indexed], spots["indexed"].get(num, []))
        print "Showing %d Indexed spots (blue)" % len(inds)
        print "Showing %d Unindexed spots (red)" % len(uninds)

        adxv.load_spots(inds + uninds)

        time.sleep(1)  # wait until adxv finishes process..
        num = int(raw_input("Next image number?: "))
def run(params):
    """Grid-search the beam center (ORGX/ORGY) by re-running IDXREF at each trial.

    Reads the starting ORGX/ORGY from XDS.INP, steps them by params.dx/dy over
    a (2*nx+1) x (2*ny+1) grid (mm steps are converted to pixels via QX/QY),
    runs xds for each point, backs up the IDXREF outputs under a per-trial
    prefix, and finally prints an analysis of each trial's IDXREF.LP.
    The original XDS.INP and IDXREF files are restored on exit.
    """
    xdsinp = "XDS.INP"
    kwds = dict(get_xdsinp_keyword(xdsinp))
    orgx_org, orgy_org = map(float, (kwds["ORGX"], kwds["ORGY"]))

    dx, dy = params.dx, params.dy
    if params.unit == "mm":
        # Convert mm steps to pixel steps using the pixel sizes QX/QY.
        assert "QX" in kwds
        assert "QY" in kwds
        dx /= float(kwds["QX"])
        dy /= float(kwds["QY"])

    backup_needed = files.generated_by_IDXREF + ("XDS.INP",)
    bk_prefix = make_backup(backup_needed)

    try:
        results = []
        for i in xrange(-params.nx, params.nx+1):
            for j in xrange(-params.ny, params.ny+1):
                work_name = "bs_x%+.2d_y%+.2d" % (i, j)
                orgx = orgx_org + i * dx
                orgy = orgy_org + j * dy
                print "Trying", orgx, orgy
                modify_xdsinp(xdsinp, inp_params=[("JOB", "IDXREF"),
                                                  ("ORGX", orgx),
                                                  ("ORGY", orgy),
                                                  ])
                call("xds")
                # Keep this trial's outputs under its own prefix.
                make_backup(backup_needed, work_name+"_")
                results.append([work_name, orgx, orgy])

        for ret in results:
            print ret, analyze_result(ret[0]+"_IDXREF.LP")
    finally:
        # Always restore the pre-scan XDS.INP and IDXREF outputs.
        revert_files(backup_needed, bk_prefix)
def run(params):
    """Scan ORGX/ORGY around the values in XDS.INP, running each trial in parallel."""
    xdsinp = "XDS.INP"
    keywords = dict(get_xdsinp_keyword(xdsinp))
    orgx0, orgy0 = float(keywords["ORGX"]), float(keywords["ORGY"])

    step_x, step_y = params.dx, params.dy
    if params.unit == "mm":
        # Convert mm steps to pixel steps via the pixel sizes QX/QY.
        assert "QX" in keywords
        assert "QY" in keywords
        step_x /= float(keywords["QX"])
        step_y /= float(keywords["QY"])

    # All trial beam centers on the (2*nx+1) x (2*ny+1) grid.
    orgxy_list = [(orgx0 + i * step_x, orgy0 + j * step_y)
                  for i in xrange(-params.nx, params.nx + 1)
                  for j in xrange(-params.ny, params.ny + 1)]

    easy_mp.pool_map(fixed_func=lambda x: work(os.path.abspath(params.workdir),
                                               os.path.abspath(xdsinp), x),
                     args=orgxy_list,
                     processes=params.nproc)
def run(params):
    """Interactively display indexed/unindexed spots from SPOT.XDS in Adxv.

    Reads the image template from XDS.INP, starts an Adxv process, then loops:
    shows the requested frame with indexed spots in blue and unindexed in red,
    and prompts for the next frame number (loop exits only via EOF/interrupt
    or a non-integer answer raising ValueError).
    """
    xds_inp = os.path.join(params.xds_dir, "XDS.INP")
    spot_xds = os.path.join(params.xds_dir, "SPOT.XDS")

    # Spots keyed by frame number, split into "indexed"/"unindexed".
    spots = idxreflp.SpotXds(spot_xds).indexed_and_unindexed_by_frame_on_detector()

    inputs = get_xdsinp_keyword(xds_inp)
    filename_template = dict(inputs).get("NAME_TEMPLATE_OF_DATA_FRAMES", "")
    if filename_template == "":
        print "Error! Can't find filename from XDS.INP"
        return

    # Start adxv
    adxv = Adxv(params.adxv_bin)
    adxv.start(params.xds_dir)
    type_indexed = adxv.define_spot("blue")
    type_unindexed = adxv.define_spot("red")

    num = params.image

    while True:
        print "Showing image %d" % num

        img_file = dataset.template_to_filenames(filename_template, num, num)[0]
        adxv.open_image(img_file)

        uninds = map(lambda x: [x[0], x[1], type_unindexed], spots["unindexed"].get(num, []))
        inds = map(lambda x: [x[0], x[1], type_indexed], spots["indexed"].get(num, []))
        print "Showing %d Indexed spots (blue)" % len(inds)
        print "Showing %d Unindexed spots (red)" % len(uninds)

        adxv.load_spots(inds + uninds)

        time.sleep(1)  # wait until adxv finishes process..
        num = int(raw_input("Next image number?: "))
def run(params):
    """Try beam centers on a grid around the XDS.INP values, one worker per trial."""
    xdsinp = "XDS.INP"
    inp_kv = dict(get_xdsinp_keyword(xdsinp))
    orgx_base = float(inp_kv["ORGX"])
    orgy_base = float(inp_kv["ORGY"])

    dx, dy = params.dx, params.dy
    if params.unit == "mm":
        # mm offsets are translated to pixel offsets using QX/QY.
        assert "QX" in inp_kv
        assert "QY" in inp_kv
        dx /= float(inp_kv["QX"])
        dy /= float(inp_kv["QY"])

    # Enumerate every (ORGX, ORGY) trial on the grid, x index varying slowest.
    orgxy_list = []
    for di in xrange(-params.nx, params.nx + 1):
        for dj in xrange(-params.ny, params.ny + 1):
            orgxy_list.append((orgx_base + di * dx, orgy_base + dj * dy))

    easy_mp.pool_map(fixed_func=lambda x: work(os.path.abspath(params.workdir),
                                               os.path.abspath(xdsinp), x),
                     args=orgxy_list,
                     processes=params.nproc)
def xds_sequence(root, params): print print os.path.relpath(root, params.topdir) xparm = os.path.join(root, "XPARM.XDS") gxparm = os.path.join(root, "GXPARM.XDS") defpix_lp = os.path.join(root, "DEFPIX.LP") correct_lp = os.path.join(root, "CORRECT.LP") integrate_hkl = os.path.join(root, "INTEGRATE.HKL") xac_hkl = os.path.join(root, "XDS_ASCII.HKL") integrate_lp = os.path.join(root, "INTEGRATE.LP") spot_xds = os.path.join(root, "SPOT.XDS") xdsinp = os.path.join(root, "XDS.INP") assert os.path.isfile(xdsinp) xdsinp_dict = dict(get_xdsinp_keyword(xdsinp)) decilog = multi_out() decilog.register("log", open(os.path.join(root, "decision.log"), "a"), atexit_send_to=None) print >> decilog, "xds_sequence started at %s in %s\n" % ( time.strftime("%Y-%m-%d %H:%M:%S"), root) if params.show_progress: decilog.register("stdout", sys.stdout) if params.mode == "initial" and params.resume and os.path.isfile( correct_lp): print " Already processed." return if params.mode == "recycle" and not os.path.isfile(gxparm): print "GXPARM.XDS not found. Cannot do recycle." return if params.fast_delphi and (params.nproc is None or params.nproc > 1): delphi = optimal_delphi_by_nproc(xdsinp=xdsinp, nproc=params.nproc) print " Setting delphi to ", delphi modify_xdsinp(xdsinp, inp_params=[ ("DELPHI", str(delphi)), ]) if params.nproc is not None and params.nproc > 1: modify_xdsinp(xdsinp, inp_params=[ ("MAXIMUM_NUMBER_OF_PROCESSORS", str(params.nproc)), ]) if params.mode == "initial": # Peak search modify_xdsinp(xdsinp, inp_params=[("JOB", "XYCORR INIT COLSPOT")]) run_xds(wdir=root, show_progress=params.show_progress) if params.auto_frame_exclude_spot_based: sx = idxreflp.SpotXds(spot_xds) sx.set_xdsinp(xdsinp) spots = filter(lambda x: 5 < x[-1] < 30, sx.collected_spots()) # low-res (5 A) frame_numbers = numpy.array(map(lambda x: int(x[2]) + 1, spots)) data_range = map(int, xdsinp_dict["DATA_RANGE"].split()) # XXX this assumes SPOT_RANGE equals to DATA_RANGE. Is this guaranteed? 
h = numpy.histogram(frame_numbers, bins=numpy.arange(data_range[0], data_range[1] + 2, step=1)) q14 = numpy.percentile(h[0], [25, 75]) iqr = q14[1] - q14[0] cutoff = max(h[0][h[0] <= iqr * 1.5 + q14[1]]) / 5 # magic number print "DEBUG:: IQR= %.2f, Q1/4= %s, cutoff= %.2f" % (iqr, q14, cutoff) cut_frames = h[1][h[0] < cutoff] keep_frames = h[1][h[0] >= cutoff] print "DEBUG:: keep_frames=", keep_frames print "DEBUG:: cut_frames=", cut_frames if len(cut_frames) > 0: cut_ranges = [ [cut_frames[0], cut_frames[0]], ] for fn in cut_frames: if fn - cut_ranges[-1][1] <= 1: cut_ranges[-1][1] = fn else: cut_ranges.append([fn, fn]) # Edit XDS.INP cut_inp_str = "".join( map(lambda x: "EXCLUDE_DATA_RANGE= %6d %6d\n" % tuple(x), cut_ranges)) open(xdsinp, "a").write("\n" + cut_inp_str) # Edit SPOT.XDS shutil.copyfile(spot_xds, spot_xds + ".org") sx.write(open(spot_xds, "w"), frame_selection=set(keep_frames)) # Indexing modify_xdsinp(xdsinp, inp_params=[("JOB", "IDXREF")]) run_xds(wdir=root, show_progress=params.show_progress) print # indexing stats like indexed percentage here. if params.tryhard: try_indexing_hard(root, params.show_progress, decilog, known_sgnum=params.cell_prior.sgnum, known_cell=params.cell_prior.cell, tol_length=params.cell_prior.tol_length, tol_angle=params.cell_prior.tol_angle) if not os.path.isfile(xparm): print >> decilog, " Indexing failed." return if params.cell_prior.check and params.cell_prior.sgnum > 0: xsxds = XPARM(xparm).crystal_symmetry() xsref = crystal.symmetry(params.cell_prior.cell, params.cell_prior.sgnum) cosets = reindex.reindexing_operators(xsref, xsxds, params.cell_prior.tol_length, params.cell_prior.tol_angle) if cosets.double_cosets is None: print >> decilog, " Incompatible cell. Indexing failed." return elif params.mode == "recycle": print " Start recycle. 
original ISa= %.2f" % correctlp.get_ISa( correct_lp, check_valid=True) for f in xds_files.generated_after_DEFPIX + ("XPARM.XDS", "plot_integrate.log"): util.rotate_file(os.path.join(root, f), copy=True) shutil.copyfile(gxparm + ".1", xparm) else: raise "Unknown mode (%s)" % params.mode # To Integration modify_xdsinp(xdsinp, inp_params=[("JOB", "DEFPIX INTEGRATE"), ("INCLUDE_RESOLUTION_RANGE", "50 0")]) run_xds(wdir=root, show_progress=params.show_progress) if os.path.isfile(integrate_lp): xds_plot_integrate.run(integrate_lp, os.path.join(root, "plot_integrate.log")) if not os.path.isfile(integrate_hkl): print >> decilog, " Integration failed." return # Make _noscale.HKL if needed if params.no_scaling: bk_prefix = make_backup(("XDS.INP", ), wdir=root, quiet=True) xparm_obj = XPARM(xparm) modify_xdsinp(xdsinp, inp_params=[ ("JOB", "CORRECT"), ("CORRECTIONS", ""), ("NBATCH", "1"), ("MINIMUM_I/SIGMA", "50"), ("REFINE(CORRECT)", ""), ("UNIT_CELL_CONSTANTS", " ".join( map(lambda x: "%.3f" % x, xparm_obj.unit_cell))), ("SPACE_GROUP_NUMBER", "%d" % xparm_obj.spacegroup), ]) print >> decilog, " running CORRECT without empirical scaling" run_xds(wdir=root, show_progress=params.show_progress) for f in xds_files.generated_by_CORRECT + ("XDS.INP", ): ff = os.path.join(root, f) if not os.path.isfile(ff): continue if ff.endswith(".cbf"): os.remove(ff) else: os.rename(ff, ff + "_noscale") revert_files(("XDS.INP", ), bk_prefix, wdir=root, quiet=True) # Run pointless symm_by_integrate = None if params.use_pointless: worker = Pointless() result = worker.run_for_symm(xdsin=integrate_hkl, logout=os.path.join( root, "pointless_integrate.log")) if "symm" in result: symm = result["symm"] print >> decilog, " pointless using INTEGRATE.HKL suggested", symm.space_group_info( ) sgnum = symm.space_group_info().type().number() cell = " ".join( map(lambda x: "%.2f" % x, symm.unit_cell().parameters())) modify_xdsinp(xdsinp, inp_params=[("SPACE_GROUP_NUMBER", "%d" % sgnum), ("UNIT_CELL_CONSTANTS", 
cell)]) symm_by_integrate = symm else: print >> decilog, " pointless failed." # Do Scaling modify_xdsinp(xdsinp, inp_params=[ ("JOB", "CORRECT"), ]) run_xds(wdir=root, show_progress=params.show_progress) if not os.path.isfile(gxparm): print >> decilog, " Scaling failed." return print >> decilog, " OK. ISa= %.2f" % correctlp.get_ISa(correct_lp, check_valid=True) ret = calc_merging_stats(os.path.join(root, "XDS_ASCII.HKL")) if params.cut_resolution: if ret is not None and ret[0] is not None: d_min = ret[0] modify_xdsinp(xdsinp, inp_params=[("JOB", "CORRECT"), ("INCLUDE_RESOLUTION_RANGE", "50 %.2f" % d_min)]) print >> decilog, " Re-scale at %.2f A" % d_min os.rename(os.path.join(root, "CORRECT.LP"), os.path.join(root, "CORRECT_fullres.LP")) os.rename(os.path.join(root, "XDS_ASCII.HKL"), os.path.join(root, "XDS_ASCII_fullres.HKL")) run_xds(wdir=root, show_progress=params.show_progress) print >> decilog, " OK. ISa= %.2f" % correctlp.get_ISa( correct_lp, check_valid=True) print >> decilog, " (Original files are saved as *_fullres.*)" else: print >> decilog, "error: Can't decide resolution." last_ISa = correctlp.get_ISa(correct_lp, check_valid=True) # Run pointless and (if result is different from INTEGRATE) re-scale. if params.use_pointless: worker = Pointless() result = worker.run_for_symm(xdsin=xac_hkl, logout=os.path.join( root, "pointless_correct.log")) if "symm" in result: symm = result["symm"] need_rescale = False if symm_by_integrate is not None: if not xtal.is_same_laue_symmetry( symm_by_integrate.space_group(), symm.space_group()): print >> decilog, "pointless suggested %s, which is different Laue symmetry from INTEGRATE.HKL (%s)" % ( symm.space_group_info(), symm_by_integrate.space_group_info()) need_rescale = True else: print >> decilog, "pointless using XDS_ASCII.HKL suggested %s" % symm.space_group_info( ) need_rescale = True if need_rescale: # make backup, and do correct and compare ISa # if ISa got worse, revert the result. 
backup_needed = ("XDS.INP", "XDS_ASCII_fullres.HKL", "CORRECT_fullres.LP", "merging_stats.pkl", "merging_stats.log") backup_needed += xds_files.generated_by_CORRECT bk_prefix = make_backup(backup_needed, wdir=root, quiet=True) sgnum = symm.space_group_info().type().number() cell = " ".join( map(lambda x: "%.2f" % x, symm.unit_cell().parameters())) modify_xdsinp(xdsinp, inp_params=[("JOB", "CORRECT"), ("SPACE_GROUP_NUMBER", "%d" % sgnum), ("UNIT_CELL_CONSTANTS", cell), ("INCLUDE_RESOLUTION_RANGE", "50 0") ]) run_xds(wdir=root, show_progress=params.show_progress) ret = calc_merging_stats(os.path.join(root, "XDS_ASCII.HKL")) if params.cut_resolution: if ret is not None and ret[0] is not None: d_min = ret[0] modify_xdsinp(xdsinp, inp_params=[("JOB", "CORRECT"), ("INCLUDE_RESOLUTION_RANGE", "50 %.2f" % d_min)]) print >> decilog, " Re-scale at %.2f A" % d_min os.rename(os.path.join(root, "CORRECT.LP"), os.path.join(root, "CORRECT_fullres.LP")) os.rename(os.path.join(root, "XDS_ASCII.HKL"), os.path.join(root, "XDS_ASCII_fullres.HKL")) run_xds(wdir=root, show_progress=params.show_progress) print >> decilog, " OK. ISa= %.2f" % correctlp.get_ISa( correct_lp, check_valid=True) print >> decilog, " (Original files are saved as *_fullres.*)" else: print >> decilog, "error: Can't decide resolution." 
for f in ("CORRECT_fullres.LP", "XDS_ASCII_fullres.HKL"): if os.path.isfile(os.path.join(root, f)): print >> decilog, "removing", f os.remove(os.path.join(root, f)) ISa = correctlp.get_ISa(correct_lp, check_valid=True) if ISa >= last_ISa or last_ISa != last_ISa: # if improved or last_ISa is nan print >> decilog, "ISa improved= %.2f" % ISa remove_backups(backup_needed, bk_prefix, wdir=root) else: print >> decilog, "ISa got worse= %.2f" % ISa for f in backup_needed: if os.path.isfile(os.path.join(root, f)): os.remove(os.path.join(root, f)) revert_files(backup_needed, bk_prefix, wdir=root, quiet=True) run_xdsstat(wdir=root) print if params.make_report: html_report.make_individual_report(root, root) print >> decilog, "xds_sequence finished at %s\n" % time.strftime( "%Y-%m-%d %H:%M:%S") decilog.close()
def xds_sequence(root, params): print print os.path.relpath(root, params.topdir) init_lp = os.path.join(root, "INIT.LP") xparm = os.path.join(root, "XPARM.XDS") gxparm = os.path.join(root, "GXPARM.XDS") defpix_lp = os.path.join(root, "DEFPIX.LP") correct_lp = os.path.join(root, "CORRECT.LP") integrate_hkl = os.path.join(root, "INTEGRATE.HKL") xac_hkl = os.path.join(root, "XDS_ASCII.HKL") integrate_lp = os.path.join(root, "INTEGRATE.LP") spot_xds = os.path.join(root, "SPOT.XDS") xdsinp = os.path.join(root, "XDS.INP") assert os.path.isfile(xdsinp) if params.cell_prior.force: assert params.cell_prior.check xdsinp_dict = dict(get_xdsinp_keyword(xdsinp)) if params.cell_prior.sgnum > 0: xs_prior = crystal.symmetry(params.cell_prior.cell, params.cell_prior.sgnum) else: xs_prior = None decilog = multi_out() decilog.register("log", open(os.path.join(root, "decision.log"), "a"), atexit_send_to=None) try: print >> decilog, "xds_sequence started at %s in %s\n" % ( time.strftime("%Y-%m-%d %H:%M:%S"), root) if not kamo_test_installation.tst_xds(): print >> decilog, "XDS is not installed or expired!!" return if params.show_progress: decilog.register("stdout", sys.stdout) if params.mode == "initial" and params.resume and os.path.isfile( correct_lp): print >> decilog, " Already processed." return if params.mode == "recycle" and not os.path.isfile(gxparm): print >> decilog, "GXPARM.XDS not found. Cannot do recycle." 
return if params.fast_delphi and (params.nproc is None or params.nproc > 1): delphi = optimal_delphi_by_nproc(xdsinp=xdsinp, nproc=params.nproc) print >> decilog, " Setting delphi to ", delphi modify_xdsinp(xdsinp, inp_params=[ ("DELPHI", str(delphi)), ]) if params.nproc is not None and params.nproc > 1: modify_xdsinp(xdsinp, inp_params=[ ("MAXIMUM_NUMBER_OF_PROCESSORS", str(params.nproc)), ]) if params.mode == "initial": modify_xdsinp(xdsinp, inp_params=[("JOB", "XYCORR INIT")]) run_xds(wdir=root, show_progress=params.show_progress) initlp = InitLp(init_lp) first_bad = initlp.check_bad_first_frames() if first_bad: print >> decilog, " first frames look bad (too weak) exposure:", first_bad new_data_range = map( int, dict(get_xdsinp_keyword(xdsinp))["DATA_RANGE"].split()) new_data_range[0] = first_bad[-1] + 1 print >> decilog, " changing DATA_RANGE= to", new_data_range modify_xdsinp(xdsinp, inp_params=[("JOB", "INIT"), ("DATA_RANGE", "%d %d" % tuple(new_data_range))]) for f in xds_files.generated_by_INIT: util.rotate_file(os.path.join(root, f), copy=False) run_xds(wdir=root, show_progress=params.show_progress) # Peak search modify_xdsinp(xdsinp, inp_params=[("JOB", "COLSPOT")]) run_xds(wdir=root, show_progress=params.show_progress) if params.auto_frame_exclude_spot_based: sx = idxreflp.SpotXds(spot_xds) sx.set_xdsinp(xdsinp) spots = filter(lambda x: 5 < x[-1] < 30, sx.collected_spots()) # low-res (5 A) frame_numbers = numpy.array(map(lambda x: int(x[2]) + 1, spots)) data_range = map( int, dict(get_xdsinp_keyword(xdsinp))["DATA_RANGE"].split()) # XXX this assumes SPOT_RANGE equals to DATA_RANGE. Is this guaranteed? 
h = numpy.histogram(frame_numbers, bins=numpy.arange(data_range[0], data_range[1] + 2, step=1)) q14 = numpy.percentile(h[0], [25, 75]) iqr = q14[1] - q14[0] cutoff = max( h[0][h[0] <= iqr * 1.5 + q14[1]]) / 5 # magic number print >> decilog, "DEBUG:: IQR= %.2f, Q1/4= %s, cutoff= %.2f" % ( iqr, q14, cutoff) cut_frames = h[1][h[0] < cutoff] keep_frames = h[1][h[0] >= cutoff] print >> decilog, "DEBUG:: keep_frames=", keep_frames print >> decilog, "DEBUG:: cut_frames=", cut_frames if len(cut_frames) > 0: cut_ranges = [ [cut_frames[0], cut_frames[0]], ] for fn in cut_frames: if fn - cut_ranges[-1][1] <= 1: cut_ranges[-1][1] = fn else: cut_ranges.append([fn, fn]) # Edit XDS.INP cut_inp_str = "".join( map( lambda x: "EXCLUDE_DATA_RANGE= %6d %6d\n" % tuple( x), cut_ranges)) open(xdsinp, "a").write("\n" + cut_inp_str) # Edit SPOT.XDS shutil.copyfile(spot_xds, spot_xds + ".org") sx.write(open(spot_xds, "w"), frame_selection=set(keep_frames)) # Indexing if params.cell_prior.method == "use_first": modify_xdsinp(xdsinp, inp_params=[ ("JOB", "IDXREF"), ("UNIT_CELL_CONSTANTS", " ".join( map(lambda x: "%.3f" % x, params.cell_prior.cell))), ("SPACE_GROUP_NUMBER", "%d" % params.cell_prior.sgnum), ]) else: modify_xdsinp(xdsinp, inp_params=[("JOB", "IDXREF")]) run_xds(wdir=root, show_progress=params.show_progress) print >> decilog, "" # TODO indexing stats like indexed percentage here. if params.tryhard: try_indexing_hard(root, params.show_progress, decilog, known_sgnum=params.cell_prior.sgnum, known_cell=params.cell_prior.cell, tol_length=params.cell_prior.tol_length, tol_angle=params.cell_prior.tol_angle) if not os.path.isfile(xparm): print >> decilog, " Indexing failed." return if params.cell_prior.sgnum > 0: # Check anyway xsxds = XPARM(xparm).crystal_symmetry() cosets = reindex.reindexing_operators( xs_prior, xsxds, params.cell_prior.tol_length, params.cell_prior.tol_angle) if cosets.double_cosets is None: if params.cell_prior.check: print >> decilog, " Incompatible cell. 
Indexing failed." return else: print >> decilog, " Warning: Incompatible cell." elif params.cell_prior.method == "symm_constraint_only": cell = xsxds.unit_cell().change_basis( cosets.combined_cb_ops()[0]) print >> decilog, " Trying symmetry-constrained cell parameter:", cell modify_xdsinp(xdsinp, inp_params=[ ("JOB", "IDXREF"), ("UNIT_CELL_CONSTANTS", " ".join( map(lambda x: "%.3f" % x, cell.parameters()))), ("SPACE_GROUP_NUMBER", "%d" % params.cell_prior.sgnum), ]) for f in xds_files.generated_by_IDXREF: util.rotate_file(os.path.join(root, f), copy=(f == "SPOT.XDS")) run_xds(wdir=root, show_progress=params.show_progress) if not os.path.isfile(xparm): print >> decilog, " Indexing failed." return # Check again xsxds = XPARM(xparm).crystal_symmetry() if not xsxds.unit_cell().is_similar_to( xs_prior.unit_cell(), params.cell_prior.tol_length, params.cell_prior.tol_angle): print >> decilog, " Resulted in different cell. Indexing failed." return elif params.mode == "recycle": print >> decilog, " Start recycle. original ISa= %.2f" % correctlp.get_ISa( correct_lp, check_valid=True) for f in xds_files.generated_after_DEFPIX + ("XPARM.XDS", "plot_integrate.log"): util.rotate_file(os.path.join(root, f), copy=True) shutil.copyfile(gxparm + ".1", xparm) else: raise "Unknown mode (%s)" % params.mode # To Integration modify_xdsinp(xdsinp, inp_params=[("JOB", "DEFPIX INTEGRATE"), ("INCLUDE_RESOLUTION_RANGE", "50 0")]) run_xds(wdir=root, show_progress=params.show_progress) if os.path.isfile(integrate_lp): xds_plot_integrate.run(integrate_lp, os.path.join(root, "plot_integrate.log")) if not os.path.isfile(integrate_hkl): print >> decilog, " Integration failed." 
return # Make _noscale.HKL if needed if params.no_scaling: bk_prefix = make_backup(("XDS.INP", ), wdir=root, quiet=True) xparm_obj = XPARM(xparm) modify_xdsinp(xdsinp, inp_params=[ ("JOB", "CORRECT"), ("CORRECTIONS", ""), ("NBATCH", "1"), ("MINIMUM_I/SIGMA", "50"), ("REFINE(CORRECT)", ""), ("UNIT_CELL_CONSTANTS", " ".join( map(lambda x: "%.3f" % x, xparm_obj.unit_cell))), ("SPACE_GROUP_NUMBER", "%d" % xparm_obj.spacegroup), ]) print >> decilog, " running CORRECT without empirical scaling" run_xds(wdir=root, show_progress=params.show_progress) for f in xds_files.generated_by_CORRECT + ("XDS.INP", ): ff = os.path.join(root, f) if not os.path.isfile(ff): continue if ff.endswith(".cbf"): os.remove(ff) else: os.rename(ff, ff + "_noscale") revert_files(("XDS.INP", ), bk_prefix, wdir=root, quiet=True) # Run pointless pointless_integrate = {} if params.use_pointless: worker = Pointless() pointless_integrate = worker.run_for_symm( xdsin=integrate_hkl, logout=os.path.join(root, "pointless_integrate.log")) if "symm" in pointless_integrate: symm = pointless_integrate["symm"] print >> decilog, " pointless using INTEGRATE.HKL suggested", symm.space_group_info( ) if xs_prior: if xtal.is_same_space_group_ignoring_enantiomorph( symm.space_group(), xs_prior.space_group()): print >> decilog, " which is consistent with given symmetry." elif xtal.is_same_laue_symmetry(symm.space_group(), xs_prior.space_group()): print >> decilog, " which has consistent Laue symmetry with given symmetry." else: print >> decilog, " which is inconsistent with given symmetry." sgnum = symm.space_group_info().type().number() cell = " ".join( map(lambda x: "%.2f" % x, symm.unit_cell().parameters())) modify_xdsinp(xdsinp, inp_params=[("SPACE_GROUP_NUMBER", "%d" % sgnum), ("UNIT_CELL_CONSTANTS", cell)]) else: print >> decilog, " pointless failed." 
flag_do_not_change_symm = False if xs_prior and params.cell_prior.force: modify_xdsinp(xdsinp, inp_params=[("UNIT_CELL_CONSTANTS", " ".join( map(lambda x: "%.3f" % x, params.cell_prior.cell))), ("SPACE_GROUP_NUMBER", "%d" % params.cell_prior.sgnum)]) flag_do_not_change_symm = True elif params.cell_prior.method == "correct_only": xsxds = XPARM(xparm).crystal_symmetry() cosets = reindex.reindexing_operators(xs_prior, xsxds, params.cell_prior.tol_length, params.cell_prior.tol_angle) if cosets.double_cosets is not None: cell = xsxds.unit_cell().change_basis( cosets.combined_cb_ops()[0]) print >> decilog, " Using given symmetry in CORRECT with symmetry constraints:", cell modify_xdsinp(xdsinp, inp_params=[ ("UNIT_CELL_CONSTANTS", " ".join( map(lambda x: "%.3f" % x, cell.parameters()))), ("SPACE_GROUP_NUMBER", "%d" % params.cell_prior.sgnum), ]) flag_do_not_change_symm = True else: print >> decilog, " Tried to use given symmetry in CORRECT, but cell in integration is incompatible." # Do Scaling modify_xdsinp(xdsinp, inp_params=[ ("JOB", "CORRECT"), ]) run_xds(wdir=root, show_progress=params.show_progress) if not os.path.isfile(xac_hkl): print >> decilog, " CORRECT failed." return if not os.path.isfile(gxparm): print >> decilog, " Refinement in CORRECT failed." print >> decilog, " OK. ISa= %.2f" % correctlp.get_ISa( correct_lp, check_valid=True) ret = calc_merging_stats(xac_hkl) if params.cut_resolution: if ret is not None and ret[0] is not None: d_min = ret[0] modify_xdsinp(xdsinp, inp_params=[("JOB", "CORRECT"), ("INCLUDE_RESOLUTION_RANGE", "50 %.2f" % d_min)]) print >> decilog, " Re-scale at %.2f A" % d_min os.rename(os.path.join(root, "CORRECT.LP"), os.path.join(root, "CORRECT_fullres.LP")) os.rename(xac_hkl, os.path.join(root, "XDS_ASCII_fullres.HKL")) run_xds(wdir=root, show_progress=params.show_progress) print >> decilog, " OK. 
ISa= %.2f" % correctlp.get_ISa( correct_lp, check_valid=True) print >> decilog, " (Original files are saved as *_fullres.*)" else: print >> decilog, "error: Can't decide resolution." last_ISa = correctlp.get_ISa(correct_lp, check_valid=True) # Run pointless and (if result is different from INTEGRATE) re-scale. if params.use_pointless: worker = Pointless() pointless_correct = worker.run_for_symm( xdsin=xac_hkl, logout=os.path.join(root, "pointless_correct.log")) pointless_best_symm = None if "symm" in pointless_correct: symm = pointless_correct["symm"] need_rescale = False if pointless_integrate.get("symm"): symm_by_integrate = pointless_integrate["symm"] if not xtal.is_same_laue_symmetry( symm_by_integrate.space_group(), symm.space_group()): print >> decilog, "pointless suggested %s, which is different Laue symmetry from INTEGRATE.HKL (%s)" % ( symm.space_group_info(), symm_by_integrate.space_group_info()) prob_integrate = pointless_integrate.get( "laue_prob", float("nan")) prob_correct = pointless_correct.get( "laue_prob", float("nan")) print >> decilog, " Prob(%s |INTEGRATE), Prob(%s |CORRECT) = %.4f, %.4f." % ( symm_by_integrate.space_group_info(), symm.space_group_info(), prob_integrate, prob_correct) if prob_correct > prob_integrate: need_rescale = True pointless_best_symm = symm else: pointless_best_symm = symm_by_integrate else: need_rescale = True pointless_best_symm = symm print >> decilog, "pointless using XDS_ASCII.HKL suggested %s" % symm.space_group_info( ) if xs_prior: if xtal.is_same_space_group_ignoring_enantiomorph( symm.space_group(), xs_prior.space_group()): print >> decilog, " which is consistent with given symmetry." elif xtal.is_same_laue_symmetry( symm.space_group(), xs_prior.space_group()): print >> decilog, " which has consistent Laue symmetry with given symmetry." else: print >> decilog, " which is inconsistent with given symmetry." 
if need_rescale and not flag_do_not_change_symm: sgnum = symm.space_group_info().type().number() cell = " ".join( map(lambda x: "%.2f" % x, symm.unit_cell().parameters())) modify_xdsinp(xdsinp, inp_params=[ ("JOB", "CORRECT"), ("SPACE_GROUP_NUMBER", "%d" % sgnum), ("UNIT_CELL_CONSTANTS", cell), ("INCLUDE_RESOLUTION_RANGE", "50 0") ]) run_xds(wdir=root, show_progress=params.show_progress) ret = calc_merging_stats(xac_hkl) if params.cut_resolution: if ret is not None and ret[0] is not None: d_min = ret[0] modify_xdsinp(xdsinp, inp_params=[ ("JOB", "CORRECT"), ("INCLUDE_RESOLUTION_RANGE", "50 %.2f" % d_min) ]) print >> decilog, " Re-scale at %.2f A" % d_min os.rename(os.path.join(root, "CORRECT.LP"), os.path.join(root, "CORRECT_fullres.LP")) os.rename( xac_hkl, os.path.join(root, "XDS_ASCII_fullres.HKL")) run_xds(wdir=root, show_progress=params.show_progress) print >> decilog, " OK. ISa= %.2f" % correctlp.get_ISa( correct_lp, check_valid=True) print >> decilog, " (Original files are saved as *_fullres.*)" else: print >> decilog, "error: Can't decide resolution." for f in ("CORRECT_fullres.LP", "XDS_ASCII_fullres.HKL"): if os.path.isfile(os.path.join(root, f)): print >> decilog, "removing", f os.remove(os.path.join(root, f)) ISa = correctlp.get_ISa(correct_lp, check_valid=True) if ISa >= last_ISa or last_ISa != last_ISa: # if improved or last_ISa is nan print >> decilog, "ISa improved= %.2f" % ISa else: print >> decilog, "ISa got worse= %.2f" % ISa if pointless_best_symm: xac_symm = XDS_ASCII(xac_hkl, read_data=False).symm if not xtal.is_same_space_group_ignoring_enantiomorph( xac_symm.space_group(), pointless_best_symm.space_group()): if xtal.is_same_laue_symmetry( xac_symm.space_group(), pointless_best_symm.space_group()): tmp = "same Laue symmetry" else: tmp = "different Laue symmetry" print >> decilog, "WARNING: symmetry in scaling is different from Pointless result (%s)." 
% tmp run_xdsstat(wdir=root) print if params.make_report: html_report.make_individual_report(root, root) except: print >> decilog, traceback.format_exc() finally: print >> decilog, "\nxds_sequence finished at %s" % time.strftime( "%Y-%m-%d %H:%M:%S") decilog.close()
def total_deg_from_xds_inp(xdsinp):
    """Return the total oscillation range (in degrees) of the dataset.

    Reads DATA_RANGE (first/last frame numbers) and OSCILLATION_RANGE
    (degrees per frame) from the given XDS.INP file.

    Args:
        xdsinp: path to an XDS.INP file.

    Returns:
        float: (last - first + 1) * oscillation_range_per_frame
    """
    kwds = dict(get_xdsinp_keyword(xdsinp))
    # Unpack explicitly instead of indexing a map() result: on Python 3
    # map() returns a non-subscriptable iterator, so dr[1] would fail.
    first, last = [int(x) for x in kwds["DATA_RANGE"].split()]
    osc = float(kwds["OSCILLATION_RANGE"])
    return (last - first + 1) * osc