def __init__(self, path, json=None, pickle_ext=None, json_ext=None):
    """Load a reflection table and, when one can be located, an experiment list.

    path -- path to a reflection table file
    json -- optional path to an experiment list; when None, find_json() is
            used to locate one based on `path`
    pickle_ext, json_ext -- extension hints forwarded to find_json()

    Sets self.experiments / self.reflections to None (with a console
    message) when the corresponding data cannot be read.
    """
    if json is None:
        json = find_json(path, pickle_ext, json_ext)
    if json is None:
        # No experiment list found: import only the reflections.
        importer = Importer([path], read_experiments=False,
                            read_reflections=True, check_format=False)
        # print() with a single argument behaves identically under
        # Python 2 and 3 (the original py2-only statement form does not).
        print("unable to find experiment list")
        self.experiments = None
    else:
        importer = Importer([path, json], read_experiments=True,
                            read_reflections=True, check_format=False)
        try:
            self.experiments = flatten_experiments(importer.experiments)[0]
        except IndexError:
            print("unable to read experiment list")
            self.experiments = None
    try:
        self.reflections = flatten_reflections(importer.reflections)[0]
    except IndexError:
        print("unable to read reflection table")
        self.reflections = None
def __init__(self, files):
    """Import reflections plus exactly one experiment or datablock from
    `files` and forward them to the base-class constructor.

    Bails out early (after printing the offending paths) when the importer
    cannot handle one or more of the files.
    """
    from dials.util.options import Importer, flatten_reflections, flatten_experiments, flatten_datablocks
    importer = Importer(files, read_experiments=True, read_datablocks=True,
                        read_reflections=True, check_format=False)
    if importer.unhandled:
        # %-formatting keeps the message identical under Python 2 and 3
        # (the original py2-only print statement form does not compile on 3).
        print("Unable to handle one or more files: %s" % importer.unhandled)
        return
    reflections = flatten_reflections(importer.reflections)
    assert len(reflections) == 1, "Implemented only for one reflection table at a time presently"
    datablock = None
    experiment = None
    if importer.experiments:
        experiments = flatten_experiments(importer.experiments)
        assert len(experiments) == 1, "Implemented only for one experiment at a time presently"
        experiment = experiments[0]
    if importer.datablocks:
        datablocks = flatten_datablocks(importer.datablocks)
        assert len(datablocks) == 1, "Implemented only for one datablock at a time presently"
        datablock = datablocks[0]
    super(ReflectionsRadialLengthsFromFiles, self).__init__(
        reflections[0], datablock=datablock, experiment=experiment)
def construct_frames_from_files(refl_name, json_name, outname=None, outdir=None):
    """Split a multi-experiment integration result into per-experiment
    cctbx.xfel "frame" pickles.

    refl_name -- path to the reflection table (integrated.pickle) file
    json_name -- path to the experiments json file
    outname -- output filename template; must contain a %-placeholder for the
               experiment index (one is appended/prepended when absent)
    outdir -- output directory (defaults to the current directory)

    Experiments with no matching reflections (by 'id' column) are skipped.
    """
    importer = Importer([refl_name, json_name], read_experiments=True,
                        read_reflections=True, check_format=False)
    if importer.unhandled:
        # %s-formatting keeps the output identical under Python 2 and 3.
        print("unable to process: %s" % importer.unhandled)
    reflections_l = flatten_reflections(importer.reflections)[0]
    experiments_l = flatten_experiments(importer.experiments)
    if outdir is None:
        outdir = '.'
    if outname is None:
        outname = 'int-%d' + refl_name.split('.pickle')[0] + '_extracted.pickle'
    elif '%' not in outname:
        outname = outname.split(".pickle")[0] + ("_%d.pickle")
    # range() replaces py2-only xrange(); identical iteration behavior.
    for i in range(len(experiments_l)):
        refl = reflections_l.select(reflections_l['id'] == i)
        if len(refl) == 0:
            continue
        expt = experiments_l[i]
        frame = ConstructFrame(refl, expt).make_frame()
        name = outname % i
        easy_pickle.dump(os.path.join(outdir, name), frame)
def __init__(self):
    """Regression-test fixture: load the stills_PSII test data twice (so every
    reflection overlaps its duplicate), run the overlaps filter, and keep the
    filtered reflection table in self.reflections.
    """
    from dials.util.options import Importer, flatten_experiments, flatten_reflections
    import libtbx.load_env
    try:
        dials_regression = libtbx.env.dist_path('dials_regression')
    except KeyError:
        # print() with one argument is valid and identical on Python 2 and 3.
        print('SKIP: dials_regression not configured')
        exit(0)
    # test data
    import os
    refl_path = os.path.join(dials_regression, 'integration_test_data',
                             'stills_PSII', 'idx-20161021225550223_integrated.pickle')
    expt_path = os.path.join(dials_regression, 'integration_test_data',
                             'stills_PSII', 'idx-20161021225550223_refined_experiments.json')
    # The same reflection/experiment files are passed twice so the filter
    # sees two identical experiments whose spots all overlap.
    importer = Importer([refl_path, refl_path, expt_path, expt_path],
                        read_experiments=True, read_reflections=True,
                        check_format=False)
    self.reflections = flatten_reflections(importer.reflections)
    self.experiments = flatten_experiments(importer.experiments)
    from dials.algorithms.integration.overlaps_filter import OverlapsFilterMultiExpt
    overlaps_filter = OverlapsFilterMultiExpt(self.reflections[0], self.experiments)
    overlaps_filter.remove_foreground_foreground_overlaps()
    overlaps_filter.remove_foreground_background_overlaps()
    self.reflections = [overlaps_filter.refl]
def __init__(self, expts_path=None, **kwargs):
    """Extract crystal symmetry (and its Niggli reduction) from a single
    experiment list.

    expts_path -- path to an experiments json containing exactly one
                  experiment
    Raises Exception when the importer cannot handle the file; asserts on
    multi-experiment input.
    """
    from dials.util.options import Importer, flatten_experiments
    importer = Importer([expts_path], read_experiments=True,
                        read_reflections=False, check_format=False)
    if importer.unhandled:
        # BUGFIX: include the offending paths in the exception message —
        # the previous py3 port dropped them entirely (the py2 form was
        # `raise Exception("unable to process:"), importer.unhandled`).
        raise Exception("unable to process: %s" % importer.unhandled)
    experiments_l = flatten_experiments(importer.experiments)
    assert len(experiments_l) == 1, "Sorry, only supports one experiment per json at present."
    tcrystal = experiments_l[0].crystal
    from cctbx import crystal
    group = tcrystal.get_space_group()
    self.crystal_symmetry = crystal.symmetry(unit_cell=tcrystal.get_unit_cell(),
                                             space_group=group)
    self.crystal_symmetry.show_summary()
    self.niggli_cell = self.crystal_symmetry.niggli_cell()
    self.niggli_cell.show_summary(prefix=" niggli-->")
    self.uc = self.niggli_cell.unit_cell().parameters()
    self.mm = self.niggli_cell.unit_cell().metrical_matrix()
    # point group symbol with whitespace stripped, e.g. "P212121"
    self.pg = "".join(group.type().lookup_symbol().split())
    self.path = expts_path
def __init__(self, pickle_name, json_name):
    """Load a reflection table and an experiment list from files and hand
    the first of each to ConstructFrame.__init__.

    pickle_name -- path to the integration.pickle (reflection table) file
    json_name -- path to the experiments.json file
    """
    # check_format=False because we don't want to load any imagesets in the
    # experiment list
    importer = Importer([pickle_name, json_name], read_experiments=True,
                        read_reflections=True, check_format=False)
    if importer.unhandled:
        # %s-formatting keeps the message identical under Python 2 and 3
        # (the original py2-only print statement does not compile on 3).
        print("unable to process: %s" % importer.unhandled)
    ConstructFrame.__init__(self,
                            flatten_reflections(importer.reflections)[0],
                            flatten_experiments(importer.experiments)[0])
def __init__(self, refl_name, json_name, outname=None):
    """Load a reflection table and a single-experiment list from files and
    initialize the ConstructFrame base.

    refl_name -- path to the integration.pickle (reflection table) file
    json_name -- path to the experiments.json file
    outname -- retained for interface compatibility; unused here
    """
    # check_format=False because we don't want to load any imagesets in the
    # experiment list
    importer = Importer([refl_name, json_name], read_experiments=True,
                        read_reflections=True, check_format=False)
    if importer.unhandled:
        # %s-formatting keeps the message identical under Python 2 and 3.
        print("unable to process: %s" % importer.unhandled)
    reflections_l = flatten_reflections(importer.reflections)
    experiments_l = flatten_experiments(importer.experiments)
    assert len(experiments_l) == 1, "cannot construct a single frame from multiple experiments"
    # BUGFIX: the original assigned `frame = ConstructFrame.__init__(...)`
    # and then guarded `if frame is not None: self.frame.make_frame()`.
    # __init__ always returns None, so that branch was unreachable dead
    # code (and `self.frame` was never set). Callers invoke make_frame()
    # themselves, as before.
    ConstructFrame.__init__(self, reflections_l[0], experiments_l[0])
# Command-line driver: extract one cctbx.xfel frame pickle per image of a scan.
master_phil_scope = iotbx.phil.parse("""
pickle_name = None
  .type = path
  .help = path to a reflection table (integrated.pickle) file
json_name = None
  .type = path
  .help = path to an experiments.json file
output_dir = None
  .type = path
  .help = if set, path to directory to save the new pickle file
""")

parser = OptionParser(phil=master_phil_scope)
params, options = parser.parse_args(show_diff_phil=True)

# get scan range
importer = Importer([params.pickle_name, params.json_name],
                    read_experiments=True, read_reflections=True,
                    check_format=False)
if importer.unhandled:
    print("unable to process:", importer.unhandled)
experiment = flatten_experiments(importer.experiments)[0]
scan = experiment.scan
# NOTE(review): range(first, last) excludes the last image number; if the
# dxtbx image range is inclusive this skips the final image — confirm intent.
for scan_no in range(scan.get_image_range()[0], scan.get_image_range()[1]):
    # build each frame
    frame = ConstructFrameFromFiles(params.pickle_name, params.json_name,
                                    scan_no).make_frame()
    if params.output_dir is not None:  # clearer than `not x is None`
        assert os.path.isdir(params.output_dir)
        basename = os.path.basename(params.pickle_name)
        name = os.path.splitext(basename)[0] + "_extracted_" + str(scan_no) + ".pickle"
        # presumably `frame` is dumped to dest_path just below — the
        # continuation lies outside this excerpt.
        dest_path = os.path.join(params.output_dir, name)
def test_for_overlaps(dials_regression):
    """After OverlapsFilterMultiExpt runs on a deliberately duplicated
    dataset (the same reflections and experiments loaded twice), assert that
    no two shoeboxes claim the same detector pixel as foreground, and no
    pixel is both valid foreground and valid background."""
    from cctbx.array_family import flex
    from dials.algorithms.shoebox import MaskCode

    # A pixel counts as foreground only when both the Foreground and Valid
    # bits are set; likewise for background.
    code_fgd = MaskCode.Foreground | MaskCode.Valid

    def is_fgd(code):
        return (code & code_fgd) == code_fgd

    code_bgd = MaskCode.Background | MaskCode.Valid
    code_overlap = code_fgd | code_bgd

    def is_overlap(code):
        # all four bits set: valid foreground AND valid background
        return (code & code_overlap) == code_overlap

    from dials.util.options import Importer, flatten_experiments, flatten_reflections

    # test data — each path is passed twice so every spot overlaps its twin
    refl_path = os.path.join(
        dials_regression,
        "integration_test_data",
        "stills_PSII",
        "idx-20161021225550223_integrated.pickle",
    )
    expt_path = os.path.join(
        dials_regression,
        "integration_test_data",
        "stills_PSII",
        "idx-20161021225550223_refined_experiments.json",
    )
    importer = Importer(
        [refl_path, refl_path, expt_path, expt_path],
        read_experiments=True,
        read_reflections=True,
        check_format=False,
    )
    reflections = flatten_reflections(importer.reflections)
    experiments = flatten_experiments(importer.experiments)

    from dials.algorithms.integration.overlaps_filter import OverlapsFilterMultiExpt

    overlaps_filter = OverlapsFilterMultiExpt(reflections[0], experiments)
    overlaps_filter.remove_foreground_foreground_overlaps()
    overlaps_filter.remove_foreground_background_overlaps()
    reflections = [overlaps_filter.refl]
    for expt, refl in zip(experiments, reflections):
        det = expt.detector
        size_fast, size_slow = det[0].get_image_size()
        # accumulated mask codes, one entry per detector pixel
        mask_array = flex.size_t(size_fast * size_slow)
        for obs in refl.rows():
            shoebox = obs["shoebox"]
            fast_coords = range(shoebox.xsize())
            slow_coords = range(shoebox.ysize())
            # NOTE(review): zip() walks only the diagonal f == s of the
            # shoebox, not every (f, s) pixel pair — presumably
            # itertools.product was intended; confirm before relying on
            # full pixel coverage.
            for f, s in zip(fast_coords, slow_coords):
                f_abs = f + shoebox.bbox[0]  # relative to detector
                s_abs = s + shoebox.bbox[2]  # relative to detector
                posn = f_abs + s_abs * size_fast  # position in mask_array
                posn_in_shoebox = f + shoebox.xsize() * s  # position in shoebox
                assert not (
                    is_fgd(shoebox.mask[posn_in_shoebox]) and is_fgd(mask_array[posn])
                ), "Overlapping foreground found at indexed position (%d, %d), " % (
                    f_abs,
                    s_abs,
                ) + "observed centroid (%d, %d)" % (
                    obs["xyzcal.px"][0],
                    obs["xyzcal.px"][1],
                )
                # fold this pixel's mask bits into the detector-wide array;
                # shoeboxes extending past the detector edge are skipped
                try:
                    mask_array[posn] |= shoebox.mask[posn_in_shoebox]
                except IndexError:
                    continue
        for i, this_code in enumerate(mask_array):
            # NOTE(review): `shoebox` here is whatever the last loop
            # iteration left behind, so the reported (x, y) uses that
            # shoebox's xsize() — a reporting aid only; verify intent.
            assert not is_overlap(
                this_code
            ), "Overlapping foreground and background found at (%d, %d)" % (
                i % shoebox.xsize(),
                i // shoebox.xsize(),
            )
def get_data_from_dials(params, files):
    """Read one DIALS reflection table + experiment list and return a cctbx
    miller array of filtered, scaled intensities.

    params -- phil parameters; reads dials_data ("sum" selects summation
              intensities, anything else profile-fitted), min_peak,
              min_peak_percentile, skip_rejected, dmin, correct_peak,
              cancel_rlp, polarization.correct, anomalous_flag
    files -- file names handed to dials.util.options.Importer

    Raises NotImplementedError when cancel_rlp with polarization correction
    is requested; asserts on multi-experiment/multi-table input.
    """
    from dials.util.options import Importer, flatten_reflections, flatten_experiments
    importer = Importer(files, read_experiments=True, read_reflections=True)
    reflections = flatten_reflections(importer.reflections)
    experiments = flatten_experiments(importer.experiments)
    assert len(reflections) == len(experiments) == 1

    xs = crystal.symmetry(experiments[0].crystal.get_unit_cell(),
                          space_group=experiments[0].crystal.get_space_group())
    # keep only indexed reflections (id >= 0), which must all belong to
    # the single experiment 0
    tmp = reflections[0].select(reflections[0]["id"] >= 0)
    assert max(tmp["id"]) == 0

    if params.dials_data == "sum":
        assert "intensity.sum.value" in tmp
        assert "intensity.sum.variance" in tmp
    else:
        assert "intensity.prf.value" in tmp
        assert "intensity.prf.variance" in tmp

    intensity_key = "intensity.sum.value" if params.dials_data == "sum" else "intensity.prf.value"
    variance_key = "intensity.sum.variance" if params.dials_data == "sum" else "intensity.prf.variance"

    sel_remove = flex.bool(tmp.size(), False)

    if params.min_peak is not None:
        sel = tmp["partiality"] < params.min_peak / 100.
        sel_remove |= sel
    elif params.min_peak_percentile is not None:
        q = numpy.percentile(tmp["partiality"], params.min_peak_percentile)
        # BUGFIX: the original printed the undefined name `xac` here, which
        # raised NameError whenever this branch ran; report the cutoff only.
        print("percentile %.2f" % (q * 100.))
        sel = tmp["partiality"] < q
        sel_remove |= sel

    if params.skip_rejected:
        sel_remove |= tmp[variance_key] <= 0
    if params.dmin is not None:
        sel_remove |= xs.unit_cell().d(tmp["miller_index"]) < params.dmin
    if params.correct_peak:
        sel_remove |= (tmp["partiality"] < .01)  # remove PEAK==0

    # Remove selected
    print("DEBUG:: removing %d reflections" % sel_remove.count(True))
    tmp = tmp.select(~sel_remove)

    ret = miller.array(miller.set(xs, tmp["miller_index"], params.anomalous_flag),
                       data=tmp[intensity_key],
                       sigmas=flex.sqrt(flex.abs(tmp[variance_key])))

    scale = flex.double(ret.size(), 1.)
    if not params.cancel_rlp and "lp" in tmp:
        scale *= tmp["lp"]
        if "dqe" in tmp:
            scale /= tmp["dqe"]
    if params.correct_peak:
        scale *= tmp["partiality"]
    ret = ret.apply_scaling(factor=scale)

    if params.cancel_rlp and params.polarization.correct:
        # BUGFIX: `raise "Not implemented"` (a string raise) is itself a
        # TypeError at runtime on py2.6+/py3; raise a real exception.
        raise NotImplementedError("Not implemented")
    return ret