def test_dials_symmetry_indexer_jiffy(helper, refiner_lattices, expected_output):
    """Test the jiffy"""
    n = 2
    multisweep = n > 1
    # Create list of experiments, reflections and refiners
    experiments = []
    reflections = []
    refiners = []
    for i in range(0, n):
        refl_path, exp_path = ("test_%s.refl" % i, "test_%s.expt" % i)
        generate_reflections_in_sg("P 2", id_=i, assign_id=True).as_pickle(refl_path)
        dump.experiment_list(generated_exp(space_group="P 2", id_=i), exp_path)
        experiments.append(exp_path)
        reflections.append(refl_path)
        refiners.append(simple_refiner(refiner_lattices))

    result = helper.dials_symmetry_indexer_jiffy(
        experiments, reflections, refiners, multisweep=multisweep
    )
    pg, reind_op, ntr, pt, reind_refl, reind_exp, reind_init = result
    refiner_reset = refiners[0].get_refiner_reset()
    assert (pg, ntr, pt, refiner_reset, reind_init) == expected_output
    if expected_output[3]:
        for refiner in refiners[1:]:
            assert refiner.get_refiner_reset()
def _export_experiments_reflections(self, experiments, reflections, result):
    from dxtbx.serialize import dump
    from rstbx.symmetry.constraints import parameter_reduction

    reindexed_experiments = copy.deepcopy(experiments)
    reindexed_reflections = flex.reflection_table()
    cb_op_inp_best = (
        result.best_solution.subgroup["cb_op_inp_best"] * result.cb_op_inp_min
    )
    best_subsym = result.best_solution.subgroup["best_subsym"]
    for i, expt in enumerate(reindexed_experiments):
        expt.crystal = expt.crystal.change_basis(result.cb_op_inp_min)
        expt.crystal.set_space_group(sgtbx.space_group("P 1"))
        expt.crystal = expt.crystal.change_basis(
            result.best_solution.subgroup["cb_op_inp_best"])
        expt.crystal.set_space_group(
            best_subsym.space_group().build_derived_acentric_group())
        S = parameter_reduction.symmetrize_reduce_enlarge(
            expt.crystal.get_space_group())
        S.set_orientation(expt.crystal.get_B())
        S.symmetrize()
        expt.crystal.set_B(S.orientation.reciprocal_matrix())
        reindexed_refl = copy.deepcopy(reflections[i])
        reindexed_refl["miller_index"] = cb_op_inp_best.apply(
            reindexed_refl["miller_index"])
        reindexed_reflections.extend(reindexed_refl)
    logger.info(
        "Saving reindexed experiments to %s" % self._params.output.experiments)
    dump.experiment_list(reindexed_experiments, self._params.output.experiments)
    logger.info(
        "Saving %s reindexed reflections to %s"
        % (len(reindexed_reflections), self._params.output.reflections))
    reindexed_reflections.as_file(self._params.output.reflections)
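# Hedged sketch (operator strings here are illustrative, not taken from the
# code above) of how cctbx change-of-basis operators compose and apply to
# Miller indices, as in cb_op_inp_best = cb_op_min_best * cb_op_inp_min
# above: the product applies the right-hand operator first.
from cctbx import sgtbx
from cctbx.array_family import flex

cb_op_inp_min = sgtbx.change_of_basis_op("a,b,c")   # identity, for illustration
cb_op_min_best = sgtbx.change_of_basis_op("b,c,a")  # cyclic permutation
cb_op_inp_best = cb_op_min_best * cb_op_inp_min

hkl = flex.miller_index([(1, 2, 3)])
print(list(cb_op_inp_best.apply(hkl)))  # the reindexed (h, k, l) triple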
def test_dials_symmetry_decide_pointgroup(
    reflection_spacegroup,
    experiments_spacegroup,
    expected_lattices,
    required_spacegroup_order,
    other_spacegroups,
    helper,
):
    """Test for the dials_symmetry_decide_pointgroup helper function"""
    dump.experiment_list(generated_exp(space_group=experiments_spacegroup), "test.expt")
    generate_reflections_in_sg(reflection_spacegroup).as_pickle("test.refl")

    symmetry_analyser = helper.dials_symmetry_decide_pointgroup(
        ["test.expt"], ["test.refl"]
    )

    # Note: instabilities have been observed in the order of the end of the
    # spacegroup list - this is likely due to the use of unseeded random number
    # generation in dials.symmetry symmetry element scoring, but this only seems
    # to affect the order of groups with a score near zero. Hence only assert the
    # order of the spacegroups that must be in order, near the start of the list.
    assert symmetry_analyser.get_possible_lattices() == expected_lattices
    spacegroups = symmetry_analyser.get_likely_spacegroups()
    assert spacegroups[: len(required_spacegroup_order)] == required_spacegroup_order
    assert set(spacegroups[len(required_spacegroup_order):]) == set(other_spacegroups)
def export_as_json(self, experiments, file_name="indexed_experiments.json",
                   compact=False):
    from dxtbx.serialize import dump

    assert experiments.is_consistent()
    dump.experiment_list(experiments, file_name, compact=compact)
def run(args):
    from dials.util.options import OptionParser
    from dials.util.options import flatten_experiments
    import libtbx.load_env

    usage = "%s [options] experiments.json" % libtbx.env.dispatcher_name

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_experiments=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=True)
    experiments = flatten_experiments(params.input.experiments)
    if len(experiments) <= 1:
        parser.print_help()
        return

    from dials.algorithms.indexing.compare_orientation_matrices import (
        difference_rotation_matrix_axis_angle,
    )

    crystals = []
    for experiment in experiments:
        crystal = experiment.crystal
        if params.space_group is not None:
            crystal.set_space_group(params.space_group.group())
        crystals.append(crystal)

    angles = flex.double()

    import math

    padding = int(math.ceil(math.log10(len(experiments))))
    output_template = "%s%%0%ii.json" % (params.output.prefix, padding)

    prev_expt = experiments[0]
    for i in range(1, len(experiments)):
        R_ij, axis, angle, cb_op = difference_rotation_matrix_axis_angle(
            prev_expt.crystal, experiments[i].crystal)
        angles.append(angle)
        # print i, angle
        if abs(angle) > params.max_deviation:
            continue
        experiments[i].crystal = experiments[i].crystal.change_basis(cb_op)
        prev_expt = experiments[i]

        from dxtbx.serialize import dump
        dump.experiment_list(experiments[i:i + 1], output_template % i)

    from matplotlib import pyplot
    n, bins, patches = pyplot.hist(angles.as_numpy_array(), 100)
    pyplot.show()
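# Worked example of the zero-padded filename template constructed above,
# assuming 12 experiments and the illustrative prefix "reindexed_":
# ceil(log10(12)) == 2, so indices are padded to two digits.
import math

padding = int(math.ceil(math.log10(12)))              # -> 2
output_template = "%s%%0%ii.json" % ("reindexed_", padding)
print(output_template)                                 # -> reindexed_%02i.json
print(output_template % 3)                             # -> reindexed_03.json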
def export_experiments(self, filename):
    experiments = self._indexed_experiments
    if self._params.output.split_experiments:
        logger.info("Splitting experiments before output")
        experiments = ExperimentList([copy.deepcopy(re) for re in experiments])
    logger.info("Saving refined experiments to %s" % filename)
    assert experiments.is_consistent()
    dump.experiment_list(experiments, filename)
def run(self):
    '''Execute the script.'''
    from dials.util.options import flatten_reflections, flatten_experiments
    from libtbx.utils import Sorry
    from dials.array_family import flex

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=True)

    # Try to load the models and data
    if not params.input.experiments:
        print "No Experiments found in the input"
        self.parser.print_help()
        return
    if params.input.reflections:
        if len(params.input.reflections) != len(params.input.experiments):
            raise Sorry(
                "The number of input reflections files does not match the "
                "number of input experiments")

    experiments = flatten_experiments(params.input.experiments)
    if params.input.reflections:
        reflections = flatten_reflections(params.input.reflections)[0]
    else:
        reflections = None

    import math
    experiments_template = "%s_%%0%sd.json" % (
        params.output.experiments_prefix,
        int(math.floor(math.log10(len(experiments))) + 1))
    reflections_template = "%s_%%0%sd.pickle" % (
        params.output.reflections_prefix,
        int(math.floor(math.log10(len(experiments))) + 1))

    for i, experiment in enumerate(experiments):
        from dxtbx.model.experiment_list import ExperimentList
        from dxtbx.serialize import dump

        experiment_filename = experiments_template % i
        print 'Saving experiment %d to %s' % (i, experiment_filename)
        dump.experiment_list(ExperimentList([experiment]), experiment_filename)

        if reflections is not None:
            reflections_filename = reflections_template % i
            print 'Saving reflections for experiment %d to %s' % (
                i, reflections_filename)
            ref_sel = reflections.select(reflections['id'] == i)
            ref_sel['id'] = flex.int(len(ref_sel), 0)
            ref_sel.as_pickle(reflections_filename)

    return
def test_assign_and_return_datasets(helper):
    """Test the combined method of assigning ids and setting in the sweep handler"""
    n = 3
    sweephandler = simple_sweep_handler(n)
    for i in range(0, n):
        si = sweephandler.get_sweep_information(i)
        refl_path, exp_path = ("test_%s.refl" % i, "test_%s.expt" % i)
        generate_test_refl().as_pickle(refl_path)
        dump.experiment_list(generated_exp(), exp_path)
        si.set_experiments(exp_path)
        si.set_reflections(refl_path)
    sweephandler = helper.assign_and_return_datasets(sweephandler)
    check_data_in_sweep_handler(sweephandler)
def run(args):
    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_experiments
    import libtbx.load_env

    usage = "%s [options] datablock.json | experiments.json" % (
        libtbx.env.dispatcher_name)

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_datablocks=True,
        read_experiments=True,
        check_format=False,
        epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=True)
    experiments = flatten_experiments(params.input.experiments)
    datablocks = flatten_datablocks(params.input.datablock)
    if len(experiments) == 0 and len(datablocks) == 0:
        parser.print_help()
        exit(0)

    from dials.command_line.dials_import import ManualGeometryUpdater
    update_geometry = ManualGeometryUpdater(params)

    if len(experiments):
        imagesets = experiments.imagesets()
    elif len(datablocks):
        assert len(datablocks) == 1
        imagesets = datablocks[0].extract_imagesets()

    for imageset in imagesets:
        imageset_new = update_geometry(imageset)
        imageset.set_detector(imageset_new.get_detector())
        imageset.set_beam(imageset_new.get_beam())
        imageset.set_goniometer(imageset_new.get_goniometer())
        imageset.set_scan(imageset_new.get_scan())

    from dxtbx.serialize import dump
    if len(experiments):
        print "Saving modified experiments to %s" % params.output.experiments
        dump.experiment_list(experiments, params.output.experiments)
    elif len(datablocks):
        print "Saving modified datablock to %s" % params.output.datablock
        dump.datablock(datablocks, params.output.datablock)
def run(args):
    from dials.util.options import OptionParser
    from dials.util.options import flatten_datablocks
    from dials.util.options import flatten_experiments
    import libtbx.load_env

    usage = "%s [options] datablock.json | experiments.json" % (
        libtbx.env.dispatcher_name)

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_datablocks=True,
        read_experiments=True,
        check_format=False,
        epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=True)
    experiments = flatten_experiments(params.input.experiments)
    datablocks = flatten_datablocks(params.input.datablock)
    if len(experiments) == 0 and len(datablocks) == 0:
        parser.print_help()
        exit(0)

    from dials.command_line.dials_import import ManualGeometryUpdater
    update_geometry = ManualGeometryUpdater(params)

    if len(experiments):
        imagesets = experiments.imagesets()
    elif len(datablocks):
        assert len(datablocks) == 1
        imagesets = datablocks[0].extract_imagesets()

    for imageset in imagesets:
        imageset_new = update_geometry(imageset)
        imageset.set_detector(imageset_new.get_detector())
        imageset.set_beam(imageset_new.get_beam())
        imageset.set_goniometer(imageset_new.get_goniometer())
        imageset.set_scan(imageset_new.get_scan())

    from dxtbx.serialize import dump
    if len(experiments):
        print "Saving modified experiments to %s" % params.output.experiments
        dump.experiment_list(experiments, params.output.experiments)
    elif len(datablocks):
        raise NotImplementedError()
def save_experiments(filename):
    from xia2.Schema import imageset_cache
    from dxtbx.model.experiment_list import ExperimentList
    from dxtbx.model.experiment_list import ExperimentListFactory
    from dxtbx.serialize import dump

    experiments = ExperimentList([])
    for imagesets in imageset_cache.values():
        for imageset in imagesets.values():
            experiments.extend(
                ExperimentListFactory.from_imageset_and_crystal(imageset, None))
    dump.experiment_list(experiments, filename, compact=True)
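# Hedged round-trip sketch: an experiment list written with
# dump.experiment_list can be read back with the matching dxtbx loader;
# check_format=False avoids re-opening the underlying image files.
# The filename is illustrative.
from dxtbx.serialize import load

experiments = load.experiment_list("experiments.json", check_format=False)
print(len(experiments))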
def run(self):
    '''Execute the script.'''
    from dials.util.options import flatten_reflections, flatten_experiments
    from libtbx.utils import Sorry
    from dials.array_family import flex

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=True)

    # Try to load the models and data
    if not params.input.experiments:
        print "No Experiments found in the input"
        self.parser.print_help()
        return
    if params.input.reflections:
        if len(params.input.reflections) != len(params.input.experiments):
            raise Sorry("The number of input reflections files does not match the "
                        "number of input experiments")

    experiments = flatten_experiments(params.input.experiments)
    if params.input.reflections:
        reflections = flatten_reflections(params.input.reflections)[0]
    else:
        reflections = None

    import math
    experiments_template = "%s_%%0%sd.json" % (
        params.output.experiments_prefix,
        int(math.floor(math.log10(len(experiments))) + 1))
    reflections_template = "%s_%%0%sd.pickle" % (
        params.output.reflections_prefix,
        int(math.floor(math.log10(len(experiments))) + 1))

    for i, experiment in enumerate(experiments):
        from dxtbx.model.experiment.experiment_list import ExperimentList
        from dxtbx.serialize import dump

        experiment_filename = experiments_template % i
        print 'Saving experiment %d to %s' % (i, experiment_filename)
        dump.experiment_list(ExperimentList([experiment]), experiment_filename)

        if reflections is not None:
            reflections_filename = reflections_template % i
            print 'Saving reflections for experiment %d to %s' % (
                i, reflections_filename)
            ref_sel = reflections.select(reflections['id'] == i)
            ref_sel['id'] = flex.int(len(ref_sel), 0)
            ref_sel.as_pickle(reflections_filename)

    return
def test_split_experiments(number_of_experiments, helper):
    """Test the call to split experiments: should split the dataset on experiment
    id, giving single datasets with unique ids from 0..n-1"""
    sweephandler = simple_sweep_handler(number_of_experiments)
    exp_path = "test.expt"
    refl_path = "test.refl"
    dump.experiment_list(
        generated_exp(number_of_experiments, assign_ids=True), exp_path
    )
    reflections = flex.reflection_table()
    for i in range(number_of_experiments):
        reflections.extend(generate_test_refl(id_=i, assign_id=True))
    reflections.as_pickle(refl_path)
    # Now call split_experiments and inspect handler to check result
    sweephandler = helper.split_experiments(exp_path, refl_path, sweephandler)
    check_data_in_sweep_handler(sweephandler)
def test_assign_identifiers(helper):
    """Test the call to the assign identifiers wrapper"""
    experiments = []
    reflections = []
    for i in range(0, 3):
        refl_path, exp_path = ("test_%s.refl" % i, "test_%s.expt" % i)
        generate_test_refl().as_pickle(refl_path)
        dump.experiment_list(generated_exp(), exp_path)
        experiments.append(exp_path)
        reflections.append(refl_path)

    assigner = helper.assign_dataset_identifiers(experiments, reflections)
    assigned_exp = load.experiment_list(assigner.get_output_experiments_filename())
    assert assigned_exp[0].identifier == "0"
    assert assigned_exp[1].identifier == "1"
    assert assigned_exp[2].identifier == "2"
    assigned_refl = flex.reflection_table.from_file(
        assigner.get_output_reflections_filename()
    )
    assert assigned_refl.experiment_identifiers()[0] == "0"
    assert assigned_refl.experiment_identifiers()[1] == "1"
    assert assigned_refl.experiment_identifiers()[2] == "2"
def run(args):
    from dials.util.options import OptionParser
    from dials.util.options import flatten_experiments

    usage = "dials.modify_geometry [options] models.expt"

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_experiments=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=True)
    experiments = flatten_experiments(params.input.experiments)
    if len(experiments) == 0:
        parser.print_help()
        exit(0)

    from dials.command_line.dials_import import ManualGeometryUpdater
    update_geometry = ManualGeometryUpdater(params)

    if len(experiments):
        imagesets = experiments.imagesets()

    for imageset in imagesets:
        imageset_new = update_geometry(imageset)
        imageset.set_detector(imageset_new.get_detector())
        imageset.set_beam(imageset_new.get_beam())
        imageset.set_goniometer(imageset_new.get_goniometer())
        imageset.set_scan(imageset_new.get_scan())

    from dxtbx.serialize import dump
    if len(experiments):
        print("Saving modified experiments to %s" % params.output.experiments)
        dump.experiment_list(experiments, params.output.experiments)
crystal.rotate_around_origin(axis, start_angle + (delta_angle / 2), deg=True)
if output_format == "json":
    exp_list = ExperimentList()
    exp_list.append(
        Experiment(
            imageset=ImageSetFactory.make_imageset(list([imgname])),
            beam=beam,
            detector=detector,
            goniometer=gonio,
            scan=scan,
            crystal=crystal))
    if add_background_images == True:
        if len(bkglist) != 1:
            bkgname = bkglist[i]
        else:
            bkgname = bkglist[0]
        exp_list[0].imageset.external_lookup.pedestal.filename = \
            os.path.abspath(bkgname)
    dump.experiment_list(
        exp_list,
        json_dir + "/experiments_for_lunus_{0:05d}.json".format(imnum))
else:
    from scitbx import matrix
    A_matrix = matrix.sqr(crystal.get_A()).inverse()
    At = np.asarray(A_matrix.transpose()).reshape((3, 3))
    print At
    workdir = amatrix_dir_prefix + "{0}".format(imnum)
    if not os.path.isdir(workdir):
        command = 'mkdir {}'.format(workdir)
        call_params = shlex.split(command)
        subprocess.call(call_params)
    # np.save(workdir + "/At.npy", At)
    At.astype('float32').tofile(workdir + "/At.bin")
imnum = imnum + 1
def _refine(self):
    for epoch, idxr in self._refinr_indexers.iteritems():
        # decide what images we are going to process, if not already
        # specified
        #if not self._intgr_wedge:
            #images = self.get_matching_images()
            #self.set_integrater_wedge(min(images),
                                      #max(images))

        #Debug.write('DIALS INTEGRATE PREPARE:')
        #Debug.write('Wavelength: %.6f' % self.get_wavelength())
        #Debug.write('Distance: %.2f' % self.get_distance())

        #if not self._intgr_indexer:
            #self.set_integrater_indexer(DialsIndexer())
            #self.get_integrater_indexer().set_indexer_sweep(
                #self.get_integrater_sweep())

            #self._intgr_indexer.set_working_directory(
                #self.get_working_directory())

            #self._intgr_indexer.setup_from_imageset(self.get_imageset())

            #if self.get_frame_wedge():
                #wedge = self.get_frame_wedge()
                #Debug.write('Propogating wedge limit: %d %d' % wedge)
                #self._intgr_indexer.set_frame_wedge(wedge[0], wedge[1],
                                                    #apply_offset = False)

            ## this needs to be set up from the contents of the
            ## Integrater frame processer - wavelength &c.

            #if self.get_beam_centre():
                #self._intgr_indexer.set_beam_centre(self.get_beam_centre())

            #if self.get_distance():
                #self._intgr_indexer.set_distance(self.get_distance())

            #if self.get_wavelength():
                #self._intgr_indexer.set_wavelength(
                    #self.get_wavelength())

        # get the unit cell from this indexer to initiate processing
        # if it is new... and also copy out all of the information for
        # the Dials indexer if not...

        experiments = idxr.get_indexer_experiment_list()

        indexed_experiments = idxr.get_indexer_payload("experiments_filename")
        indexed_reflections = idxr.get_indexer_payload("indexed_filename")

        if len(experiments) > 1:
            xsweeps = idxr._indxr_sweeps
            assert len(xsweeps) == len(experiments)
            # don't currently support joint refinement
            assert len(self._refinr_sweeps) == 1
            xsweep = self._refinr_sweeps[0]
            i = xsweeps.index(xsweep)
            experiments = experiments[i:i + 1]

            # Extract and output experiment and reflections for current sweep
            indexed_experiments = os.path.join(
                self.get_working_directory(),
                "%s_indexed_experiments.json" % xsweep.get_name())
            indexed_reflections = os.path.join(
                self.get_working_directory(),
                "%s_indexed_reflections.pickle" % xsweep.get_name())

            from dxtbx.serialize import dump
            dump.experiment_list(experiments, indexed_experiments)

            from libtbx import easy_pickle
            from scitbx.array_family import flex
            reflections = easy_pickle.load(
                idxr.get_indexer_payload("indexed_filename"))
            sel = reflections['id'] == i
            assert sel.count(True) > 0
            imageset_id = reflections['imageset_id'].select(sel)
            assert imageset_id.all_eq(imageset_id[0])
            sel = reflections['imageset_id'] == imageset_id[0]
            reflections = reflections.select(sel)
            # set indexed reflections to id == 0 and imageset_id == 0
            reflections['id'].set_selected(reflections['id'] == i, 0)
            reflections['imageset_id'] = flex.int(len(reflections), 0)
            easy_pickle.dump(indexed_reflections, reflections)

        # currently only handle one lattice/sweep
        assert len(experiments.crystals()) == 1
        crystal_model = experiments.crystals()[0]
        lattice = idxr.get_indexer_lattice()

        # check if the lattice was user assigned...
        user_assigned = idxr.get_indexer_user_input_lattice()

        # XXX check that the indexer is an Dials indexer - if not then
        # create one...

        # set a low resolution limit (which isn't really used...)
        # this should perhaps be done more intelligently from an
        # analysis of the spot list or something...?

        #if not self.get_integrater_low_resolution():
            #dmax = idxr.get_indexer_low_resolution()
            #self.set_integrater_low_resolution(dmax)

            #Debug.write('Low resolution set to: %s' % \
                        #self.get_integrater_low_resolution())

        ## copy the data across
        from dxtbx.serialize import load, dump

        refiner = self.Refine()
        refiner.set_experiments_filename(indexed_experiments)
        refiner.set_indexed_filename(indexed_reflections)

        # XXX Temporary workaround for dials.refine error for scan_varying
        # refinement with smaller wedges
        total_phi_range = idxr._indxr_imagesets[0].get_scan().get_oscillation_range()[1]
        if total_phi_range < 5:  # arbitrary value
            refiner.set_scan_varying(False)
        elif total_phi_range < 36:
            refiner.set_interval_width_degrees(total_phi_range / 2)

        FileHandler.record_log_file('%s REFINE' % idxr.get_indexer_full_name(),
                                    refiner.get_log_file())
        refiner.run()

        self._refinr_experiments_filename = \
            refiner.get_refined_experiments_filename()
        experiments = load.experiment_list(self._refinr_experiments_filename)
        self._refinr_indexed_filename = refiner.get_refined_filename()
        self.set_refiner_payload("experiments.json",
                                 self._refinr_experiments_filename)
        self.set_refiner_payload("reflections.pickle",
                                 self._refinr_indexed_filename)

        # this is the result of the cell refinement
        self._refinr_cell = experiments.crystals()[0].get_unit_cell().parameters()
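# Stand-alone sketch of the scan-varying workaround above, using the same
# thresholds (5 and 36 degrees, per the code); the refiner is replaced by a
# plain dict so the heuristic can be checked in isolation. The helper name
# is hypothetical, not part of the codebase above.
def scan_varying_settings(total_phi_range):
    if total_phi_range < 5:  # arbitrary value, per the comment above
        return {"scan_varying": False}
    elif total_phi_range < 36:
        return {"interval_width_degrees": total_phi_range / 2.0}
    return {}  # wide scans keep the defaults

assert scan_varying_settings(3) == {"scan_varying": False}
assert scan_varying_settings(20.0) == {"interval_width_degrees": 10.0}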
def run(args):
    from dials.util import log
    import libtbx.load_env

    usage = "%s experiments.json indexed.pickle [options]" % libtbx.env.dispatcher_name

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_experiments=True,
        read_reflections=True,
        check_format=False,
        epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=False)

    # Configure the logging
    log.config(info=params.output.log, debug=params.output.debug_log)

    from dials.util.version import dials_version
    logger.info(dials_version())

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != '':
        logger.info('The following parameters have been modified:\n')
        logger.info(diff_phil)

    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if len(reflections) == 0 or len(experiments) == 0:
        parser.print_help()
        return

    assert len(reflections) == 1
    reflections = reflections[0]

    if len(experiments) == 0:
        parser.print_help()
        return
    elif len(experiments.crystals()) > 1:
        if params.crystal_id is not None:
            assert params.crystal_id < len(experiments.crystals())
            experiment_ids = experiments.where(
                crystal=experiments.crystals()[params.crystal_id])
            from dxtbx.model.experiment_list import ExperimentList
            experiments = ExperimentList(
                [experiments[i] for i in experiment_ids])
            refl_selections = [reflections['id'] == i for i in experiment_ids]
            reflections['id'] = flex.int(len(reflections), -1)
            for i, sel in enumerate(refl_selections):
                reflections['id'].set_selected(sel, i)
            reflections = reflections.select(reflections['id'] > -1)
        else:
            raise Sorry(
                "Only one crystal can be processed at a time: set crystal_id to choose experiment."
            )

    if params.refinement.reflections.outlier.algorithm in ('auto', libtbx.Auto):
        if experiments[0].goniometer is None:
            params.refinement.reflections.outlier.algorithm = 'sauter_poon'
        else:
            # different default to dials.refine
            # tukey is faster and more appropriate at the indexing step
            params.refinement.reflections.outlier.algorithm = 'tukey'

    from dials.algorithms.indexing.symmetry \
        import refined_settings_factory_from_refined_triclinic

    cb_op_to_primitive = experiments[0].crystal.get_space_group().info()\
        .change_of_basis_op_to_primitive_setting()
    if experiments[0].crystal.get_space_group().n_ltr() > 1:
        effective_group = experiments[0].crystal.get_space_group()\
            .build_derived_reflection_intensity_group(anomalous_flag=True)
        sys_absent_flags = effective_group.is_sys_absent(
            reflections['miller_index'])
        reflections = reflections.select(~sys_absent_flags)
    experiments[0].crystal.update(
        experiments[0].crystal.change_basis(cb_op_to_primitive))
    miller_indices = reflections['miller_index']
    miller_indices = cb_op_to_primitive.apply(miller_indices)
    reflections['miller_index'] = miller_indices

    Lfat = refined_settings_factory_from_refined_triclinic(
        params,
        experiments,
        reflections,
        lepage_max_delta=params.lepage_max_delta,
        nproc=params.nproc,
        refiner_verbosity=params.verbosity)

    s = StringIO()
    possible_bravais_settings = set(solution['bravais'] for solution in Lfat)
    bravais_lattice_to_space_group_table(possible_bravais_settings)
    Lfat.labelit_printout(out=s)
    logger.info(s.getvalue())

    from json import dumps
    from os.path import join

    prefix = params.output.prefix
    if prefix is None:
        prefix = ''
    summary_file = '%sbravais_summary.json' % prefix
    logger.info('Saving summary as %s' % summary_file)
    open(join(params.output.directory, summary_file),
         'wb').write(dumps(Lfat.as_dict()))

    from dxtbx.serialize import dump
    import copy

    for subgroup in Lfat:
        expts = subgroup.refined_experiments
        soln = int(subgroup.setting_number)
        bs_json = '%sbravais_setting_%i.json' % (prefix, soln)
        logger.info('Saving solution %i as %s' % (soln, bs_json))
        dump.experiment_list(expts, join(params.output.directory, bs_json))
    return
def test1():
    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "multi_narrow_wedges")

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(
        suffix="tst_combine_experiments_and_reflections")
    os.chdir(tmp_dir)

    input_phil = phil_input.format(data_dir) + """
 reference_from_experiment.beam=0
 reference_from_experiment.scan=0
 reference_from_experiment.goniometer=0
 reference_from_experiment.detector=0
 """
    with open("input.phil", "w") as phil_file:
        phil_file.writelines(input_phil)

    cmd = "dials.combine_experiments input.phil"
    #print cmd
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()

    # load results
    exp = ExperimentListFactory.from_json_file("combined_experiments.json",
                                               check_format=False)
    ref = flex.reflection_table.from_pickle("combined_reflections.pickle")

    # test the experiments
    assert len(exp) == 103
    assert len(exp.crystals()) == 103
    assert len(exp.beams()) == 1
    assert len(exp.scans()) == 1
    assert len(exp.detectors()) == 1
    assert len(exp.goniometers()) == 1
    for e in exp:
        assert e.imageset is not None

    # test the reflections
    assert len(ref) == 11689

    cmd = " ".join([
        "dials.split_experiments", "combined_experiments.json",
        "combined_reflections.pickle"
    ])
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()

    for i in range(len(exp)):
        assert os.path.exists("experiments_%03d.json" % i)
        assert os.path.exists("reflections_%03d.pickle" % i)

        exp_single = ExperimentListFactory.from_json_file(
            "experiments_%03d.json" % i, check_format=False)
        ref_single = flex.reflection_table.from_pickle(
            "reflections_%03d.pickle" % i)

        assert len(exp_single) == 1
        assert exp_single[0].crystal == exp[i].crystal
        assert exp_single[0].beam == exp[i].beam
        assert exp_single[0].detector == exp[i].detector
        assert exp_single[0].scan == exp[i].scan
        assert exp_single[0].goniometer == exp[i].goniometer
        assert exp_single[0].imageset == exp[i].imageset
        assert len(ref_single) == len(ref.select(ref['id'] == i))
        assert ref_single['id'].all_eq(0)

    cmd = " ".join([
        "dials.split_experiments", "combined_experiments.json",
        "output.experiments_prefix=test"
    ])
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()

    for i in range(len(exp)):
        assert os.path.exists("test_%03d.json" % i)

    # Modify a copy of the detector
    import copy
    detector = copy.deepcopy(exp.detectors()[0])
    panel = detector[0]
    x, y, z = panel.get_origin()
    panel.set_frame(panel.get_fast_axis(), panel.get_slow_axis(),
                    (x, y, z + 10))

    # Set half of the experiments to the new detector
    for i in xrange(len(exp) // 2):
        exp[i].detector = detector

    from dxtbx.serialize import dump
    dump.experiment_list(exp, "modded_experiments.json")

    cmd = " ".join([
        "dials.split_experiments", "modded_experiments.json",
        "combined_reflections.pickle",
        "output.experiments_prefix=test_by_detector",
        "output.reflections_prefix=test_by_detector", "by_detector=True"
    ])
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()

    for i in range(2):
        assert os.path.exists("test_by_detector_%03d.json" % i)
        assert os.path.exists("test_by_detector_%03d.pickle" % i)
    assert not os.path.exists("test_by_detector_%03d.json" % 2)
    assert not os.path.exists("test_by_detector_%03d.pickle" % 2)

    return
def run(args):
    from dials.util import log
    from logging import info
    import libtbx.load_env

    usage = "%s experiments.json indexed.pickle [options]" % libtbx.env.dispatcher_name

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_experiments=True,
        read_reflections=True,
        check_format=False,
        epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=False)

    # Configure the logging
    log.config(info=params.output.log, debug=params.output.debug_log)

    from dials.util.version import dials_version
    info(dials_version())

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != '':
        info('The following parameters have been modified:\n')
        info(diff_phil)

    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)
    assert len(reflections) == 1
    reflections = reflections[0]

    if len(experiments) == 0:
        parser.print_help()
        return
    elif len(experiments.crystals()) > 1:
        if params.crystal_id is not None:
            assert params.crystal_id < len(experiments.crystals())
            experiment_ids = experiments.where(
                crystal=experiments.crystals()[params.crystal_id])
            from dxtbx.model.experiment.experiment_list import ExperimentList
            experiments = ExperimentList(
                [experiments[i] for i in experiment_ids])
            refl_selections = [reflections['id'] == i for i in experiment_ids]
            reflections['id'] = flex.int(len(reflections), -1)
            for i, sel in enumerate(refl_selections):
                reflections['id'].set_selected(sel, i)
            reflections = reflections.select(reflections['id'] > -1)
        else:
            raise Sorry("Only one crystal can be processed at a time: set crystal_id to choose experiment.")

    if params.refinement.reflections.outlier.algorithm in ('auto', libtbx.Auto):
        if experiments[0].goniometer is None:
            params.refinement.reflections.outlier.algorithm = 'sauter_poon'
        else:
            # different default to dials.refine
            # tukey is faster and more appropriate at the indexing step
            params.refinement.reflections.outlier.algorithm = 'tukey'

    from dials.algorithms.indexing.symmetry \
        import refined_settings_factory_from_refined_triclinic

    cb_op_to_primitive = experiments[0].crystal.get_space_group().info()\
        .change_of_basis_op_to_primitive_setting()
    if experiments[0].crystal.get_space_group().n_ltr() > 1:
        effective_group = experiments[0].crystal.get_space_group()\
            .build_derived_reflection_intensity_group(anomalous_flag=True)
        sys_absent_flags = effective_group.is_sys_absent(
            reflections['miller_index'])
        reflections = reflections.select(~sys_absent_flags)
    experiments[0].crystal.update(
        experiments[0].crystal.change_basis(cb_op_to_primitive))
    miller_indices = reflections['miller_index']
    miller_indices = cb_op_to_primitive.apply(miller_indices)
    reflections['miller_index'] = miller_indices

    Lfat = refined_settings_factory_from_refined_triclinic(
        params,
        experiments,
        reflections,
        lepage_max_delta=params.lepage_max_delta,
        nproc=params.nproc,
        refiner_verbosity=params.verbosity)
    s = StringIO()
    Lfat.labelit_printout(out=s)
    info(s.getvalue())

    from json import dumps
    from os.path import join
    open(join(params.output.directory, 'bravais_summary.json'),
         'wb').write(dumps(Lfat.as_dict()))

    from dxtbx.serialize import dump
    import copy

    for subgroup in Lfat:
        expts = copy.deepcopy(experiments)
        for expt in expts:
            expt.crystal.update(subgroup.refined_crystal)
            expt.detector = subgroup.detector
            expt.beam = subgroup.beam
        dump.experiment_list(
            expts,
            join(params.output.directory,
                 'bravais_setting_%i.json' % (int(subgroup.setting_number))))
    return
def run(args):
    import libtbx.load_env
    from libtbx.utils import Sorry

    usage = "%s [options] experiments.json indexed.pickle" % libtbx.env.dispatcher_name

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_reflections=True,
        read_experiments=True,
        check_format=False,
        epilog=help_message)

    params, options = parser.parse_args(show_diff_phil=True)

    reflections = flatten_reflections(params.input.reflections)
    experiments = flatten_experiments(params.input.experiments)
    if len(experiments) == 0 and len(reflections) == 0:
        parser.print_help()
        return
    if params.change_of_basis_op is None:
        raise Sorry("Please provide a change_of_basis_op.")

    reference_crystal = None
    if params.reference is not None:
        from dxtbx.serialize import load
        reference_experiments = load.experiment_list(
            params.reference, check_format=False)
        assert len(reference_experiments.crystals()) == 1
        reference_crystal = reference_experiments.crystals()[0]

    if len(experiments) and params.change_of_basis_op is libtbx.Auto:
        if reference_crystal is not None:
            if len(experiments.crystals()) > 1:
                raise Sorry("Only one crystal can be processed at a time")
            from dials.algorithms.indexing.compare_orientation_matrices \
                import difference_rotation_matrix_axis_angle

            cryst = experiments.crystals()[0]
            R, axis, angle, change_of_basis_op = \
                difference_rotation_matrix_axis_angle(cryst, reference_crystal)
            print("Change of basis op: %s" % change_of_basis_op)
            print("Rotation matrix to transform input crystal to reference:")
            print(R.mathematica_form(format="%.3f", one_row_per_line=True))
            print("Rotation of %.3f degrees" % angle,
                  "about axis (%.3f, %.3f, %.3f)" % axis)
        elif len(reflections):
            assert len(reflections) == 1

            # always re-map reflections to reciprocal space
            from dials.algorithms.indexing import indexer
            refl_copy = flex.reflection_table()
            for i, imageset in enumerate(experiments.imagesets()):
                if 'imageset_id' in reflections[0]:
                    sel = (reflections[0]['imageset_id'] == i)
                else:
                    sel = (reflections[0]['id'] == i)
                refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(
                    reflections[0].select(sel),
                    imageset.get_detector(), imageset.get_scan())
                indexer.indexer_base.map_centroids_to_reciprocal_space(
                    refl, imageset.get_detector(), imageset.get_beam(),
                    imageset.get_goniometer())
                refl_copy.extend(refl)

            # index the reflection list using the input experiments list
            refl_copy['id'] = flex.int(len(refl_copy), -1)
            from dials.algorithms.indexing import index_reflections
            index_reflections(refl_copy, experiments, tolerance=0.2)
            hkl_expt = refl_copy['miller_index']
            hkl_input = reflections[0]['miller_index']

            change_of_basis_op = derive_change_of_basis_op(hkl_input, hkl_expt)

            # reset experiments list since we don't want to reindex this
            experiments = []
    else:
        change_of_basis_op = sgtbx.change_of_basis_op(
            params.change_of_basis_op)

    if len(experiments):
        for crystal in experiments.crystals():
            cryst_orig = copy.deepcopy(crystal)
            cryst_reindexed = cryst_orig.change_basis(change_of_basis_op)
            if params.space_group is not None:
                a, b, c = cryst_reindexed.get_real_space_vectors()
                cryst_reindexed = Crystal(
                    a, b, c, space_group=params.space_group.group())
            crystal.update(cryst_reindexed)
            print("Old crystal:")
            print(cryst_orig)
            print()
            print("New crystal:")
            print(cryst_reindexed)
            print()

        print("Saving reindexed experimental models to %s" %
              params.output.experiments)
        dump.experiment_list(experiments, params.output.experiments)

    if len(reflections):
        assert len(reflections) == 1
        reflections = reflections[0]

        miller_indices = reflections['miller_index']

        if params.hkl_offset is not None:
            h, k, l = miller_indices.as_vec3_double().parts()
            h += params.hkl_offset[0]
            k += params.hkl_offset[1]
            l += params.hkl_offset[2]
            miller_indices = flex.miller_index(h.iround(), k.iround(), l.iround())
        non_integral_indices = change_of_basis_op.apply_results_in_non_integral_indices(
            miller_indices)
        if non_integral_indices.size() > 0:
            print(
                "Removing %i/%i reflections (change of basis results in non-integral indices)"
                % (non_integral_indices.size(), miller_indices.size()))
        sel = flex.bool(miller_indices.size(), True)
        sel.set_selected(non_integral_indices, False)

        miller_indices_reindexed = change_of_basis_op.apply(
            miller_indices.select(sel))
        reflections['miller_index'].set_selected(sel, miller_indices_reindexed)
        reflections['miller_index'].set_selected(~sel, (0, 0, 0))

        print("Saving reindexed reflections to %s" % params.output.reflections)
        easy_pickle.dump(params.output.reflections, reflections)
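# Minimal sketch (cctbx assumed; the operator is chosen for illustration) of
# the reindex-and-filter pattern above: indices that the change of basis
# would map to non-integral values are deselected before cb_op.apply runs.
from cctbx import sgtbx
from cctbx.array_family import flex

cb_op = sgtbx.change_of_basis_op("a,b,c")  # identity keeps all indices integral
hkl = flex.miller_index([(1, 2, 3), (0, 0, 1)])
bad = cb_op.apply_results_in_non_integral_indices(hkl)
keep = flex.bool(hkl.size(), True)
keep.set_selected(bad, False)
print(list(cb_op.apply(hkl.select(keep))))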
def export_experiments(self, filename):
    dump.experiment_list(self._experiments, filename)
    return filename
axis = gonio.get_rotation_axis()
start_angle, delta_angle = scan.get_oscillation()
crystal.rotate_around_origin(axis, start_angle + (delta_angle / 2), deg=True)
if output_format == "json":
    exp_list = ExperimentList()
    exp_list.append(
        Experiment(
            imageset=ImageSetFactory.make_imageset(list([imgname])),
            beam=beam,
            detector=detector,
            goniometer=gonio,
            scan=scan,
            crystal=crystal))
    if add_background_images == True:
        if len(bkglist) != 1:
            bkgname = bkglist[i]
        else:
            bkgname = bkglist[0]
        exp_list[0].imageset.external_lookup.pedestal.filename = \
            os.path.abspath(bkgname)
    dump.experiment_list(
        exp_list,
        json_dir + "/experiments_for_lunus_{0:05d}.json".format(imnum))
else:
    from scitbx import matrix
    A_matrix = matrix.sqr(crystal.get_A()).inverse()
    At = np.asarray(A_matrix.transpose()).reshape((3, 3))
    print At
    workdir = amatrix_dir_prefix + "{0}".format(imnum)
    if not os.path.isdir(workdir):
        command = 'mkdir {}'.format(workdir)
        call_params = shlex.split(command)
        subprocess.call(call_params)
    # np.save(workdir + "/At.npy", At)
    At.astype('float32').tofile(workdir + "/At.bin")
imnum = imnum + 1
def _index_prepare(self):
    """Prepare to do autoindexing - in XDS terms this will mean calling
    xycorr, init and colspot on the input images."""

    # decide on images to work with
    Debug.write("XDS INDEX PREPARE:")
    Debug.write("Wavelength: %.6f" % self.get_wavelength())
    Debug.write("Distance: %.2f" % self.get_distance())

    if self._indxr_images == []:
        _select_images_function = getattr(
            self, "_index_select_images_%s" % (self._index_select_images))
        wedges = _select_images_function()
        for wedge in wedges:
            self.add_indexer_image_wedge(wedge)
        self.set_indexer_prepare_done(True)

    all_images = self.get_matching_images()
    first = min(all_images)
    last = max(all_images)

    # next start to process these - first xycorr
    xycorr = self.Xycorr()
    xycorr.set_data_range(first, last)
    xycorr.set_background_range(self._indxr_images[0][0],
                                self._indxr_images[0][1])

    from dxtbx.serialize.xds import to_xds
    converter = to_xds(self.get_imageset())
    xds_beam_centre = converter.detector_origin
    xycorr.set_beam_centre(xds_beam_centre[0], xds_beam_centre[1])
    for block in self._indxr_images:
        xycorr.add_spot_range(block[0], block[1])

    # FIXME need to set the origin here
    xycorr.run()

    for file in ["X-CORRECTIONS.cbf", "Y-CORRECTIONS.cbf"]:
        self._indxr_payload[file] = xycorr.get_output_data_file(file)

    # next start to process these - then init
    if PhilIndex.params.xia2.settings.input.format.dynamic_shadowing:
        imageset = self._indxr_imagesets[0]
        masker = (imageset.get_format_class().get_instance(
            imageset.paths()[0]).get_masker())
        if masker is None:
            # disable dynamic_shadowing
            PhilIndex.params.xia2.settings.input.format.dynamic_shadowing = False

    if PhilIndex.params.xia2.settings.input.format.dynamic_shadowing:
        # find the region of the scan with the least predicted shadow
        # to use for background determination in XDS INIT step
        from dxtbx.serialize import dump
        from dxtbx.model.experiment_list import ExperimentListFactory

        imageset = self._indxr_imagesets[0]
        xsweep = self._indxr_sweeps[0]
        sweep_filename = os.path.join(
            self.get_working_directory(),
            "%s_indexed.expt" % xsweep.get_name())
        dump.experiment_list(
            ExperimentListFactory.from_imageset_and_crystal(imageset, None),
            sweep_filename,
        )

        from xia2.Wrappers.Dials.ShadowPlot import ShadowPlot

        shadow_plot = ShadowPlot()
        shadow_plot.set_working_directory(self.get_working_directory())
        auto_logfiler(shadow_plot)
        shadow_plot.set_sweep_filename(sweep_filename)
        shadow_plot.set_json_filename(
            os.path.join(
                self.get_working_directory(),
                "%s_shadow_plot.json" % shadow_plot.get_xpid(),
            ))
        shadow_plot.run()
        results = shadow_plot.get_results()

        from scitbx.array_family import flex

        fraction_shadowed = flex.double(results["fraction_shadowed"])
        if flex.max(fraction_shadowed) == 0:
            PhilIndex.params.xia2.settings.input.format.dynamic_shadowing = False
        else:
            scan_points = flex.double(results["scan_points"])

            scan = imageset.get_scan()
            oscillation = scan.get_oscillation()

            if self._background_images is not None:
                bg_images = self._background_images
                bg_range_deg = (
                    scan.get_angle_from_image_index(bg_images[0]),
                    scan.get_angle_from_image_index(bg_images[1]),
                )
                bg_range_width = bg_range_deg[1] - bg_range_deg[0]

                min_shadow = 100
                best_bg_range = bg_range_deg
                from libtbx.utils import frange

                for bg_range_start in frange(
                        flex.min(scan_points),
                        flex.max(scan_points) - bg_range_width,
                        step=oscillation[1],
                ):
                    bg_range_deg = (bg_range_start,
                                    bg_range_start + bg_range_width)
                    sel = (scan_points >= bg_range_deg[0]) & (
                        scan_points <= bg_range_deg[1])
                    mean_shadow = flex.mean(fraction_shadowed.select(sel))
                    if mean_shadow < min_shadow:
                        min_shadow = mean_shadow
                        best_bg_range = bg_range_deg

                self._background_images = (
                    scan.get_image_index_from_angle(best_bg_range[0]),
                    scan.get_image_index_from_angle(best_bg_range[1]),
                )
                Debug.write("Setting background images: %s -> %s" %
                            self._background_images)

    init = self.Init()

    for file in ["X-CORRECTIONS.cbf", "Y-CORRECTIONS.cbf"]:
        init.set_input_data_file(file, self._indxr_payload[file])

    init.set_data_range(first, last)

    if self._background_images:
        init.set_background_range(self._background_images[0],
                                  self._background_images[1])
    else:
        init.set_background_range(self._indxr_images[0][0],
                                  self._indxr_images[0][1])

    for block in self._indxr_images:
        init.add_spot_range(block[0], block[1])

    init.run()

    # at this stage, need to (perhaps) modify the BKGINIT.cbf image
    # to mark out the back stop
    if PhilIndex.params.xds.backstop_mask:
        Debug.write("Applying mask to BKGINIT.pck")

        # copy the original file
        cbf_old = os.path.join(init.get_working_directory(), "BKGINIT.cbf")
        cbf_save = os.path.join(init.get_working_directory(), "BKGINIT.sav")
        shutil.copyfile(cbf_old, cbf_save)

        # modify the file to give the new mask
        from xia2.Toolkit.BackstopMask import BackstopMask
        mask = BackstopMask(PhilIndex.params.xds.backstop_mask)
        mask.apply_mask_xds(self.get_header(), cbf_save, cbf_old)

        init.reload()

    for file in ["BLANK.cbf", "BKGINIT.cbf", "GAIN.cbf"]:
        self._indxr_payload[file] = init.get_output_data_file(file)

    if PhilIndex.params.xia2.settings.developmental.use_dials_spotfinder:
        spotfinder = self.DialsSpotfinder()

        for block in self._indxr_images:
            spotfinder.add_spot_range(block[0], block[1])

        spotfinder.run()
        export = self.DialsExportSpotXDS()
        export.set_input_data_file(
            "observations.refl",
            spotfinder.get_output_data_file("observations.refl"),
        )
        export.run()

        for file in ["SPOT.XDS"]:
            self._indxr_payload[file] = export.get_output_data_file(file)
    else:
        # next start to process these - then colspot
        colspot = self.Colspot()

        for file in [
                "X-CORRECTIONS.cbf",
                "Y-CORRECTIONS.cbf",
                "BLANK.cbf",
                "BKGINIT.cbf",
                "GAIN.cbf",
        ]:
            colspot.set_input_data_file(file, self._indxr_payload[file])

        colspot.set_data_range(first, last)
        colspot.set_background_range(self._indxr_images[0][0],
                                     self._indxr_images[0][1])
        for block in self._indxr_images:
            colspot.add_spot_range(block[0], block[1])

        colspot.run()

        for file in ["SPOT.XDS"]:
            self._indxr_payload[file] = colspot.get_output_data_file(file)
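# Pure-Python sketch of the sliding-window search above: move a fixed-width
# window across the scan and keep the window with the lowest mean predicted
# shadow fraction. Inputs are hypothetical stand-ins for the flex arrays.
def least_shadowed_range(scan_points, fraction_shadowed, width, step):
    best_range, min_shadow = None, float("inf")
    start = min(scan_points)
    while start <= max(scan_points) - width:
        window = [f for p, f in zip(scan_points, fraction_shadowed)
                  if start <= p <= start + width]
        if window:
            mean_shadow = sum(window) / len(window)
            if mean_shadow < min_shadow:
                min_shadow, best_range = mean_shadow, (start, start + width)
        start += step
    return best_range

print(least_shadowed_range([0, 1, 2, 3, 4], [0.2, 0.0, 0.0, 0.3, 0.4], 1, 1))
# -> (1, 2)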
def run(args):
    import libtbx.load_env
    from dials.util import Sorry

    usage = "dials.reindex [options] indexed.expt indexed.refl"

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_reflections=True,
        read_experiments=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=True)

    reflections = flatten_reflections(params.input.reflections)
    experiments = flatten_experiments(params.input.experiments)
    if len(experiments) == 0 and len(reflections) == 0:
        parser.print_help()
        return
    if params.change_of_basis_op is None:
        raise Sorry("Please provide a change_of_basis_op.")

    reference_crystal = None
    if params.reference.experiments is not None:
        from dxtbx.serialize import load

        reference_experiments = load.experiment_list(
            params.reference.experiments, check_format=False)
        assert len(reference_experiments.crystals()) == 1
        reference_crystal = reference_experiments.crystals()[0]

    if params.reference.reflections is not None:
        # First check that we have everything as expected for the reference reindexing
        # Currently only supports reindexing one dataset at a time
        if params.reference.experiments is None:
            raise Sorry(
                """For reindexing against a reference dataset, a reference
experiments file must also be specified with the option: reference= """)
        if not os.path.exists(params.reference.reflections):
            raise Sorry("Could not locate reference dataset reflection file")
        if len(experiments) != 1 or len(reflections) != 1:
            raise Sorry(
                "Only one dataset can be reindexed to a reference at a time")

        reference_reflections = flex.reflection_table().from_file(
            params.reference.reflections)
        test_reflections = reflections[0]

        if (reference_crystal.get_space_group().type().number() !=
                experiments.crystals()[0].get_space_group().type().number()):
            raise Sorry("Space group of input does not match reference")

        # Set some flags to allow filtering, if wanting to reindex against
        # reference with data that has not yet been through integration
        if (test_reflections.get_flags(
                test_reflections.flags.integrated_sum).count(True) == 0):
            assert "intensity.sum.value" in test_reflections, \
                "No 'intensity.sum.value' in reflections"
            test_reflections.set_flags(
                flex.bool(test_reflections.size(), True),
                test_reflections.flags.integrated_sum,
            )
        if (reference_reflections.get_flags(
                reference_reflections.flags.integrated_sum).count(True) == 0):
            assert "intensity.sum.value" in reference_reflections, \
                "No 'intensity.sum.value' in reference reflections"
            reference_reflections.set_flags(
                flex.bool(reference_reflections.size(), True),
                reference_reflections.flags.integrated_sum,
            )

        # Make miller array of the two datasets
        try:
            test_miller_set = filtered_arrays_from_experiments_reflections(
                experiments, [test_reflections])[0]
        except ValueError:
            raise Sorry("No reflections remain after filtering the test dataset")
        try:
            reference_miller_set = filtered_arrays_from_experiments_reflections(
                reference_experiments, [reference_reflections])[0]
        except ValueError:
            raise Sorry(
                "No reflections remain after filtering the reference dataset")

        from dials.algorithms.symmetry.reindex_to_reference import (
            determine_reindex_operator_against_reference,
        )

        change_of_basis_op = determine_reindex_operator_against_reference(
            test_miller_set, reference_miller_set)

    elif len(experiments) and params.change_of_basis_op is libtbx.Auto:
        if reference_crystal is not None:
            if len(experiments.crystals()) > 1:
                raise Sorry("Only one crystal can be processed at a time")
            from dials.algorithms.indexing.compare_orientation_matrices import (
                difference_rotation_matrix_axis_angle,
            )

            cryst = experiments.crystals()[0]
            R, axis, angle, change_of_basis_op = difference_rotation_matrix_axis_angle(
                cryst, reference_crystal)
            print("Change of basis op: %s" % change_of_basis_op)
            print("Rotation matrix to transform input crystal to reference:")
            print(R.mathematica_form(format="%.3f", one_row_per_line=True))
            print(
                "Rotation of %.3f degrees" % angle,
                "about axis (%.3f, %.3f, %.3f)" % axis,
            )
        elif len(reflections):
            assert len(reflections) == 1

            # always re-map reflections to reciprocal space
            refl_copy = flex.reflection_table()
            for i, imageset in enumerate(experiments.imagesets()):
                if "imageset_id" in reflections[0]:
                    sel = reflections[0]["imageset_id"] == i
                else:
                    sel = reflections[0]["id"] == i
                refl = reflections[0].select(sel)
                refl.centroid_px_to_mm(imageset.get_detector(),
                                       imageset.get_scan())
                refl.map_centroids_to_reciprocal_space(
                    imageset.get_detector(),
                    imageset.get_beam(),
                    imageset.get_goniometer(),
                )
                refl_copy.extend(refl)

            # index the reflection list using the input experiments list
            refl_copy["id"] = flex.int(len(refl_copy), -1)
            from dials.algorithms.indexing import index_reflections

            index_reflections(refl_copy, experiments, tolerance=0.2)
            hkl_expt = refl_copy["miller_index"]
            hkl_input = reflections[0]["miller_index"]

            change_of_basis_op = derive_change_of_basis_op(hkl_input, hkl_expt)

            # reset experiments list since we don't want to reindex this
            experiments = []
    else:
        change_of_basis_op = sgtbx.change_of_basis_op(params.change_of_basis_op)

    if len(experiments):
        for crystal in experiments.crystals():
            cryst_orig = copy.deepcopy(crystal)
            cryst_reindexed = cryst_orig.change_basis(change_of_basis_op)
            if params.space_group is not None:
                a, b, c = cryst_reindexed.get_real_space_vectors()
                A_varying = [
                    cryst_reindexed.get_A_at_scan_point(i)
                    for i in range(cryst_reindexed.num_scan_points)
                ]
                cryst_reindexed = Crystal(
                    a, b, c, space_group=params.space_group.group())
                cryst_reindexed.set_A_at_scan_points(A_varying)
            crystal.update(cryst_reindexed)
            print("Old crystal:")
            print(cryst_orig)
            print()
            print("New crystal:")
            print(cryst_reindexed)
            print()

        print("Saving reindexed experimental models to %s" %
              params.output.experiments)
        dump.experiment_list(experiments, params.output.experiments)

    if len(reflections):
        assert len(reflections) == 1
        reflections = reflections[0]

        miller_indices = reflections["miller_index"]

        if params.hkl_offset is not None:
            h, k, l = miller_indices.as_vec3_double().parts()
            h += params.hkl_offset[0]
            k += params.hkl_offset[1]
            l += params.hkl_offset[2]
            miller_indices = flex.miller_index(h.iround(), k.iround(), l.iround())
        non_integral_indices = change_of_basis_op.apply_results_in_non_integral_indices(
            miller_indices)
        if non_integral_indices.size() > 0:
            print(
                "Removing %i/%i reflections (change of basis results in non-integral indices)"
                % (non_integral_indices.size(), miller_indices.size()))
        sel = flex.bool(miller_indices.size(), True)
        sel.set_selected(non_integral_indices, False)

        miller_indices_reindexed = change_of_basis_op.apply(
            miller_indices.select(sel))
        reflections["miller_index"].set_selected(sel, miller_indices_reindexed)
        reflections["miller_index"].set_selected(~sel, (0, 0, 0))

        print("Saving reindexed reflections to %s" % params.output.reflections)
        easy_pickle.dump(params.output.reflections, reflections)
def run(args): usage = "dials.search_beam_position [options] imported.expt strong.refl" parser = OptionParser( usage=usage, phil=phil_scope, read_experiments=True, read_reflections=True, check_format=False, epilog=help_message, ) params, options = parser.parse_args(show_diff_phil=False) experiments = flatten_experiments(params.input.experiments) reflections = flatten_reflections(params.input.reflections) if len(experiments) == 0 or len(reflections) == 0: parser.print_help() exit(0) # Configure the logging log.config(info=params.output.log, debug=params.output.debug_log) # Log the diff phil diff_phil = parser.diff_phil.as_str() if diff_phil != "": logger.info("The following parameters have been modified:\n") logger.info(diff_phil) if params.seed is not None: flex.set_random_seed(params.seed) random.seed(params.seed) imagesets = experiments.imagesets() # Split all the refln tables by ID, corresponding to the respective imagesets reflections = [ refl_unique_id for refl in reflections for refl_unique_id in refl.split_by_experiment_id() ] assert len(imagesets) > 0 assert len(reflections) == len(imagesets) if params.image_range is not None and len(params.image_range) > 0: reflections = [ slice_reflections(refl, params.image_range) for refl in reflections ] dps_params = dps_phil_scope.extract() # for development, we want an exhaustive plot of beam probability map: dps_params.indexing.plot_search_scope = params.plot_search_scope dps_params.indexing.mm_search_scope = params.mm_search_scope for i in range(params.n_macro_cycles): if params.n_macro_cycles > 1: logger.info("Starting macro cycle %i" % (i + 1)) new_detector, new_beam = discover_better_experimental_model( imagesets, reflections, params, dps_params, nproc=params.nproc, wide_search_binning=params.wide_search_binning, ) for experiment in experiments: experiment.beam = new_beam experiment.detector = new_detector logger.info("") logger.info("Saving optimized experiments to %s" % params.output.experiments) dump.experiment_list(experiments, params.output.experiments)
def test(dials_regression, tmpdir):
    from dials.array_family import flex
    from dxtbx.model.experiment_list import ExperimentListFactory

    tmpdir.chdir()

    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "multi_narrow_wedges")

    input_range = range(2, 49)
    for i in (8, 10, 15, 16, 34, 39, 45):
        input_range.remove(i)

    phil_input = "\n".join(
        ("  input.experiments={0}/data/sweep_%03d/experiments.json\n" +
         "  input.reflections={0}/data/sweep_%03d/reflections.pickle") % (i, i)
        for i in input_range)
    # assert phil_input == "\n" + phil_input2 + "\n "

    input_phil = phil_input.format(data_dir) + """
 reference_from_experiment.beam=0
 reference_from_experiment.scan=0
 reference_from_experiment.goniometer=0
 reference_from_experiment.detector=0
 """
    with open("input.phil", "w") as phil_file:
        phil_file.writelines(input_phil)

    result = procrunner.run_process(
        ["dials.combine_experiments", "input.phil"])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''

    # load results
    exp = ExperimentListFactory.from_json_file("combined_experiments.json",
                                               check_format=False)
    ref = flex.reflection_table.from_pickle("combined_reflections.pickle")

    # test the experiments
    assert len(exp) == 103
    assert len(exp.crystals()) == 103
    assert len(exp.beams()) == 1
    assert len(exp.scans()) == 1
    assert len(exp.detectors()) == 1
    assert len(exp.goniometers()) == 1
    for e in exp:
        assert e.imageset is not None

    # test the reflections
    assert len(ref) == 11689

    result = procrunner.run_process([
        "dials.split_experiments",
        "combined_experiments.json",
        "combined_reflections.pickle",
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''

    for i, e in enumerate(exp):
        assert os.path.exists("experiments_%03d.json" % i)
        assert os.path.exists("reflections_%03d.pickle" % i)

        exp_single = ExperimentListFactory.from_json_file(
            "experiments_%03d.json" % i, check_format=False)
        ref_single = flex.reflection_table.from_pickle(
            "reflections_%03d.pickle" % i)

        assert len(exp_single) == 1
        assert exp_single[0].crystal == e.crystal
        assert exp_single[0].beam == e.beam
        assert exp_single[0].detector == e.detector
        assert exp_single[0].scan == e.scan
        assert exp_single[0].goniometer == e.goniometer
        assert exp_single[0].imageset == e.imageset
        assert len(ref_single) == len(ref.select(ref['id'] == i))
        assert ref_single['id'].all_eq(0)

    result = procrunner.run_process([
        "dials.split_experiments",
        "combined_experiments.json",
        "output.experiments_prefix=test",
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''

    for i in range(len(exp)):
        assert os.path.exists("test_%03d.json" % i)

    # Modify a copy of the detector
    detector = copy.deepcopy(exp.detectors()[0])
    panel = detector[0]
    x, y, z = panel.get_origin()
    panel.set_frame(panel.get_fast_axis(), panel.get_slow_axis(),
                    (x, y, z + 10))

    # Set half of the experiments to the new detector
    for i in xrange(len(exp) // 2):
        exp[i].detector = detector

    from dxtbx.serialize import dump
    dump.experiment_list(exp, "modded_experiments.json")

    result = procrunner.run_process([
        "dials.split_experiments",
        "modded_experiments.json",
        "combined_reflections.pickle",
        "output.experiments_prefix=test_by_detector",
        "output.reflections_prefix=test_by_detector",
        "by_detector=True",
    ])
    assert result['exitcode'] == 0
    assert result['stderr'] == ''

    for i in range(2):
        assert os.path.exists("test_by_detector_%03d.json" % i)
        assert os.path.exists("test_by_detector_%03d.pickle" % i)
    assert not os.path.exists("test_by_detector_%03d.json" % 2)
    assert not os.path.exists("test_by_detector_%03d.pickle" % 2)
def run(self):
    '''Parse the options and superpose the moving detector onto the reference.'''
    # Parse the command line arguments
    params, options = self.parser.parse_args(show_diff_phil=True)

    reference_experiments = ExperimentListFactory.from_json_file(
        params.reference_experiments, check_format=False)
    if len(reference_experiments.detectors()) != 1:
        raise Sorry("Please ensure reference has only 1 detector model")
    reference = reference_experiments.detectors()[0]

    moving_experiments = ExperimentListFactory.from_json_file(
        params.moving_experiments, check_format=False)
    if len(moving_experiments.detectors()) != 1:
        raise Sorry("Please ensure moving has only 1 detector model")
    moving = moving_experiments.detectors()[0]

    # Get list of panels to compare
    if params.panel_list is None or len(params.panel_list) == 0:
        assert len(reference) == len(moving), "Detectors not same length"
        panel_ids = range(len(reference))
    else:
        max_p_id = max(params.panel_list)
        assert max_p_id < len(reference), (
            "Reference detector must be at least %d panels long given the panel list"
            % (max_p_id + 1))
        assert max_p_id < len(moving), (
            "Moving detector must be at least %d panels long given the panel list"
            % (max_p_id + 1))
        panel_ids = params.panel_list

    if params.fit_target == "centers":
        assert len(panel_ids) >= 3, (
            "When using centers as target for superpose, detector needs at least 3 panels")

    def rmsd_from_centers(a, b):
        assert len(a) == len(b)
        assert len(a) % 4 == len(b) % 4 == 0
        ca = flex.vec3_double()
        cb = flex.vec3_double()
        for i in range(len(a) // 4):
            ca.append(a[i:i + 4].mean())
            cb.append(b[i:i + 4].mean())
        return 1000 * math.sqrt((ca - cb).sum_sq() / len(ca))

    cycles = 0
    while True:
        cycles += 1

        # Treat panels as a list of 4 sites (corners) or 1 site (centers) for
        # use with lsq superpose
        reference_sites = flex.vec3_double()
        moving_sites = flex.vec3_double()
        for panel_id in panel_ids:
            for detector, sites in zip([reference, moving],
                                       [reference_sites, moving_sites]):
                panel = detector[panel_id]
                size = panel.get_image_size()
                corners = flex.vec3_double([
                    panel.get_pixel_lab_coord(point)
                    for point in [(0, 0), (0, size[1] - 1),
                                  (size[0] - 1, size[1] - 1), (size[0] - 1, 0)]
                ])
                if params.fit_target == "corners":
                    sites.extend(corners)
                elif params.fit_target == "centers":
                    sites.append(corners.mean())

        # Compute superposition
        rmsd = 1000 * math.sqrt(
            (reference_sites - moving_sites).sum_sq() / len(reference_sites))
        print("RMSD before fit: %.1f microns" % rmsd)

        if params.fit_target == "corners":
            rmsd = rmsd_from_centers(reference_sites, moving_sites)
            print("RMSD of centers before fit: %.1f microns" % rmsd)

        lsq = least_squares_fit(reference_sites, moving_sites)
        rmsd = 1000 * math.sqrt(
            (reference_sites - lsq.other_sites_best_fit()).sum_sq() /
            len(reference_sites))
        print("RMSD of fit: %.1f microns" % rmsd)

        if params.fit_target == "corners":
            rmsd = rmsd_from_centers(reference_sites, lsq.other_sites_best_fit())
            print("RMSD of fit of centers: %.1f microns" % rmsd)

        angle, axis = lsq.r.r3_rotation_matrix_as_unit_quaternion(
        ).unit_quaternion_as_axis_and_angle(deg=True)
        print("Axis and angle of rotation: (%.3f, %.3f, %.3f), %.2f degrees" %
              (axis[0], axis[1], axis[2], angle))
        print("Translation (x, y, z, in microns): (%.3f, %.3f, %.3f)" %
              (1000 * lsq.t).elems)

        # Apply the shifts
        if params.apply_at_hierarchy_level is None:
            iterable = moving
        else:
            iterable = iterate_detector_at_level(
                moving.hierarchy(), level=params.apply_at_hierarchy_level)
        for group in iterable:
            fast = col(group.get_fast_axis())
            slow = col(group.get_slow_axis())
            ori = col(group.get_origin())
            group.set_frame(lsq.r * fast, lsq.r * slow, (lsq.r * ori) + lsq.t)
            fast = col(group.get_fast_axis())
            slow = col(group.get_slow_axis())
            ori = col(group.get_origin())

        if not params.repeat_until_converged:
            break

        if approx_equal(angle, 0.0, out=None) and approx_equal(
                (1000 * lsq.t).length(), 0.0, out=None):
            print("Converged after", cycles, "cycles")
            break
        else:
            print("Movement not close to zero, repeating fit")
            print()

    from dxtbx.serialize import dump
    dump.experiment_list(moving_experiments, params.output_experiments)

    moved_sites = flex.vec3_double()
    for panel_id in panel_ids:
        panel = moving[panel_id]
        size = panel.get_image_size()
        corners = flex.vec3_double([
            panel.get_pixel_lab_coord(point)
            for point in [(0, 0), (0, size[1] - 1),
                          (size[0] - 1, size[1] - 1), (size[0] - 1, 0)]
        ])
        if params.fit_target == "corners":
            moved_sites.extend(corners)
        elif params.fit_target == "centers":
            moved_sites.append(corners.mean())

    # Re-compute RMSD after moving detector components
    rmsd = 1000 * math.sqrt(
        (reference_sites - moved_sites).sum_sq() / len(reference_sites))
    print("RMSD of fit after movement: %.1f microns" % rmsd)

    if params.fit_target == "corners":
        rmsd = rmsd_from_centers(reference_sites, moved_sites)
        print("RMSD of fit of centers after movement: %.1f microns" % rmsd)

    if params.panel_list is not None:
        reference_sites = flex.vec3_double()
        moved_sites = flex.vec3_double()
        for panel_id in range(len(reference)):
            for detector, sites in zip([reference, moving],
                                       [reference_sites, moved_sites]):
                panel = detector[panel_id]
                size = panel.get_image_size()
                corners = flex.vec3_double([
                    panel.get_pixel_lab_coord(point)
                    for point in [(0, 0), (0, size[1] - 1),
                                  (size[0] - 1, size[1] - 1), (size[0] - 1, 0)]
                ])
                if params.fit_target == "corners":
                    sites.extend(corners)
                elif params.fit_target == "centers":
                    sites.append(corners.mean())

        # Re-compute RMSD for full detector after moving detector components
        rmsd = 1000 * math.sqrt(
            (reference_sites - moved_sites).sum_sq() / len(reference_sites))
        print("RMSD of whole detector fit after movement: %.1f microns" % rmsd)

        if params.fit_target == "corners":
            rmsd = rmsd_from_centers(reference_sites, moved_sites)
            print("RMSD of whole detector fit of centers after movement: %.1f microns"
                  % rmsd)

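# A minimal, self-contained sketch of the superposition step used above.
# scitbx's least_squares_fit recovers the rigid-body motion (lsq.r, lsq.t)
# mapping one set of sites onto another, and the RMSD follows the same
# 1000 * sqrt(sum_sq / n) convention (sites in mm, RMSD in microns). The
# coordinates here are illustrative only.
import math
from scitbx.array_family import flex
from scitbx.math import superpose

reference = flex.vec3_double([(0, 0, 0), (1, 0, 0), (0, 1, 0), (0, 0, 1)])
# the same sites displaced by 0.5 mm along x
moving = flex.vec3_double([(x + 0.5, y, z) for x, y, z in reference])

lsq = superpose.least_squares_fit(reference, moving)
# residual RMSD after the fit, in microns
rmsd = 1000 * math.sqrt(
    (reference - lsq.other_sites_best_fit()).sum_sq() / len(reference))
print("RMSD of fit: %.1f microns" % rmsd)   # ~0.0 for a pure translation
print("Recovered translation:", lsq.t.elems)  # approximately (-0.5, 0, 0)
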
def run(args=None):
    from dials.util import log

    usage = "dials.refine_bravais_settings indexed.expt indexed.refl [options]"

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_experiments=True,
        read_reflections=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(args=args, show_diff_phil=False)

    # Configure the logging
    log.config(verbosity=options.verbose, logfile=params.output.log)

    from dials.util.version import dials_version
    logger.info(dials_version())

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != "":
        logger.info("The following parameters have been modified:\n")
        logger.info(diff_phil)

    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if len(reflections) == 0 or len(experiments) == 0:
        parser.print_help()
        return

    assert len(reflections) == 1
    reflections = reflections[0]

    if len(experiments.crystals()) > 1:
        if params.crystal_id is not None:
            assert params.crystal_id < len(experiments.crystals())
            experiment_ids = experiments.where(
                crystal=experiments.crystals()[params.crystal_id])
            from dxtbx.model.experiment_list import ExperimentList
            experiments = ExperimentList(
                [experiments[i] for i in experiment_ids])
            refl_selections = [reflections["id"] == i for i in experiment_ids]
            reflections["id"] = flex.int(len(reflections), -1)
            for i, sel in enumerate(refl_selections):
                reflections["id"].set_selected(sel, i)
            reflections = reflections.select(reflections["id"] > -1)
        else:
            raise Sorry(
                "Only one crystal can be processed at a time: set crystal_id to choose experiment."
            )

    if params.refinement.reflections.outlier.algorithm in ("auto", libtbx.Auto):
        if experiments[0].goniometer is None:
            params.refinement.reflections.outlier.algorithm = "sauter_poon"
        else:
            # different default to dials.refine
            # tukey is faster and more appropriate at the indexing step
            params.refinement.reflections.outlier.algorithm = "tukey"

    from dials.algorithms.indexing.symmetry import (
        refined_settings_factory_from_refined_triclinic,
    )

    cb_op_to_primitive = (experiments[0].crystal.get_space_group().info()
                          .change_of_basis_op_to_primitive_setting())
    if experiments[0].crystal.get_space_group().n_ltr() > 1:
        effective_group = (experiments[0].crystal.get_space_group()
                           .build_derived_reflection_intensity_group(
                               anomalous_flag=True))
        sys_absent_flags = effective_group.is_sys_absent(
            reflections["miller_index"])
        reflections = reflections.select(~sys_absent_flags)
    experiments[0].crystal.update(
        experiments[0].crystal.change_basis(cb_op_to_primitive))
    miller_indices = reflections["miller_index"]
    miller_indices = cb_op_to_primitive.apply(miller_indices)
    reflections["miller_index"] = miller_indices

    Lfat = refined_settings_factory_from_refined_triclinic(
        params,
        experiments,
        reflections,
        lepage_max_delta=params.lepage_max_delta,
        nproc=params.nproc,
    )
    possible_bravais_settings = {solution["bravais"] for solution in Lfat}
    bravais_lattice_to_space_group_table(possible_bravais_settings)
    logger.info(Lfat.labelit_printout())

    prefix = params.output.prefix
    if prefix is None:
        prefix = ""
    summary_file = "%sbravais_summary.json" % prefix
    logger.info("Saving summary as %s" % summary_file)
    with open(os.path.join(params.output.directory, summary_file), "w") as fh:
        json.dump(Lfat.as_dict(), fh)

    from dxtbx.serialize import dump
    for subgroup in Lfat:
        expts = subgroup.refined_experiments
        soln = int(subgroup.setting_number)
        bs_json = "%sbravais_setting_%i.expt" % (prefix, soln)
        logger.info("Saving solution %i as %s" % (soln, bs_json))
        dump.experiment_list(expts,
                             os.path.join(params.output.directory, bs_json))

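# The primitive-setting step above can be exercised in isolation. For a
# centred space group (n_ltr() > 1) the change-of-basis operator to the
# primitive setting comes from the space group info, and centring absences
# can be flagged with the derived reflection-intensity group. A minimal
# sketch; the space group and index below are illustrative.
from cctbx import sgtbx

space_group = sgtbx.space_group_info("C 2 2 2").group()
print(space_group.n_ltr())  # 2: C-centred, one extra lattice translation
cb_op_to_primitive = space_group.info().change_of_basis_op_to_primitive_setting()
print(cb_op_to_primitive.as_xyz())

effective_group = space_group.build_derived_reflection_intensity_group(
    anomalous_flag=True)
print(effective_group.is_sys_absent((1, 0, 0)))  # True: h+k odd under C centring
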
def run(args):
    import libtbx.load_env
    from libtbx.utils import Sorry

    usage = "%s [options] experiments.json indexed.pickle" % libtbx.env.dispatcher_name

    parser = OptionParser(
        usage=usage,
        phil=phil_scope,
        read_reflections=True,
        read_experiments=True,
        check_format=False,
        epilog=help_message,
    )

    params, options = parser.parse_args(show_diff_phil=True)

    reflections = flatten_reflections(params.input.reflections)
    experiments = flatten_experiments(params.input.experiments)
    if len(experiments) == 0 and len(reflections) == 0:
        parser.print_help()
        return
    elif len(experiments.crystals()) > 1:
        raise Sorry("Only one crystal can be processed at a time")
    if params.change_of_basis_op is None:
        raise Sorry("Please provide a change_of_basis_op.")

    reference_crystal = None
    if params.reference is not None:
        from dxtbx.serialize import load
        reference_experiments = load.experiment_list(params.reference,
                                                     check_format=False)
        assert len(reference_experiments.crystals()) == 1
        reference_crystal = reference_experiments.crystals()[0]

    if len(experiments) and params.change_of_basis_op is libtbx.Auto:
        if reference_crystal is not None:
            from dials.algorithms.indexing.compare_orientation_matrices import (
                difference_rotation_matrix_and_euler_angles,
            )
            cryst = experiments.crystals()[0]
            R, euler_angles, change_of_basis_op = \
                difference_rotation_matrix_and_euler_angles(
                    cryst, reference_crystal)
            print("Change of basis op: %s" % change_of_basis_op)
            print("Rotation matrix to transform input crystal to reference:")
            print(R.mathematica_form(format="%.3f", one_row_per_line=True))
            print("Euler angles (xyz): %.2f, %.2f, %.2f" % euler_angles)
        elif len(reflections):
            assert len(reflections) == 1

            # always re-map reflections to reciprocal space
            from dials.algorithms.indexing import indexer
            refl_copy = flex.reflection_table()
            for i, imageset in enumerate(experiments.imagesets()):
                if "imageset_id" in reflections[0]:
                    sel = reflections[0]["imageset_id"] == i
                else:
                    sel = reflections[0]["id"] == i
                refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(
                    reflections[0].select(sel), imageset.get_detector(),
                    imageset.get_scan())
                indexer.indexer_base.map_centroids_to_reciprocal_space(
                    refl, imageset.get_detector(), imageset.get_beam(),
                    imageset.get_goniometer())
                refl_copy.extend(refl)

            # index the reflection list using the input experiments list
            refl_copy["id"] = flex.int(len(refl_copy), -1)
            from dials.algorithms.indexing import index_reflections
            index_reflections(refl_copy, experiments, tolerance=0.2)
            hkl_expt = refl_copy["miller_index"]
            hkl_input = reflections[0]["miller_index"]
            change_of_basis_op = derive_change_of_basis_op(hkl_input, hkl_expt)

            # reset experiments list since we don't want to reindex this
            experiments = []
    else:
        change_of_basis_op = sgtbx.change_of_basis_op(params.change_of_basis_op)

    if len(experiments):
        experiment = experiments[0]
        cryst_orig = copy.deepcopy(experiment.crystal)
        cryst_reindexed = cryst_orig.change_basis(change_of_basis_op)
        if params.space_group is not None:
            a, b, c = cryst_reindexed.get_real_space_vectors()
            cryst_reindexed = crystal_model(
                a, b, c, space_group=params.space_group.group())
        experiment.crystal.update(cryst_reindexed)

        print("Old crystal:")
        print(cryst_orig)
        print()
        print("New crystal:")
        print(cryst_reindexed)
        print()

        print("Saving reindexed experimental models to %s" %
              params.output.experiments)
        dump.experiment_list(experiments, params.output.experiments)

    if len(reflections):
        assert len(reflections) == 1
        reflections = reflections[0]

        miller_indices = reflections["miller_index"]

        if params.hkl_offset is not None:
            h, k, l = miller_indices.as_vec3_double().parts()
            h += params.hkl_offset[0]
            k += params.hkl_offset[1]
            l += params.hkl_offset[2]
            miller_indices = flex.miller_index(h.iround(), k.iround(),
                                               l.iround())
        non_integral_indices = \
            change_of_basis_op.apply_results_in_non_integral_indices(
                miller_indices)
        if non_integral_indices.size() > 0:
            print("Removing %i/%i reflections (change of basis results in non-integral indices)"
                  % (non_integral_indices.size(), miller_indices.size()))
        sel = flex.bool(miller_indices.size(), True)
        sel.set_selected(non_integral_indices, False)
        miller_indices_reindexed = change_of_basis_op.apply(
            miller_indices.select(sel))
        reflections["miller_index"].set_selected(sel, miller_indices_reindexed)
        reflections["miller_index"].set_selected(~sel, (0, 0, 0))

        print("Saving reindexed reflections to %s" % params.output.reflections)
        easy_pickle.dump(params.output.reflections, reflections)

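# The hkl_offset branch above decomposes Miller indices into double-precision
# h, k, l columns, shifts them, and rebuilds integer indices. The same pattern
# in isolation; the indices and offset here are made up.
from cctbx.array_family import flex

miller_indices = flex.miller_index([(1, 2, 3), (-1, 0, 4)])
offset = (0, 0, 1)  # illustrative offset
h, k, l = miller_indices.as_vec3_double().parts()
h += offset[0]
k += offset[1]
l += offset[2]
shifted = flex.miller_index(h.iround(), k.iround(), l.iround())
print(list(shifted))  # [(1, 2, 4), (-1, 0, 5)]
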
def run(self):
    '''Execute the script.'''
    from dials.util.options import flatten_reflections, flatten_experiments
    from libtbx.utils import Sorry
    from dials.array_family import flex

    # Parse the command line
    params, options = self.parser.parse_args(show_diff_phil=True)

    # Try to load the models and data
    if not params.input.experiments:
        print("No Experiments found in the input")
        self.parser.print_help()
        return
    if params.input.reflections:
        if len(params.input.reflections) != len(params.input.experiments):
            raise Sorry(
                "The number of input reflections files does not match the "
                "number of input experiments")

    experiments = flatten_experiments(params.input.experiments)
    if params.input.reflections:
        reflections = flatten_reflections(params.input.reflections)[0]
    else:
        reflections = None

    import math
    experiments_template = "%s_%%0%sd.json" % (
        params.output.experiments_prefix,
        int(math.floor(math.log10(len(experiments))) + 1))
    reflections_template = "%s_%%0%sd.pickle" % (
        params.output.reflections_prefix,
        int(math.floor(math.log10(len(experiments))) + 1))

    from dxtbx.model.experiment_list import ExperimentList
    from dxtbx.serialize import dump

    if params.by_detector:
        if reflections is None:
            split_data = {
                detector: {'experiments': ExperimentList()}
                for detector in experiments.detectors()
            }
        else:
            split_data = {
                detector: {
                    'experiments': ExperimentList(),
                    'reflections': flex.reflection_table()
                }
                for detector in experiments.detectors()
            }
        for i, experiment in enumerate(experiments):
            split_expt_id = experiments.detectors().index(experiment.detector)
            experiment_filename = experiments_template % split_expt_id
            print('Adding experiment %d to %s' % (i, experiment_filename))
            split_data[experiment.detector]['experiments'].append(experiment)
            if reflections is not None:
                reflections_filename = reflections_template % split_expt_id
                print('Adding reflections for experiment %d to %s' %
                      (i, reflections_filename))
                ref_sel = reflections.select(reflections['id'] == i)
                ref_sel['id'] = flex.int(
                    len(ref_sel),
                    len(split_data[experiment.detector]['experiments']) - 1)
                split_data[experiment.detector]['reflections'].extend(ref_sel)

        for i, detector in enumerate(experiments.detectors()):
            experiment_filename = experiments_template % i
            print('Saving experiment %d to %s' % (i, experiment_filename))
            dump.experiment_list(split_data[detector]['experiments'],
                                 experiment_filename)

            if reflections is not None:
                reflections_filename = reflections_template % i
                print('Saving reflections for experiment %d to %s' %
                      (i, reflections_filename))
                split_data[detector]['reflections'].as_pickle(
                    reflections_filename)
    else:
        for i, experiment in enumerate(experiments):
            experiment_filename = experiments_template % i
            print('Saving experiment %d to %s' % (i, experiment_filename))
            dump.experiment_list(ExperimentList([experiment]),
                                 experiment_filename)

            if reflections is not None:
                reflections_filename = reflections_template % i
                print('Saving reflections for experiment %d to %s' %
                      (i, reflections_filename))
                ref_sel = reflections.select(reflections['id'] == i)
                ref_sel['id'] = flex.int(len(ref_sel), 0)
                ref_sel.as_pickle(reflections_filename)

    return

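# The filename templates above embed a zero-padded experiment id whose width
# is the number of digits in len(experiments); the "%%0%sd" escape leaves a
# literal %0Nd placeholder for the second substitution. A quick sketch with
# illustrative values:
import math

n_experiments = 123
width = int(math.floor(math.log10(n_experiments)) + 1)  # 3 digits
template = "%s_%%0%sd.json" % ("split", width)
print(template)      # split_%03d.json
print(template % 7)  # split_007.json
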
def run(args):
    import libtbx
    from libtbx import easy_pickle
    from dials.util import log
    from dials.util.options import OptionParser

    parser = OptionParser(
        #usage=usage,
        phil=phil_scope,
        read_reflections=True,
        read_datablocks=False,
        read_experiments=True,
        check_format=False,
        #epilog=help_message
    )

    params, options, args = parser.parse_args(show_diff_phil=False,
                                              return_unhandled=True)

    # Configure the logging
    log.config(params.verbosity,
               info=params.output.log,
               debug=params.output.debug_log)

    from dials.util.version import dials_version
    logger.info(dials_version())

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != '':
        logger.info('The following parameters have been modified:\n')
        logger.info(diff_phil)

    if params.seed is not None:
        import random
        flex.set_random_seed(params.seed)
        random.seed(params.seed)

    if params.save_plot and not params.animate:
        import matplotlib
        # http://matplotlib.org/faq/howto_faq.html#generate-images-without-having-a-window-appear
        matplotlib.use('Agg')  # use a non-interactive backend

    datasets_input = []

    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if len(experiments) or len(reflections):
        if len(reflections) == 1:
            reflections_input = reflections[0]
            reflections = []
            for i in range(len(experiments)):
                reflections.append(
                    reflections_input.select(reflections_input['id'] == i))

        if len(experiments) > len(reflections):
            flattened_reflections = []
            for refl in reflections:
                for i in range(0, flex.max(refl['id']) + 1):
                    sel = refl['id'] == i
                    flattened_reflections.append(refl.select(sel))
            reflections = flattened_reflections

        assert len(experiments) == len(reflections)

        for expt, refl in zip(experiments, reflections):
            crystal_symmetry = crystal.symmetry(
                unit_cell=expt.crystal.get_unit_cell(),
                space_group=expt.crystal.get_space_group())

            if 0 and 'intensity.prf.value' in refl:
                sel = refl.get_flags(refl.flags.integrated_prf)
                assert sel.count(True) > 0
                refl = refl.select(sel)
                data = refl['intensity.prf.value']
                variances = refl['intensity.prf.variance']
            else:
                assert 'intensity.sum.value' in refl
                sel = refl.get_flags(refl.flags.integrated_sum)
                assert sel.count(True) > 0
                refl = refl.select(sel)
                data = refl['intensity.sum.value']
                variances = refl['intensity.sum.variance']

            # FIXME probably need to do some filtering of intensities similar
            # to that done in export_mtz
            miller_indices = refl['miller_index']
            assert variances.all_gt(0)
            sigmas = flex.sqrt(variances)

            miller_set = miller.set(crystal_symmetry,
                                    miller_indices,
                                    anomalous_flag=False)
            intensities = miller.array(miller_set, data=data, sigmas=sigmas)
            intensities.set_observation_type_xray_intensity()
            intensities.set_info(
                miller.array_info(source='DIALS', source_type='pickle'))
            datasets_input.append(intensities)

    files = args

    for file_name in files:
        try:
            data = easy_pickle.load(file_name)
            intensities = data['observations'][0]
            intensities.set_info(
                miller.array_info(source=file_name, source_type='pickle'))
            intensities = intensities.customized_copy(
                anomalous_flag=False).set_info(intensities.info())
            batches = None
        except Exception:
            reader = any_reflection_file(file_name)
            assert reader.file_type() == 'ccp4_mtz'

            as_miller_arrays = reader.as_miller_arrays(merge_equivalents=False)
            intensities = [
                ma for ma in as_miller_arrays
                if ma.info().labels == ['I', 'SIGI']
            ][0]
            batches = [
                ma for ma in as_miller_arrays if ma.info().labels == ['BATCH']
            ]
            if len(batches):
                batches = batches[0]
            else:
                batches = None
            mtz_object = reader.file_content()
            intensities = intensities.customized_copy(
                anomalous_flag=False,
                indices=mtz_object.extract_original_index_miller_indices()
            ).set_info(intensities.info())

        intensities.set_observation_type_xray_intensity()
        datasets_input.append(intensities)

    if len(datasets_input) == 0:
        raise Sorry('No valid reflection files provided on command line')

    datasets = []
    for intensities in datasets_input:

        if params.batch is not None:
            assert batches is not None
            bmin, bmax = params.batch
            assert bmax >= bmin
            sel = (batches.data() >= bmin) & (batches.data() <= bmax)
            assert sel.count(True) > 0
            intensities = intensities.select(sel)

        if params.min_i_mean_over_sigma_mean is not None and (
                params.d_min is libtbx.Auto or params.d_min is not None):
            from xia2.Modules import Resolutionizer
            rparams = Resolutionizer.phil_defaults.extract().resolutionizer
            rparams.nbins = 20
            resolutionizer = Resolutionizer.resolutionizer(
                intensities, None, rparams)
            i_mean_over_sigma_mean = params.min_i_mean_over_sigma_mean
            d_min = resolutionizer.resolution_i_mean_over_sigma_mean(
                i_mean_over_sigma_mean)
            if params.d_min is libtbx.Auto:
                intensities = intensities.resolution_filter(
                    d_min=d_min).set_info(intensities.info())
                if params.verbose:
                    logger.info('Selecting reflections with d > %.2f' % d_min)
            elif d_min > params.d_min:
                logger.info('Rejecting dataset %s as d_min too low (%.2f)' %
                            (file_name, d_min))
                continue
            else:
                logger.info('Estimated d_min for %s: %.2f' %
                            (file_name, d_min))
        elif params.d_min not in (None, libtbx.Auto):
            intensities = intensities.resolution_filter(
                d_min=params.d_min).set_info(intensities.info())

        if params.normalisation == 'kernel':
            from mmtbx.scaling import absolute_scaling
            normalisation = absolute_scaling.kernel_normalisation(
                intensities, auto_kernel=True)
            intensities = normalisation.normalised_miller.deep_copy()

        cb_op_to_primitive = \
            intensities.change_of_basis_op_to_primitive_setting()
        intensities = intensities.change_basis(cb_op_to_primitive)

        if params.mode == 'full' or params.space_group is not None:
            if params.space_group is not None:
                space_group_info = params.space_group.primitive_setting()
                if not space_group_info.group().is_compatible_unit_cell(
                        intensities.unit_cell()):
                    logger.info(
                        'Skipping data set - incompatible space group and unit cell: %s, %s'
                        % (space_group_info, intensities.unit_cell()))
                    continue
            else:
                space_group_info = sgtbx.space_group_info('P1')
            intensities = intensities.customized_copy(
                space_group_info=space_group_info)

        datasets.append(intensities)

    crystal_symmetries = [d.crystal_symmetry().niggli_cell() for d in datasets]
    lattice_ids = list(range(len(datasets)))
    from xfel.clustering.cluster import Cluster
    from xfel.clustering.cluster_groups import unit_cell_info
    ucs = Cluster.from_crystal_symmetries(crystal_symmetries,
                                          lattice_ids=lattice_ids)
    if params.save_plot:
        from matplotlib import pyplot as plt
        fig = plt.figure("Andrews-Bernstein distance dendrogram",
                         figsize=(12, 8))
        ax = plt.gca()
    else:
        ax = None
    clusters, _ = ucs.ab_cluster(params.unit_cell_clustering.threshold,
                                 log=params.unit_cell_clustering.log,
                                 write_file_lists=False,
                                 schnell=False,
                                 doplot=params.save_plot,
                                 ax=ax)
    if params.save_plot:
        plt.tight_layout()
        plt.savefig('%scluster_unit_cell.png' % params.plot_prefix)
        plt.close(fig)
    logger.info(unit_cell_info(clusters))

    largest_cluster_lattice_ids = None
    for cluster in clusters:
        cluster_lattice_ids = [m.lattice_id for m in cluster.members]
        if largest_cluster_lattice_ids is None:
            largest_cluster_lattice_ids = cluster_lattice_ids
        elif len(cluster_lattice_ids) > len(largest_cluster_lattice_ids):
            largest_cluster_lattice_ids = cluster_lattice_ids

    dataset_selection = largest_cluster_lattice_ids
    if len(dataset_selection) < len(datasets):
        logger.info('Selecting subset of data for cosym analysis: %s' %
                    str(dataset_selection))
        datasets = [datasets[i] for i in dataset_selection]

    # per-dataset change of basis operator to ensure all consistent
    change_of_basis_ops = []
    for i, dataset in enumerate(datasets):
        metric_subgroups = sgtbx.lattice_symmetry.metric_subgroups(
            dataset, max_delta=5)
        subgroup = metric_subgroups.result_groups[0]
        cb_op_inp_best = subgroup['cb_op_inp_best']
        datasets[i] = dataset.change_basis(cb_op_inp_best)
        change_of_basis_ops.append(cb_op_inp_best)

    cb_op_ref_min = datasets[0].change_of_basis_op_to_niggli_cell()
    for i, dataset in enumerate(datasets):
        if params.space_group is None:
            datasets[i] = dataset.change_basis(cb_op_ref_min).customized_copy(
                space_group_info=sgtbx.space_group_info('P1'))
        else:
            datasets[i] = dataset.change_basis(cb_op_ref_min)
            datasets[i] = datasets[i].customized_copy(
                crystal_symmetry=crystal.symmetry(
                    unit_cell=datasets[i].unit_cell(),
                    space_group_info=params.space_group.primitive_setting(),
                    assert_is_compatible_unit_cell=False))
        datasets[i] = datasets[i].merge_equivalents().array()
        change_of_basis_ops[i] = cb_op_ref_min * change_of_basis_ops[i]

    result = analyse_datasets(datasets, params)

    space_groups = {}
    reindexing_ops = {}
    for dataset_id in result.reindexing_ops.keys():
        if 0 in result.reindexing_ops[dataset_id]:
            cb_op = result.reindexing_ops[dataset_id][0]
            reindexing_ops.setdefault(cb_op, [])
            reindexing_ops[cb_op].append(dataset_id)
        if dataset_id in result.space_groups:
            space_groups.setdefault(result.space_groups[dataset_id], [])
            space_groups[result.space_groups[dataset_id]].append(dataset_id)

    logger.info('Space groups:')
    for sg, dataset_ids in space_groups.items():
        logger.info(str(sg.info().reference_setting()))
        logger.info(dataset_ids)

    logger.info('Reindexing operators:')
    for cb_op, dataset_ids in reindexing_ops.items():
        logger.info(cb_op)
        logger.info(dataset_ids)

    if (len(experiments) and len(reflections)
            and params.output.reflections is not None
            and params.output.experiments is not None):
        import copy
        from dxtbx.model import ExperimentList
        from dxtbx.serialize import dump
        reindexed_experiments = ExperimentList()
        reindexed_reflections = flex.reflection_table()
        expt_id = 0
        for cb_op, dataset_ids in reindexing_ops.items():
            cb_op = sgtbx.change_of_basis_op(cb_op)
            for dataset_id in dataset_ids:
                expt = experiments[dataset_selection[dataset_id]]
                refl = reflections[dataset_selection[dataset_id]]
                reindexed_expt = copy.deepcopy(expt)
                refl_reindexed = copy.deepcopy(refl)
                cb_op_this = cb_op * change_of_basis_ops[dataset_id]
                reindexed_expt.crystal = reindexed_expt.crystal.change_basis(
                    cb_op_this)
                refl_reindexed['miller_index'] = cb_op_this.apply(
                    refl_reindexed['miller_index'])
                reindexed_experiments.append(reindexed_expt)
                refl_reindexed['id'] = flex.int(refl_reindexed.size(), expt_id)
                reindexed_reflections.extend(refl_reindexed)
                expt_id += 1
        logger.info('Saving reindexed experiments to %s' %
                    params.output.experiments)
        dump.experiment_list(reindexed_experiments, params.output.experiments)
        logger.info('Saving reindexed reflections to %s' %
                    params.output.reflections)
        reindexed_reflections.as_pickle(params.output.reflections)
    elif params.output.suffix is not None:
        for cb_op, dataset_ids in reindexing_ops.items():
            cb_op = sgtbx.change_of_basis_op(cb_op)
            for dataset_id in dataset_ids:
                file_name = files[dataset_selection[dataset_id]]
                basename = os.path.basename(file_name)
                out_name = os.path.splitext(basename)[0] + \
                    params.output.suffix + '_' + \
                    str(dataset_selection[dataset_id]) + ".mtz"
                reader = any_reflection_file(file_name)
                assert reader.file_type() == 'ccp4_mtz'
                mtz_object = reader.file_content()
                cb_op_this = cb_op * change_of_basis_ops[dataset_id]
                if not cb_op_this.is_identity_op():
                    logger.info('reindexing %s (%s)' %
                                (file_name, cb_op_this.as_xyz()))
                    mtz_object.change_basis_in_place(cb_op_this)
                mtz_object.write(out_name)

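# The largest-cluster selection above keeps the lattice ids of the most
# populated unit-cell cluster. The same logic, with plain lists standing in
# for the xfel Cluster objects (groupings illustrative):
clusters = [[0, 3], [1, 2, 4, 5], [6]]
largest = None
for cluster_lattice_ids in clusters:
    if largest is None or len(cluster_lattice_ids) > len(largest):
        largest = cluster_lattice_ids
print(largest)  # [1, 2, 4, 5]
# equivalently: largest = max(clusters, key=len)
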
def run(args):
    parser = OptionParser(
        #usage=usage,
        phil=phil_scope,
        read_reflections=True,
        read_datablocks=False,
        read_experiments=True,
        check_format=False,
        #epilog=help_message
    )

    params, options, args = parser.parse_args(show_diff_phil=False,
                                              return_unhandled=True)

    # Configure the logging
    log.config(params.verbosity,
               info=params.output.log,
               debug=params.output.debug_log)

    from dials.util.version import dials_version
    logger.info(dials_version())

    # Log the diff phil
    diff_phil = parser.diff_phil.as_str()
    if diff_phil != '':
        logger.info('The following parameters have been modified:\n')
        logger.info(diff_phil)

    datasets_input = []

    experiments = flatten_experiments(params.input.experiments)
    reflections = flatten_reflections(params.input.reflections)

    if len(experiments) or len(reflections):
        if len(reflections) == 1:
            reflections_input = reflections[0]
            reflections = []
            for i in range(len(experiments)):
                reflections.append(
                    reflections_input.select(reflections_input['id'] == i))

        assert len(experiments) == len(reflections)

        from cctbx import crystal, miller

        for expt, refl in zip(experiments, reflections):
            crystal_symmetry = crystal.symmetry(
                unit_cell=expt.crystal.get_unit_cell(),
                space_group=expt.crystal.get_space_group())

            # filtering of intensities similar to that done in export_mtz
            # FIXME this function should be renamed/moved elsewhere
            from dials.util.export_mtz import _apply_data_filters
            refl = _apply_data_filters(refl,
                                       ignore_profile_fitting=False,
                                       filter_ice_rings=False,
                                       min_isigi=-5,
                                       include_partials=False,
                                       keep_partials=False,
                                       scale_partials=True)

            assert 'intensity.sum.value' in refl
            sel = refl.get_flags(refl.flags.integrated_sum)
            data = refl['intensity.sum.value']
            variances = refl['intensity.sum.variance']
            if 'intensity.prf.value' in refl:
                prf_sel = refl.get_flags(refl.flags.integrated_prf)
                data.set_selected(prf_sel, refl['intensity.prf.value'])
                variances.set_selected(prf_sel, refl['intensity.prf.variance'])
                sel |= prf_sel
            refl = refl.select(sel)
            data = data.select(sel)
            variances = variances.select(sel)

            if 'lp' in refl and 'qe' in refl:
                lp = refl['lp']
                qe = refl['qe']
                assert qe.all_gt(0)
                scale = lp / qe
                data *= scale
                variances *= (flex.pow2(scale))

            miller_indices = refl['miller_index']
            assert variances.all_gt(0)
            sigmas = flex.sqrt(variances)

            miller_set = miller.set(crystal_symmetry,
                                    miller_indices,
                                    anomalous_flag=True)
            intensities = miller.array(miller_set, data=data, sigmas=sigmas)
            intensities.set_observation_type_xray_intensity()
            intensities.set_info(
                miller.array_info(source='DIALS', source_type='pickle'))
            datasets_input.append(intensities)

    files = args

    for file_name in files:
        reader = any_reflection_file(file_name)
        assert reader.file_type() == 'ccp4_mtz'

        as_miller_arrays = reader.as_miller_arrays(merge_equivalents=False)
        intensities_prf = [
            ma for ma in as_miller_arrays
            if ma.info().labels == ['IPR', 'SIGIPR']
        ]
        intensities_sum = [
            ma for ma in as_miller_arrays if ma.info().labels == ['I', 'SIGI']
        ]
        if len(intensities_prf):
            intensities = intensities_prf[0]
        else:
            assert len(intensities_sum), 'No intensities found in input file.'
            intensities = intensities_sum[0]
        batches = [
            ma for ma in as_miller_arrays if ma.info().labels == ['BATCH']
        ]
        if len(batches):
            batches = batches[0]
        else:
            batches = None

        mtz_object = reader.file_content()
        intensities = intensities.customized_copy(
            anomalous_flag=True,
            indices=mtz_object.extract_original_index_miller_indices()
        ).set_info(intensities.info())

        intensities.set_observation_type_xray_intensity()

        if params.batch is not None:
            assert batches is not None
            bmin, bmax = params.batch
            assert bmax >= bmin
            sel = (batches.data() >= bmin) & (batches.data() <= bmax)
            assert sel.count(True) > 0
            intensities = intensities.select(sel)

        datasets_input.append(intensities)

    datasets = datasets_input
    assert len(datasets) == 1

    result = determine_space_group(
        datasets[0],
        normalisation=params.normalisation,
        d_min=params.d_min,
        min_i_mean_over_sigma_mean=params.min_i_mean_over_sigma_mean)

    if (len(experiments) and len(reflections)
            and params.output.reflections is not None
            and params.output.experiments is not None):
        from dxtbx.serialize import dump
        from rstbx.symmetry.constraints import parameter_reduction

        reindexed_experiments = copy.deepcopy(experiments)
        reindexed_reflections = copy.deepcopy(reflections[0])
        cb_op_inp_best = (result.best_solution.subgroup['cb_op_inp_best'] *
                          result.cb_op_inp_min)
        best_subsym = result.best_solution.subgroup['best_subsym']
        for expt in reindexed_experiments:
            expt.crystal = expt.crystal.change_basis(cb_op_inp_best)
            expt.crystal.set_space_group(
                best_subsym.space_group().build_derived_acentric_group())
            S = parameter_reduction.symmetrize_reduce_enlarge(
                expt.crystal.get_space_group())
            S.set_orientation(expt.crystal.get_B())
            S.symmetrize()
            expt.crystal.set_B(S.orientation.reciprocal_matrix())
        reindexed_reflections['miller_index'] = cb_op_inp_best.apply(
            reindexed_reflections['miller_index'])
        logger.info('Saving reindexed experiments to %s' %
                    params.output.experiments)
        dump.experiment_list(reindexed_experiments, params.output.experiments)
        logger.info('Saving reindexed reflections to %s' %
                    params.output.reflections)
        reindexed_reflections.as_pickle(params.output.reflections)
    elif params.output.suffix is not None:
        cb_op_inp_best = (result.best_solution.subgroup['cb_op_inp_best'] *
                          result.cb_op_inp_min)
        best_subsym = result.best_solution.subgroup['best_subsym']
        space_group = best_subsym.space_group().build_derived_acentric_group()
        for file_name in files:
            basename = os.path.basename(file_name)
            out_name = os.path.splitext(basename)[0] + \
                params.output.suffix + ".mtz"
            reader = any_reflection_file(file_name)
            assert reader.file_type() == 'ccp4_mtz'
            mtz_object = reader.file_content()
            if not cb_op_inp_best.is_identity_op():
                mtz_object.change_basis_in_place(cb_op_inp_best)
            mtz_object.set_space_group_info(space_group.info())
            for mtz_crystal in mtz_object.crystals():
                mtz_crystal.set_unit_cell_parameters(
                    best_subsym.unit_cell().parameters())
            mtz_object.write(out_name)
            logger.info('Saving reindexed reflections to %s' % out_name)

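# The batch filter above builds a boolean selection by combining two flex
# comparisons. The same pattern on a toy flex array (the script applies it to
# batches.data(), the data column of a BATCH miller array; values here are
# illustrative):
from scitbx.array_family import flex

batches = flex.int([1, 5, 12, 20, 31])
bmin, bmax = 5, 20
sel = (batches >= bmin) & (batches <= bmax)
print(sel.count(True))            # 3
print(list(batches.select(sel)))  # [5, 12, 20]
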
def _refine(self):
    for epoch, idxr in self._refinr_indexers.items():
        experiments = idxr.get_indexer_experiment_list()

        indexed_experiments = idxr.get_indexer_payload("experiments_filename")
        indexed_reflections = idxr.get_indexer_payload("indexed_filename")

        if len(experiments) > 1:
            xsweeps = idxr._indxr_sweeps
            assert len(xsweeps) == len(experiments)
            # don't currently support joint refinement
            assert len(self._refinr_sweeps) == 1
            xsweep = self._refinr_sweeps[0]
            i = xsweeps.index(xsweep)
            experiments = experiments[i:i + 1]

            # Extract and output experiment and reflections for current sweep
            indexed_experiments = os.path.join(
                self.get_working_directory(),
                "%s_indexed_experiments.json" % xsweep.get_name())
            indexed_reflections = os.path.join(
                self.get_working_directory(),
                "%s_indexed_reflections.pickle" % xsweep.get_name())

            from dxtbx.serialize import dump
            dump.experiment_list(experiments, indexed_experiments)

            from libtbx import easy_pickle
            from scitbx.array_family import flex
            reflections = easy_pickle.load(
                idxr.get_indexer_payload("indexed_filename"))
            sel = reflections['id'] == i
            assert sel.count(True) > 0
            imageset_id = reflections['imageset_id'].select(sel)
            assert imageset_id.all_eq(imageset_id[0])
            sel = reflections['imageset_id'] == imageset_id[0]
            reflections = reflections.select(sel)
            # set indexed reflections to id == 0 and imageset_id == 0
            reflections['id'].set_selected(reflections['id'] == i, 0)
            reflections['imageset_id'] = flex.int(len(reflections), 0)
            easy_pickle.dump(indexed_reflections, reflections)

        # currently only handle one lattice/sweep
        assert len(experiments.crystals()) == 1
        crystal_model = experiments.crystals()[0]
        lattice = idxr.get_indexer_lattice()

        from dxtbx.serialize import load

        scan_static = PhilIndex.params.dials.refine.scan_static

        # XXX Temporary workaround for dials.refine error for scan_varying
        # refinement with smaller wedges
        start, end = experiments[0].scan.get_oscillation_range()
        total_phi_range = end - start

        if (PhilIndex.params.dials.refine.scan_varying
                and total_phi_range > 5
                and not PhilIndex.params.dials.fast_mode):
            scan_varying = PhilIndex.params.dials.refine.scan_varying
        else:
            scan_varying = False

        if scan_static:
            refiner = self.Refine()
            refiner.set_experiments_filename(indexed_experiments)
            refiner.set_indexed_filename(indexed_reflections)
            refiner.set_scan_varying(False)
            refiner.run()
            self._refinr_experiments_filename = \
                refiner.get_refined_experiments_filename()
            self._refinr_indexed_filename = refiner.get_refined_filename()
        else:
            self._refinr_experiments_filename = indexed_experiments
            self._refinr_indexed_filename = indexed_reflections

        if scan_varying:
            refiner = self.Refine()
            refiner.set_experiments_filename(
                self._refinr_experiments_filename)
            refiner.set_indexed_filename(self._refinr_indexed_filename)
            if total_phi_range < 36:
                refiner.set_interval_width_degrees(total_phi_range / 2)
            refiner.run()
            self._refinr_experiments_filename = \
                refiner.get_refined_experiments_filename()
            self._refinr_indexed_filename = refiner.get_refined_filename()

        if scan_static or scan_varying:
            FileHandler.record_log_file(
                '%s REFINE' % idxr.get_indexer_full_name(),
                refiner.get_log_file())
            report = self.Report()
            report.set_experiments_filename(
                self._refinr_experiments_filename)
            report.set_reflections_filename(self._refinr_indexed_filename)
            html_filename = os.path.join(
                self.get_working_directory(),
                '%i_dials.refine.report.html' % report.get_xpid())
            report.set_html_filename(html_filename)
            report.run()
            FileHandler.record_html_file(
                '%s REFINE' % idxr.get_indexer_full_name(), html_filename)

        experiments = load.experiment_list(self._refinr_experiments_filename)
        self.set_refiner_payload("experiments.json",
                                 self._refinr_experiments_filename)
        self.set_refiner_payload("reflections.pickle",
                                 self._refinr_indexed_filename)

        # this is the result of the cell refinement
        self._refinr_cell = \
            experiments.crystals()[0].get_unit_cell().parameters()

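# The scan-varying decision above reduces to a small predicate on the total
# oscillation range; pulled out here for clarity, with the same thresholds as
# the method above (the helper name is ours, not part of the original code):
def use_scan_varying(scan_varying_requested, total_phi_range, fast_mode):
    """Scan-varying refinement is only attempted for wedges above 5 degrees,
    and is skipped entirely in fast mode."""
    return bool(scan_varying_requested) and total_phi_range > 5 and not fast_mode

assert use_scan_varying(True, 90.0, False)
assert not use_scan_varying(True, 3.0, False)  # wedge too small
assert not use_scan_varying(True, 90.0, True)  # fast mode wins
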
def _index_prepare(self):
    from xia2.Handlers.Citations import Citations
    Citations.cite("dials")

    # all_images = self.get_matching_images()
    # first = min(all_images)
    # last = max(all_images)

    spot_lists = []
    experiments_filenames = []

    for imageset, xsweep in zip(self._indxr_imagesets, self._indxr_sweeps):

        Chatter.banner("Spotfinding %s" % xsweep.get_name())

        first, last = imageset.get_scan().get_image_range()

        # at this stage, break out to run the DIALS code: this sets itself up
        # now cheat and pass in some information... save re-reading all of the
        # image headers

        # FIXME need to adjust this to allow (say) three chunks of images

        from dxtbx.serialize import dump
        from dxtbx.model.experiment_list import ExperimentListFactory

        sweep_filename = os.path.join(self.get_working_directory(),
                                      "%s_import.expt" % xsweep.get_name())
        dump.experiment_list(
            ExperimentListFactory.from_imageset_and_crystal(imageset, None),
            sweep_filename,
        )

        genmask = self.GenerateMask()
        genmask.set_input_experiments(sweep_filename)
        genmask.set_output_experiments(
            os.path.join(
                self.get_working_directory(),
                "%s_%s_masked.expt" % (genmask.get_xpid(), xsweep.get_name()),
            ))
        genmask.set_params(PhilIndex.params.dials.masking)
        sweep_filename, mask_pickle = genmask.run()
        Debug.write("Generated mask for %s: %s" %
                    (xsweep.get_name(), mask_pickle))

        gain = PhilIndex.params.xia2.settings.input.gain
        if gain is libtbx.Auto:
            gain_estimator = self.EstimateGain()
            gain_estimator.set_sweep_filename(sweep_filename)
            gain_estimator.run()
            gain = gain_estimator.get_gain()
            Chatter.write("Estimated gain: %.2f" % gain)
            PhilIndex.params.xia2.settings.input.gain = gain

        # FIXME this should really use the assigned spot finding regions
        # offset = self.get_frame_offset()
        dfs_params = PhilIndex.params.dials.find_spots
        spotfinder = self.Spotfinder()
        if last - first > 10:
            spotfinder.set_write_hot_mask(True)
        spotfinder.set_input_sweep_filename(sweep_filename)
        spotfinder.set_output_sweep_filename(
            "%s_%s_strong.expt" % (spotfinder.get_xpid(), xsweep.get_name()))
        spotfinder.set_input_spot_filename(
            "%s_%s_strong.refl" % (spotfinder.get_xpid(), xsweep.get_name()))
        if PhilIndex.params.dials.fast_mode:
            wedges = self._index_select_images_i(imageset)
            spotfinder.set_scan_ranges(wedges)
        else:
            spotfinder.set_scan_ranges([(first, last)])
        if dfs_params.phil_file is not None:
            spotfinder.set_phil_file(dfs_params.phil_file)
        if dfs_params.min_spot_size is libtbx.Auto:
            if imageset.get_detector()[0].get_type() == "SENSOR_PAD":
                dfs_params.min_spot_size = 3
            else:
                dfs_params.min_spot_size = None
        if dfs_params.min_spot_size is not None:
            spotfinder.set_min_spot_size(dfs_params.min_spot_size)
        if dfs_params.min_local is not None:
            spotfinder.set_min_local(dfs_params.min_local)
        if dfs_params.sigma_strong:
            spotfinder.set_sigma_strong(dfs_params.sigma_strong)
        gain = PhilIndex.params.xia2.settings.input.gain
        if gain:
            spotfinder.set_gain(gain)
        if dfs_params.filter_ice_rings:
            spotfinder.set_filter_ice_rings(dfs_params.filter_ice_rings)
        if dfs_params.kernel_size:
            spotfinder.set_kernel_size(dfs_params.kernel_size)
        if dfs_params.global_threshold is not None:
            spotfinder.set_global_threshold(dfs_params.global_threshold)
        if dfs_params.threshold.algorithm is not None:
            spotfinder.set_threshold_algorithm(dfs_params.threshold.algorithm)
        spotfinder.run()

        spot_filename = spotfinder.get_spot_filename()
        if not os.path.exists(spot_filename):
            raise RuntimeError("Spotfinding failed: %s does not exist." %
                               os.path.basename(spot_filename))

        spot_lists.append(spot_filename)
        experiments_filenames.append(spotfinder.get_output_sweep_filename())

        from dials.util.ascii_art import spot_counts_per_image_plot
        refl = flex.reflection_table.from_file(spot_filename)
        if not len(refl):
            raise RuntimeError("No spots found in sweep %s" %
                               xsweep.get_name())
        Chatter.write(spot_counts_per_image_plot(refl), strip=False)

        if not PhilIndex.params.dials.fast_mode:
            detectblanks = self.DetectBlanks()
            detectblanks.set_sweep_filename(experiments_filenames[-1])
            detectblanks.set_reflections_filename(spot_filename)
            detectblanks.run()
            results = detectblanks.get_results()
            offset = imageset.get_scan().get_image_range()[0]
            blank_regions = results["strong"]["blank_regions"]
            if len(blank_regions):
                blank_regions = [(int(s), int(e)) for s, e in blank_regions]
                for blank_start, blank_end in blank_regions:
                    Chatter.write("WARNING: Potential blank images: %i -> %i" %
                                  (blank_start + 1, blank_end))

                if PhilIndex.params.xia2.settings.remove_blanks:
                    non_blanks = []
                    start, end = imageset.get_array_range()
                    last_blank_end = start
                    for blank_start, blank_end in blank_regions:
                        if blank_start > start:
                            non_blanks.append((last_blank_end, blank_start))
                        last_blank_end = blank_end
                    if last_blank_end + 1 < end:
                        non_blanks.append((last_blank_end, end))

                    xsweep = self.get_indexer_sweep()
                    xwav = xsweep.get_wavelength()
                    xsample = xsweep.get_xsample()

                    sweep_name = xsweep.get_name()
                    import string

                    for i, (nb_start, nb_end) in enumerate(non_blanks):
                        assert i < 26
                        if i == 0:
                            sub_imageset = imageset[nb_start - start:
                                                    nb_end - start]
                            xsweep._frames_to_process = (nb_start + 1,
                                                         nb_end + 1)
                            self.set_indexer_prepare_done(done=False)
                            self._indxr_imagesets[self._indxr_imagesets.index(
                                imageset)] = sub_imageset
                            xsweep._integrater._setup_from_imageset(
                                sub_imageset)
                        else:
                            min_images = \
                                PhilIndex.params.xia2.settings.input.min_images
                            if (nb_end - nb_start) < min_images:
                                continue
                            new_name = "_".join(
                                (sweep_name, string.ascii_lowercase[i]))
                            new_sweep = xwav.add_sweep(
                                new_name,
                                xsample,
                                directory=os.path.join(
                                    os.path.basename(xsweep.get_directory()),
                                    new_name,
                                ),
                                image=imageset.get_path(nb_start - start),
                                frames_to_process=(nb_start + 1, nb_end),
                            )
                            Chatter.write(
                                "Generating new sweep: %s (%s:%i:%i)" % (
                                    new_sweep.get_name(),
                                    new_sweep.get_image(),
                                    new_sweep.get_frames_to_process()[0],
                                    new_sweep.get_frames_to_process()[1],
                                ))
                    return

        if not PhilIndex.params.xia2.settings.trust_beam_centre:
            discovery = self.DiscoverBetterExperimentalModel()
            discovery.set_sweep_filename(experiments_filenames[-1])
            discovery.set_spot_filename(spot_filename)
            # wedges = self._index_select_images_i(imageset)
            # discovery.set_scan_ranges(wedges)
            # discovery.set_scan_ranges([(first + offset, last + offset)])
            try:
                discovery.run()
            except Exception as e:
                Debug.write("DIALS beam centre search failed: %s" % str(e))
            else:
                # overwrite indexed.expt in experiments list
                experiments_filenames[-1] = \
                    discovery.get_optimized_experiments_filename()

    self.set_indexer_payload("spot_lists", spot_lists)
    self.set_indexer_payload("experiments", experiments_filenames)

    return

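# The remove_blanks branch above computes the complement of the blank regions
# within the sweep's array range. The same interval arithmetic as a standalone
# function (the helper name and the ranges below are illustrative):
def non_blank_regions(array_range, blank_regions):
    start, end = array_range
    non_blanks = []
    last_blank_end = start
    for blank_start, blank_end in blank_regions:
        if blank_start > start:
            non_blanks.append((last_blank_end, blank_start))
        last_blank_end = blank_end
    if last_blank_end + 1 < end:
        non_blanks.append((last_blank_end, end))
    return non_blanks

print(non_blank_regions((0, 100), [(20, 30), (60, 70)]))
# [(0, 20), (30, 60), (70, 100)]
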