def tst_dump_formats(self):
    """Round-trip an experiment list through JSON, split JSON and pickle."""
    import os
    from os.path import join
    from uuid import uuid4

    os.environ['DIALS_REGRESSION'] = self.path

    # Load the reference experiment list
    source = join(self.path, 'experiment_test_data', 'experiment_1.json')
    elist1 = ExperimentListFactory.from_json_file(source)
    dump = ExperimentListDumper(elist1)

    # JSON round trip
    path = 'temp%s.json' % uuid4().hex
    dump.as_json(path)
    self.check(elist1, ExperimentListFactory.from_json_file(path))

    # Split JSON round trip
    path = 'temp%s.json' % uuid4().hex
    dump.as_json(path, split=True)
    self.check(elist1, ExperimentListFactory.from_json_file(path))

    # Pickle round trip
    path = 'temp%s.pickle' % uuid4().hex
    dump.as_pickle(path)
    self.check(elist1, ExperimentListFactory.from_pickle_file(path))
def tst_dump_empty_sweep(self): from dxtbx.imageset import ImageSweep, NullReader, SweepFileList from dxtbx.model import Beam, Detector, Goniometer, Scan from dxtbx.model.crystal import crystal_model from uuid import uuid4 imageset = ImageSweep(NullReader(SweepFileList("filename%01d.cbf", (0, 3)))) imageset.set_beam(Beam((1, 0, 0))) imageset.set_detector(Detector()) imageset.set_goniometer(Goniometer()) imageset.set_scan(Scan((1, 3), (0.0, 1.0))) crystal = crystal_model((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=1) experiments = ExperimentListFactory.from_imageset_and_crystal( imageset, crystal) dump = ExperimentListDumper(experiments) filename = 'temp%s.json' % uuid4().hex dump.as_json(filename) experiments2 = ExperimentListFactory.from_json_file(filename, check_format=False) self.check(experiments, experiments2) print 'OK'
def test1(): dials_regression = libtbx.env.find_in_repositories( relative_path="dials_regression", test=os.path.isdir) data_dir = os.path.join(dials_regression, "refinement_test_data", "multi_stills") experiments_path = os.path.join(data_dir, "combined_experiments.json") reflections_path = os.path.join(data_dir, "combined_reflections.pickle") cmd = "dials.refine " + experiments_path + " " + reflections_path print cmd # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="tst_refine_multi_stills1") os.chdir(tmp_dir) try: result = easy_run.fully_buffered(command=cmd).raise_if_errors() # load results reg_exp = ExperimentListFactory.from_json_file(os.path.join( data_dir, "regression_experiments.json"), check_format=False) ref_exp = ExperimentListFactory.from_json_file( "refined_experiments.json", check_format=False) finally: os.chdir(cwd) print "OK" # compare results tol = 1e-5 for b1, b2 in zip(reg_exp.beams(), ref_exp.beams()): assert b1.is_similar_to(b2, wavelength_tolerance=tol, direction_tolerance=tol, polarization_normal_tolerance=tol, polarization_fraction_tolerance=tol) s0_1 = matrix.col(b1.get_unit_s0()) s0_2 = matrix.col(b2.get_unit_s0()) assert s0_1.accute_angle(s0_2, deg=True) < 0.0057 # ~0.1 mrad for c1, c2 in zip(reg_exp.crystals(), ref_exp.crystals()): assert c1.is_similar_to(c2) for d1, d2 in zip(reg_exp.detectors(), ref_exp.detectors()): assert d1.is_similar_to(d2, fast_axis_tolerance=1e-4, slow_axis_tolerance=1e-4, origin_tolerance=1e-2) print "OK" return
def test1(): dials_regression = libtbx.env.find_in_repositories( relative_path="dials_regression", test=os.path.isdir) data_dir = os.path.join(dials_regression, "refinement_test_data", "multi_stills") experiments_path = os.path.join(data_dir, "combined_experiments.json") reflections_path = os.path.join(data_dir, "combined_reflections.pickle") cmd = "dials.refine " + experiments_path + " " + reflections_path print cmd # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="tst_refine_multi_stills1") os.chdir(tmp_dir) try: result = easy_run.fully_buffered(command=cmd).raise_if_errors() # load results reg_exp = ExperimentListFactory.from_json_file( os.path.join(data_dir, "regression_experiments.json"), check_format=False) ref_exp = ExperimentListFactory.from_json_file("refined_experiments.json", check_format=False) finally: os.chdir(cwd) # clean up tmp dir shutil.rmtree(tmp_dir) print "OK" # compare results tol = 1e-5 for b1, b2 in zip(reg_exp.beams(), ref_exp.beams()): assert b1.is_similar_to(b2, wavelength_tolerance=tol, direction_tolerance=tol, polarization_normal_tolerance=tol, polarization_fraction_tolerance=tol) s0_1 = matrix.col(b1.get_unit_s0()) s0_2 = matrix.col(b2.get_unit_s0()) assert s0_1.accute_angle(s0_2, deg=True) < 0.0057 # ~0.1 mrad for c1, c2 in zip(reg_exp.crystals(), ref_exp.crystals()): assert c1.is_similar_to(c2) for d1, d2 in zip(reg_exp.detectors(), ref_exp.detectors()): assert d1.is_similar_to(d2, fast_axis_tolerance=1e-4, slow_axis_tolerance=1e-4, origin_tolerance=1e-2) print "OK" return
class TestSummation(object): def __init__(self): from dxtbx.model.experiment.experiment_list import ExperimentListFactory from dials.algorithms.profile_model.gaussian_rs import Model import libtbx.load_env from dials.array_family import flex from os.path import join from math import pi try: dials_regression = libtbx.env.dist_path('dials_regression') except KeyError, e: print 'FAIL: dials_regression not configured' exit(0) path = join(dials_regression, "centroid_test_data", "experiments.json") exlist = ExperimentListFactory.from_json_file(path) exlist[0].profile = Model(None, n_sigma=3, sigma_b=0.024 * pi / 180.0, sigma_m=0.044 * pi / 180.0) rlist = flex.reflection_table.from_predictions(exlist[0]) rlist['id'] = flex.int(len(rlist), 0) self.rlist = rlist self.exlist = exlist
def from_dict(cls, obj): import json assert obj['__id__'] == 'Refiner' return_obj = cls() for k, v in obj.iteritems(): if k == '_refinr_indexers': v_new = {} for k_, v_ in v.iteritems(): from libtbx.utils import import_python_object integrater_cls = import_python_object( import_path=".".join((v_['__module__'], v_['__name__'])), error_prefix='', target_must_be='', where_str='').object v_new[float(k_)] = integrater_cls.from_dict(v_) v = v_new elif k == '_refinr_payload': v_new = {} for k_, v_ in v.iteritems(): try: v_new[float(k_)] = v_ except ValueError, e: v_new[k_] = v_ v = v_new if isinstance(v, dict): if v.get('__id__', None) == 'ExperimentList': from dxtbx.model.experiment.experiment_list import ExperimentListFactory v = ExperimentListFactory.from_dict(v, check_format=False) setattr(return_obj, k, v)
def tst_from_datablock(self): from dxtbx.imageset import ImageSweep, NullReader, SweepFileList from dxtbx.model import Beam, Detector, Goniometer, Scan from dxtbx.datablock import DataBlockFactory from dxtbx.model.crystal import crystal_model imageset = ImageSweep(NullReader(SweepFileList("filename%01d.cbf", (0, 2)))) imageset.set_beam(Beam()) imageset.set_detector(Detector()) imageset.set_goniometer(Goniometer()) imageset.set_scan(Scan((1, 2), (0, 1))) crystal = crystal_model((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=0) datablock = DataBlockFactory.from_imageset(imageset) experiments = ExperimentListFactory.from_datablock_and_crystal( datablock, crystal) assert(len(experiments) == 1) assert(experiments[0].imageset is not None) assert(experiments[0].beam is not None) assert(experiments[0].detector is not None) assert(experiments[0].goniometer is not None) assert(experiments[0].scan is not None) assert(experiments[0].crystal is not None) print 'OK' pass
def run(self): from os.path import join from libtbx import easy_run from dials.algorithms.profile_model.factory import phil_scope from libtbx.phil import parse from dxtbx.model.experiment.experiment_list import ExperimentListFactory # Call dials.create_profile_model easy_run.fully_buffered([ 'dials.create_profile_model', join(self.path, 'experiments.json'), join(self.path, 'indexed.pickle'), ]).raise_if_errors() experiments = ExperimentListFactory.from_json_file( "experiments_with_profile_model.json", check_format=False) sigma_b = experiments[0].profile.sigma_b(deg=True) sigma_m = experiments[0].profile.sigma_m(deg=True) eps = 1e-3 try: assert (abs(sigma_b - 0.02195) < eps) assert (abs(sigma_m - 0.06833) < eps) except Exception: print sigma_b print sigma_m raise print 'OK'
def run(self): from os.path import join from libtbx import easy_run from dials.algorithms.profile_model.factory import phil_scope from libtbx.phil import parse from dxtbx.model.experiment.experiment_list import ExperimentListFactory # Call dials.create_profile_model easy_run.fully_buffered([ 'dials.create_profile_model', join(self.path, 'experiments.json'), join(self.path, 'indexed.pickle'), ]).raise_if_errors() experiments = ExperimentListFactory.from_json_file( "experiments_with_profile_model.json", check_format=False) sigma_b = experiments[0].profile.sigma_b(deg=True) sigma_m = experiments[0].profile.sigma_m(deg=True) eps = 1e-3 try: assert(abs(sigma_b - 0.02195) < eps) assert(abs(sigma_m - 0.06833) < eps) except Exception: print sigma_b print sigma_m raise print 'OK'
def print_someting(experiments_argv):
    # NOTE(review): function name contains a typo ("someting"); kept as-is
    # because callers elsewhere may use it.
    # Debugging aid: load an experiment list from a JSON path and dump
    # crystal and goniometer information to stdout.
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    experiments = ExperimentListFactory.from_json_file(experiments_argv,
                                                       check_format=False)
    print "len(experiments)", len(experiments)
    print experiments[0]
    for exp in experiments:
        print "\n\n exp =", exp
        print "dir(exp) =", dir(exp), "\n\n"
        #print "dir(exp.crystal) =", dir(exp.crystal)
        print "exp.crystal.get_space_group =", exp.crystal.get_space_group()
        print "exp.crystal.get_unit_cell =", exp.crystal.get_unit_cell()
        # only the goniometer is inspected; other models left commented out
        #detc = exp.detector
        #scan = exp.scan
        #prof = exp.profile
        gonio = exp.goniometer
        #print "\n\n dir(detc) =", dir(detc)
        #print "\n\n dir(scan) =", dir(scan)
        #print "\n\n dir(prof) =", dir(prof)
        print "\n\n dir(goni) =", dir(gonio)
    print "Pass"
class Test(object):
    def __init__(self):
        # Fixture: load a fake long-scan experiment from dials_regression
        # and attach a Gaussian reciprocal-space profile model to it.
        from math import pi
        import libtbx.load_env
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError, e:
            print 'FAIL: dials_regression not configured'
            exit(0)
        import os
        filename = os.path.join(dials_regression,
                                'centroid_test_data',
                                'fake_long_experiments.json')
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        from dxtbx.model.experiment.experiment_list import ExperimentList
        exlist = ExperimentListFactory.from_json_file(filename)
        assert (len(exlist) == 1)
        self.experiment = exlist[0]
        # Set the delta_divergence/mosaicity (degrees converted to radians)
        self.n_sigma = 5
        self.sigma_b = 0.060 * pi / 180
        self.sigma_m = 0.154 * pi / 180
        from dials.algorithms.profile_model.gaussian_rs import Model
        self.profile_model = Model(None, self.n_sigma, self.sigma_b,
                                   self.sigma_m)
        self.experiment.profile = self.profile_model
        # Wrap the single experiment in a list for APIs that expect one
        self.experiments = ExperimentList()
        self.experiments.append(self.experiment)
def tst_from_args(self):
    """Check ExperimentListFactory.from_args loads experiments from paths."""
    from os.path import join
    from glob import glob

    # Get all the filenames (experiment_2 is deliberately excluded)
    filenames = [
        join(self.path, 'experiment_test_data', 'experiment_1.json'),
        #join(self.path, 'experiment_test_data', 'experiment_2.json'),
        join(self.path, 'experiment_test_data', 'experiment_3.json'),
        join(self.path, 'experiment_test_data', 'experiment_4.json')]

    # Get the experiments from a list of filenames
    experiments = ExperimentListFactory.from_args(filenames)

    # Expect 3 experiments (one per loaded file), each fully populated
    assert(len(experiments) == 3)
    for i in range(3):
        assert(experiments[i].imageset is not None)
        assert(experiments[i].beam is not None)
        assert(experiments[i].detector is not None)
        assert(experiments[i].goniometer is not None)
        assert(experiments[i].scan is not None)

    # Test passed
    print 'OK'
def tst_from_datablock(self): from dxtbx.imageset import ImageSweep, NullReader, SweepFileList from dxtbx.model import Beam, Detector, Goniometer, Scan from dxtbx.datablock import DataBlockFactory from dxtbx.model.crystal import crystal_model imageset = ImageSweep(NullReader(SweepFileList("filename%01d.cbf", (0, 2)))) imageset.set_beam(Beam()) imageset.set_detector(Detector()) imageset.set_goniometer(Goniometer()) imageset.set_scan(Scan((1, 2), (0, 1))) crystal = crystal_model((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=0) datablock = DataBlockFactory.from_imageset(imageset) experiments = ExperimentListFactory.from_datablock_and_crystal( datablock, crystal) assert(len(experiments) == 1) assert(experiments[0].imageset is not None) assert(experiments[0].beam is not None) assert(experiments[0].detector is not None) assert(experiments[0].goniometer is not None) assert(experiments[0].scan is not None) assert(experiments[0].crystal is not None) print 'OK'
def tst_from_args(self): from os.path import join from glob import glob # Get all the filenames filenames = [ join(self.path, 'experiment_test_data', 'experiment_1.json'), join(self.path, 'experiment_test_data', 'experiment_2.json'), join(self.path, 'experiment_test_data', 'experiment_3.json'), join(self.path, 'experiment_test_data', 'experiment_4.json')] # Get the experiments from a list of filenames experiments = ExperimentListFactory.from_args(filenames) # Have 4 experiment assert(len(experiments) == 4) for i in range(4): assert(experiments[i].imageset is not None) assert(experiments[i].beam is not None) assert(experiments[i].detector is not None) assert(experiments[i].goniometer is not None) assert(experiments[i].scan is not None) # Test passed print 'OK'
class Test(object):
    def __init__(self):
        # Fixture: load centroid test experiments plus the expected
        # reflections pickle used by the prediction tests.
        import libtbx.load_env
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError, e:
            print 'SKIP: dials_regression not configured'
            exit(0)
        import os
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        path = os.path.join(dials_regression,
                            'centroid_test_data',
                            'experiments.json')
        self.experiments = ExperimentListFactory.from_json_file(path)
        assert (len(self.experiments) == 1)
        # Push the experiment models onto the imageset so the two agree
        self.experiments[0].imageset.set_beam(self.experiments[0].beam)
        self.experiments[0].imageset.set_detector(
            self.experiments[0].detector)
        self.experiments[0].imageset.set_goniometer(
            self.experiments[0].goniometer)
        self.experiments[0].imageset.set_scan(self.experiments[0].scan)
        reflection_filename = os.path.join(dials_regression,
                                           'prediction_test_data',
                                           'expected_reflections.pickle')
        from dials.array_family import flex
        self.reflections = flex.reflection_table.from_pickle(
            reflection_filename)
def test2():
    """Compare results of multiprocess vs single process refinement to ensure
    they are the same"""
    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "multi_stills")
    experiments_path = os.path.join(data_dir, "combined_experiments.json")
    reflections_path = os.path.join(data_dir, "combined_reflections.pickle")

    # common options; reflection output is suppressed, only nproc differs
    cmd = "dials.refine " + experiments_path + " " + reflections_path + \
        " outlier.algorithm=null engine=LBFGScurvs output.reflections=None "
    cmd1 = cmd + "output.experiments=refined_experiments_nproc1.json nproc=1"
    print cmd1
    cmd2 = cmd + "output.experiments=refined_experiments_nproc4.json nproc=4"
    print cmd2

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="tst_refine_multi_stills2")
    os.chdir(tmp_dir)
    try:
        result1 = easy_run.fully_buffered(command=cmd1).raise_if_errors()
        result2 = easy_run.fully_buffered(command=cmd2).raise_if_errors()
        # load results
        nproc1 = ExperimentListFactory.from_json_file(
            "refined_experiments_nproc1.json", check_format=False)
        nproc4 = ExperimentListFactory.from_json_file(
            "refined_experiments_nproc4.json", check_format=False)
    finally:
        os.chdir(cwd)
        # clean up tmp dir
        shutil.rmtree(tmp_dir)
    print "OK"

    # compare results: nproc=1 and nproc=4 runs must agree within tolerances
    for b1, b2 in zip(nproc1.beams(), nproc4.beams()):
        assert b1.is_similar_to(b2)
    for c1, c2 in zip(nproc1.crystals(), nproc4.crystals()):
        assert c1.is_similar_to(c2)
    for d1, d2 in zip(nproc1.detectors(), nproc4.detectors()):
        assert d1.is_similar_to(d2,
                                fast_axis_tolerance=5e-5,
                                slow_axis_tolerance=5e-5,
                                origin_tolerance=5e-5)
    print "OK"
    return
def test2(): """Compare results of multiprocess vs single process refinement to ensure they are the same""" dials_regression = libtbx.env.find_in_repositories( relative_path="dials_regression", test=os.path.isdir) data_dir = os.path.join(dials_regression, "refinement_test_data", "multi_stills") experiments_path = os.path.join(data_dir, "combined_experiments.json") reflections_path = os.path.join(data_dir, "combined_reflections.pickle") cmd = "dials.refine " + experiments_path + " " + reflections_path + \ " outlier.algorithm=null engine=LBFGScurvs output.reflections=None " cmd1 = cmd + "output.experiments=refined_experiments_nproc1.json nproc=1" print cmd1 cmd2 = cmd + "output.experiments=refined_experiments_nproc4.json nproc=4" print cmd2 # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="tst_refine_multi_stills2") os.chdir(tmp_dir) try: result1 = easy_run.fully_buffered(command=cmd1).raise_if_errors() result2 = easy_run.fully_buffered(command=cmd2).raise_if_errors() # load results nproc1 = ExperimentListFactory.from_json_file( "refined_experiments_nproc1.json", check_format=False) nproc4 = ExperimentListFactory.from_json_file( "refined_experiments_nproc4.json", check_format=False) finally: os.chdir(cwd) print "OK" # compare results for b1, b2 in zip(nproc1.beams(), nproc4.beams()): assert b1.is_similar_to(b2) for c1, c2 in zip(nproc1.crystals(), nproc4.crystals()): assert c1.is_similar_to(c2) for d1, d2 in zip(nproc1.detectors(), nproc4.detectors()): assert d1.is_similar_to(d2, fast_axis_tolerance=5e-5, slow_axis_tolerance=5e-5, origin_tolerance=5e-5) print "OK" return
def tst_from_json(self):
    """Check that equivalent JSON serializations load to equal experiments
    (experiment_2 is currently excluded from the comparison)."""
    from os.path import join
    import os
    os.environ['DIALS_REGRESSION'] = self.path

    # Get all the filenames
    filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json')
    filename2 = join(self.path, 'experiment_test_data', 'experiment_2.json')
    filename3 = join(self.path, 'experiment_test_data', 'experiment_3.json')
    filename4 = join(self.path, 'experiment_test_data', 'experiment_4.json')

    # Read all the experiment lists in (experiment_2 deliberately skipped)
    el1 = ExperimentListFactory.from_json_file(filename1)
    #el2 = ExperimentListFactory.from_json_file(filename2)
    el3 = ExperimentListFactory.from_json_file(filename3)
    el4 = ExperimentListFactory.from_json_file(filename4)

    # All the experiment lists should be the same length
    assert(len(el1) == 1)
    #assert(len(el1) == len(el2))
    assert(len(el1) == len(el3))
    assert(len(el1) == len(el4))

    # Check all the models are the same
    for e in zip(el1, el3, el4):
        e1 = e[0]
        assert(e1.imageset is not None)
        assert(e1.beam is not None)
        assert(e1.detector is not None)
        assert(e1.goniometer is not None)
        assert(e1.scan is not None)
        assert(e1.crystal is not None)
        for ee in e[1:]:
            assert(e1.imageset == ee.imageset)
            assert(e1.beam == ee.beam)
            assert(e1.detector == ee.detector)
            assert(e1.goniometer == ee.goniometer)
            assert(e1.scan == ee.scan)
            assert(e1.crystal == ee.crystal)

    # test passed
    print 'OK'
def test1(): dials_regression = libtbx.env.find_in_repositories( relative_path="dials_regression", test=os.path.isdir) # use the i04_weak_data for this test data_dir = os.path.join(dials_regression, "refinement_test_data", "i04_weak_data") experiments_path = os.path.join(data_dir, "experiments.json") pickle_path = os.path.join(data_dir, "indexed_strong.pickle") for pth in (experiments_path, pickle_path): assert os.path.exists(pth) # set close_to_spindle_cutoff to old default cmd = "dials.refine close_to_spindle_cutoff=0.05 reflections_per_degree=100 " + \ experiments_path + " " + pickle_path print cmd # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="test_dials_refine") os.chdir(tmp_dir) try: result = easy_run.fully_buffered(command=cmd).raise_if_errors() # load results reg_exp = ExperimentListFactory.from_json_file( os.path.join(data_dir, "regression_experiments.json"), check_format=False)[0] ref_exp = ExperimentListFactory.from_json_file("refined_experiments.json", check_format=False)[0] finally: os.chdir(cwd) # clean up tmp dir shutil.rmtree(tmp_dir) # test refined models against expected assert reg_exp.crystal == ref_exp.crystal assert reg_exp.detector == ref_exp.detector assert reg_exp.beam == ref_exp.beam print "OK" return
def tst_from_json(self):
    """Check that four equivalent JSON serializations load to equal
    experiments."""
    from os.path import join
    import os
    os.environ['DIALS_REGRESSION'] = self.path

    # Get all the filenames
    filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json')
    filename2 = join(self.path, 'experiment_test_data', 'experiment_2.json')
    filename3 = join(self.path, 'experiment_test_data', 'experiment_3.json')
    filename4 = join(self.path, 'experiment_test_data', 'experiment_4.json')

    # Read all the experiment lists in
    el1 = ExperimentListFactory.from_json_file(filename1)
    el2 = ExperimentListFactory.from_json_file(filename2)
    el3 = ExperimentListFactory.from_json_file(filename3)
    el4 = ExperimentListFactory.from_json_file(filename4)

    # All the experiment lists should be the same length
    assert(len(el1) == 1)
    assert(len(el1) == len(el2))
    assert(len(el1) == len(el3))
    assert(len(el1) == len(el4))

    # Check all the models are the same across the four serializations
    for e in zip(el1, el2, el3, el4):
        e1 = e[0]
        assert(e1.imageset is not None)
        assert(e1.beam is not None)
        assert(e1.detector is not None)
        assert(e1.goniometer is not None)
        assert(e1.scan is not None)
        assert(e1.crystal is not None)
        for ee in e[1:]:
            assert(e1.imageset == ee.imageset)
            assert(e1.beam == ee.beam)
            assert(e1.detector == ee.detector)
            assert(e1.goniometer == ee.goniometer)
            assert(e1.scan == ee.scan)
            assert(e1.crystal == ee.crystal)

    # test passed
    print 'OK'
def test1(): dials_regression = libtbx.env.find_in_repositories( relative_path="dials_regression", test=os.path.isdir) # use the i04_weak_data for this test data_dir = os.path.join(dials_regression, "refinement_test_data", "i04_weak_data") experiments_path = os.path.join(data_dir, "experiments.json") pickle_path = os.path.join(data_dir, "indexed_strong.pickle") for pth in (experiments_path, pickle_path): assert os.path.exists(pth) # set some old defaults cmd = "dials.refine close_to_spindle_cutoff=0.05 reflections_per_degree=100 " + \ "outlier.separate_blocks=False " + experiments_path + " " + pickle_path print cmd # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="test_dials_refine") os.chdir(tmp_dir) try: result = easy_run.fully_buffered(command=cmd).raise_if_errors() # load results reg_exp = ExperimentListFactory.from_json_file(os.path.join( data_dir, "regression_experiments.json"), check_format=False)[0] ref_exp = ExperimentListFactory.from_json_file( "refined_experiments.json", check_format=False)[0] finally: os.chdir(cwd) # test refined models against expected assert reg_exp.crystal == ref_exp.crystal assert reg_exp.detector == ref_exp.detector assert reg_exp.beam == ref_exp.beam print "OK" return
def _refine(self):
    """Do refinement and load the results"""
    # Outlier rejection is disabled so the test takes ~4s rather than ~10s;
    # close_to_spindle_cutoff is pinned to its old default value.
    cmd = ("dials.refine combined_experiments.json combined_reflections.pickle"
           " outlier.algorithm=null close_to_spindle_cutoff=0.05")
    job = easy_run.fully_buffered(command=cmd)
    job.raise_if_errors()
    self._refined_experiments = ExperimentListFactory.from_json_file(
        "refined_experiments.json", check_format=False)
    return
def from_string(self, s):
    """Return cached experiments for filename *s*, loading on first use."""
    from os.path import exists
    from libtbx.utils import Sorry
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory

    if s is None:
        return None
    if s not in self.cache:
        # first request for this file: it must exist on disk
        if not exists(s):
            raise Sorry('File %s does not exist' % s)
        experiments = ExperimentListFactory.from_json_file(
            s, check_format=self._check_format)
        self.cache[s] = FilenameDataWrapper(s, experiments)
    return self.cache[s]
def _refine(self):
    """Do refinement and load the results"""
    # turn off outlier rejection so that test takes about 4s rather than 10s
    # set close_to_spindle_cutoff to old default
    args = ["dials.refine",
            "combined_experiments.json",
            "combined_reflections.pickle",
            "outlier.algorithm=null",
            "close_to_spindle_cutoff=0.05"]
    result = easy_run.fully_buffered(
        command=" ".join(args)).raise_if_errors()
    self._refined_experiments = ExperimentListFactory.from_json_file(
        "refined_experiments.json", check_format=False)
    return
class Test(object): def __init__(self): from dxtbx.model.experiment.experiment_list import ExperimentListFactory import libtbx.load_env from os.path import join try: dials_regression = libtbx.env.dist_path('dials_regression') except KeyError, e: print 'FAIL: dials_regression not configured' exit(0) path = join(dials_regression, "centroid_test_data", "experiments.json") self.experiments = ExperimentListFactory.from_json_file(path)
def from_string(self, s):
    """Look up *s* in the cache, loading the experiment list on a miss."""
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    from os.path import exists
    from libtbx.utils import Sorry

    # None passes straight through
    if s is None:
        return None
    if s not in self.cache:
        if not exists(s):
            raise Sorry('File %s does not exist' % s)
        # wrap the loaded list together with its source filename
        loaded = ExperimentListFactory.from_json_file(
            s, check_format=self._check_format)
        self.cache[s] = FilenameDataWrapper(s, loaded)
    return self.cache[s]
def select_importer(self, args):
    """Pick the importer matching the given XDS output file."""
    from os.path import split
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    import libtbx.load_env

    path, filename = split(args[0])
    if filename == 'SPOT.XDS':
        return SpotXDSImporter(args[0])
    if filename == 'INTEGRATE.HKL':
        # needs an accompanying experiments JSON as the second argument
        assert(len(args) == 2)
        experiments = ExperimentListFactory.from_json_file(args[1])
        assert(len(experiments) == 1)
        return IntegrateHKLImporter(args[0], experiments[0])
    raise RuntimeError('expected (SPOT.XDS|INTEGRATE.HKL), got %s' % filename)
def regression(self): """Check results are as expected""" regression_experiments = ExperimentListFactory.from_json_file( os.path.join(self._data_dir, "regression_experiments.json"), check_format=False) for e1, e2 in zip(self._refined_experiments, regression_experiments): assert e1.crystal.is_similar_to(e2.crystal) # FIXME need is_similar_to for detector that checks geometry #assert e1.detector == e2.detector s0_1 = matrix.col(e1.beam.get_unit_s0()) s0_2 = matrix.col(e1.beam.get_unit_s0()) assert s0_1.accute_angle(s0_2, deg=True) < 0.0057 # ~0.1 mrad print "OK" return
def _create_profile_model(self):
    # Run dials.create_profile_model on the indexed data and record the
    # scan length, oscillation width and fitted mosaicity (sigma_m).
    # Returns True on success, False on a nonzero exit code.
    info("\nCreating profile model...")
    command = [
        "dials.create_profile_model", "experiments.json", "indexed.pickle"
    ]
    result = run_process(command, print_stdout=False, debug=procrunner_debug)
    debug("result = %s" % self._prettyprint_dictionary(result))
    if result['exitcode'] == 0:
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        # inspect the first experiment of the generated output file
        db = ExperimentListFactory.from_json_file(
            'experiments_with_profile_model.json')[0]
        self._num_images = db.imageset.get_scan().get_num_images()
        self._oscillation = db.imageset.get_scan().get_oscillation()[1]
        self._sigma_m = db.profile.sigma_m()
        info("%d images, %s deg. oscillation, sigma_m=%.3f" %
             (self._num_images, str(self._oscillation), self._sigma_m))
        info("Successfully completed (%.1f sec)" % result['runtime'])
        return True
    else:
        warn("Failed with exit code %d" % result['exitcode'])
        return False
def test1(): dials_regression = libtbx.env.find_in_repositories( relative_path="dials_regression", test=os.path.isdir) # use multiple scan small molecule data for this test data_dir = os.path.join(dials_regression, "xia2-28") prefix = ["20", "25", "30", "35"] exp_path = [e + "_integrated_experiments.json" for e in prefix] exp_path = [os.path.join(data_dir, e) for e in exp_path] pkl_path = [e + "_integrated.pickle" for e in prefix] pkl_path = [os.path.join(data_dir, e) for e in pkl_path] for pth in exp_path + pkl_path: assert os.path.exists(pth), "%s missing" % pth cmd = "dials.two_theta_refine " + " ".join(exp_path) + " " + " ".join( pkl_path) + " cif=refined_cell.cif" print cmd # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="test_dials_two_theta_refine") os.chdir(tmp_dir) try: result = easy_run.fully_buffered(command=cmd).raise_if_errors() ref_exp = ExperimentListFactory.from_json_file("refined_cell.json", check_format=False) finally: os.chdir(cwd) xls = ref_exp.crystals() assert len(xls) == 1 # crystal models should have been combined xl = xls[0] # test refined crystal model against expected values assert approx_equal( xl.get_unit_cell().parameters(), (5.428022880, 8.144145476, 12.039666971, 90.0, 90.0, 90.0)) assert approx_equal(xl.get_cell_parameter_sd(), (9.58081e-5, 0.000149909, 0.000215765, 0, 0, 0)) assert approx_equal(xl.get_cell_volume_sd(), 0.0116254298) print "OK" return
def from_dict(cls, obj):
    # Rebuild an Indexer instance from its serialized dict form.
    # Special keys: _indxr_helper is rehydrated as an _IndexerHelper and
    # _indxr_imagesets as imageset objects; any dict tagged as an
    # ExperimentList is deserialized via dxtbx.
    assert obj['__id__'] == 'Indexer'
    assert obj['__name__'] == cls.__name__
    return_obj = cls()
    for k, v in obj.iteritems():
        if k == '_indxr_helper' and v is not None:
            from xia2.Schema.Interfaces.Indexer import _IndexerHelper
            v = _IndexerHelper(v)
        if k == '_indxr_imagesets' and len(v):
            assert v[0].get('__id__') == 'imageset'
            from dxtbx.serialize.imageset import imageset_from_dict
            v = [imageset_from_dict(v_, check_format=False) for v_ in v]
        if isinstance(v, dict):
            if v.get('__id__') == 'ExperimentList':
                from dxtbx.model.experiment.experiment_list import ExperimentListFactory
                v = ExperimentListFactory.from_dict(v, check_format=False)
        setattr(return_obj, k, v)
    return return_obj
def load_reference_geometry(self):
    # Load self.reference_detector from params.input.reference_geometry,
    # which may be either a datablock or an experiment list JSON file.
    # No-op when no reference geometry is configured.
    if self.params.input.reference_geometry is None:
        return

    # try reading the file as a datablock first
    try:
        ref_datablocks = DataBlockFactory.from_json_file(
            self.params.input.reference_geometry, check_format=False)
    except Exception:
        ref_datablocks = None

    if ref_datablocks is None:
        # fall back to interpreting the file as an experiment list
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        try:
            ref_experiments = ExperimentListFactory.from_json_file(
                self.params.input.reference_geometry, check_format=False)
        except Exception:
            raise Sorry("Couldn't load geometry file %s"
                        % self.params.input.reference_geometry)
        # exactly one detector expected in the reference geometry
        assert len(ref_experiments.detectors()) == 1
        self.reference_detector = ref_experiments.detectors()[0]
    else:
        assert len(ref_datablocks) == 1 and len(
            ref_datablocks[0].unique_detectors()) == 1
        self.reference_detector = ref_datablocks[0].unique_detectors()[0]
def from_dict(cls, obj):
    """Reconstruct an Integrater from its serialized dict representation.

    Nested indexer/refiner objects are rehydrated by importing their class
    from the stored __module__/__name__; dicts tagged as ExperimentList or
    imageset are deserialized via dxtbx.
    """
    assert obj['__id__'] == 'Integrater'
    return_obj = cls()
    for k, v in obj.iteritems():
        if k in ('_intgr_indexer', '_intgr_refiner') and v is not None:
            from libtbx.utils import import_python_object
            # FIX: this local previously rebound the classmethod's `cls`
            # argument, shadowing the constructor used above.
            stored_cls = import_python_object(
                import_path=".".join((v['__module__'], v['__name__'])),
                error_prefix='', target_must_be='', where_str='').object
            v = stored_cls.from_dict(v)
        if isinstance(v, dict):
            if v.get('__id__') == 'ExperimentList':
                from dxtbx.model.experiment.experiment_list import ExperimentListFactory
                v = ExperimentListFactory.from_dict(v)
            elif v.get('__id__') == 'imageset':
                from dxtbx.serialize.imageset import imageset_from_dict
                v = imageset_from_dict(v, check_format=False)
        setattr(return_obj, k, v)
    return return_obj
def print_someting(experiments_argv):
    # NOTE(review): function name typo ("someting") kept for callers.
    # Debugging aid: load an experiment list and print crystal, goniometer
    # and orientation-matrix (U/A/B) information to stdout.
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    experiments = ExperimentListFactory.from_json_file(experiments_argv,
                                                       check_format=False)
    print "len(experiments)", len(experiments)
    print experiments[0]
    for exp in experiments:
        print "\n exp =", exp
        #print "dir(exp) =", dir(exp), "\n"
        #print "dir(exp.crystal) =", dir(exp.crystal)
        print "exp.crystal.get_space_group =", exp.crystal.get_space_group()
        print "exp.crystal.get_unit_cell =", exp.crystal.get_unit_cell()
        # only the goniometer is inspected; other models left commented out
        #detc = exp.detector
        #scan = exp.scan
        #prof = exp.profile
        gonio = exp.goniometer
        #print "\n\n dir(detc) =", dir(detc)
        #print "\n\n dir(scan) =", dir(scan)
        #print "\n\n dir(prof) =", dir(prof)
        print "\n dir(gonio) =", dir(gonio), "\n"
        '''
        print "gonio.get_fixed_rotation() =", gonio.get_fixed_rotation()
        print "gonio.get_rotation_axis() =", gonio.get_rotation_axis()
        print "gonio.get_rotation_axis_datum() =", gonio.get_rotation_axis_datum()
        print "gonio.get_setting_rotation() =", gonio.get_setting_rotation()
        '''
        #get_U().elems
        print "\nexp.crystal.get_U() =\n", exp.crystal.get_U().elems
        print "\nexp.crystal.get_A() =\n", exp.crystal.get_A().elems
        print "\nexp.crystal.get_B() =\n", exp.crystal.get_B().elems
    print "Pass 01"
    '''
    NOTE(review): the original source opened another commented-out block
    here that is truncated in this chunk; closed to keep the file valid.
    '''
class Test(object): def __init__(self): import libtbx.load_env try: dials_regression = libtbx.env.dist_path('dials_regression') except KeyError, e: print 'FAIL: dials_regression not configured' exit(0) import dials import os filename = os.path.join(dials_regression, 'centroid_test_data', 'experiments.json') from dxtbx.model.experiment.experiment_list import ExperimentListFactory self.exlist = ExperimentListFactory.from_json_file(filename) assert (len(self.exlist) == 1) from dials.array_family import flex self.rlist = flex.reflection_table.from_predictions_multi(self.exlist)
class Test(object): def __init__(self): import libtbx.load_env try: dials_regression = libtbx.env.dist_path('dials_regression') except KeyError, e: print 'SKIP: dials_regression not configured' exit(0) import os path = os.path.join( dials_regression, 'prediction_test_data', 'experiments_scan_varying_crystal.json') from dxtbx.model.experiment.experiment_list import ExperimentListFactory self.experiments = ExperimentListFactory.from_json_file(path) assert(len(self.experiments) == 1) assert(self.experiments[0].crystal.num_scan_points == self.experiments[0].scan.get_num_images() + 1)
def update_crystal(experiments_path): dat = CrystalData() try: from dxtbx.model.experiment.experiment_list import ExperimentListFactory experiments = ExperimentListFactory.from_json_file(experiments_path, check_format=False) print "len(experiments)", len(experiments) exp = experiments[0] unit_cell = exp.crystal.get_unit_cell() dat.a, dat.b, dat.c, dat.alpha, dat.beta, dat.gamma = unit_cell.parameters( ) except: print "Unable to find cell data" return dat
def test1(): dials_regression = libtbx.env.find_in_repositories(relative_path="dials_regression", test=os.path.isdir) # use multiple scan small molecule data for this test data_dir = os.path.join(dials_regression, "xia2-28") prefix = ["20", "25", "30", "35"] exp_path = [e + "_integrated_experiments.json" for e in prefix] exp_path = [os.path.join(data_dir, e) for e in exp_path] pkl_path = [e + "_integrated.pickle" for e in prefix] pkl_path = [os.path.join(data_dir, e) for e in pkl_path] for pth in exp_path + pkl_path: assert os.path.exists(pth), "%s missing" % pth cmd = "dials.two_theta_refine " + " ".join(exp_path) + " " + " ".join(pkl_path) + " cif=refined_cell.cif" print cmd # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="test_dials_two_theta_refine") os.chdir(tmp_dir) try: result = easy_run.fully_buffered(command=cmd).raise_if_errors() ref_exp = ExperimentListFactory.from_json_file("refined_cell.json", check_format=False) finally: os.chdir(cwd) xls = ref_exp.crystals() assert len(xls) == 1 # crystal models should have been combined xl = xls[0] # test refined crystal model against expected values assert approx_equal(xl.get_unit_cell().parameters(), (5.428022880, 8.144145476, 12.039666971, 90.0, 90.0, 90.0)) assert approx_equal(xl.get_cell_parameter_sd(), (9.58081e-5, 0.000149909, 0.000215765, 0, 0, 0)) assert approx_equal(xl.get_cell_volume_sd(), 0.0116254298) print "OK" return
def load_reference_geometry(self):
  # Set self.reference_detector from params.input.reference_geometry.
  # The file may be either a datablock JSON or an experiment-list JSON;
  # exactly one detector must be present. No-op if no file was given.
  if self.params.input.reference_geometry is None:
    return
  # First attempt: read as a datablock; failure just selects the fallback.
  try:
    ref_datablocks = DataBlockFactory.from_json_file(
      self.params.input.reference_geometry, check_format=False)
  except Exception:
    ref_datablocks = None
  if ref_datablocks is None:
    # Fallback: read as an experiment list; here failure is fatal.
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    try:
      ref_experiments = ExperimentListFactory.from_json_file(
        self.params.input.reference_geometry, check_format=False)
    except Exception:
      raise Sorry("Couldn't load geometry file %s"
                  % self.params.input.reference_geometry)
    assert len(ref_experiments.detectors()) == 1
    self.reference_detector = ref_experiments.detectors()[0]
  else:
    assert len(ref_datablocks) == 1 and len(
      ref_datablocks[0].unique_detectors()) == 1
    self.reference_detector = ref_datablocks[0].unique_detectors()[0]
def test1(): dials_regression = libtbx.env.find_in_repositories( relative_path="dials_regression", test=os.path.isdir) # use the i04_weak_data for this test data_dir = os.path.join(dials_regression, "refinement_test_data", "i04_weak_data") experiments_path = os.path.join(data_dir, "experiments.json") pickle_path = os.path.join(data_dir, "indexed_strong.pickle") for pth in (experiments_path, pickle_path): assert os.path.exists(pth) cmd = "dials.slice_sweep " + experiments_path + " " + pickle_path + \ ' "scan_range=1 20"' print cmd # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="test_dials_slice_sweep") os.chdir(tmp_dir) try: result = easy_run.fully_buffered(command=cmd).raise_if_errors() # load results sliced_exp = ExperimentListFactory.from_json_file( "experiments_1_20.json", check_format=False)[0] with open("indexed_strong_1_20.pickle", "r") as f: sliced_refs = pickle.load(f) finally: os.chdir(cwd) # simple test of results assert sliced_exp.scan.get_image_range() == (1, 20) assert len(sliced_refs) == 3670 print "OK" return
def test1(): dials_regression = libtbx.env.find_in_repositories( relative_path="dials_regression", test=os.path.isdir) # use the i04_weak_data for this test data_dir = os.path.join(dials_regression, "refinement_test_data", "i04_weak_data") experiments_path = os.path.join(data_dir, "experiments.json") pickle_path = os.path.join(data_dir, "indexed_strong.pickle") for pth in (experiments_path, pickle_path): assert os.path.exists(pth) cmd = "dials.slice_sweep " + experiments_path + " " + pickle_path + \ ' "scan_range=1 20"' print cmd # work in a temporary directory cwd = os.path.abspath(os.curdir) tmp_dir = open_tmp_directory(suffix="test_dials_slice_sweep") os.chdir(tmp_dir) try: result = easy_run.fully_buffered(command=cmd).raise_if_errors() # load results sliced_exp = ExperimentListFactory.from_json_file("experiments_1_20.json", check_format=False)[0] with open("indexed_strong_1_20.pickle", "r") as f: sliced_refs = pickle.load(f) finally: os.chdir(cwd) # simple test of results assert sliced_exp.scan.get_image_range() == (1, 20) assert len(sliced_refs) == 3670 print "OK" return
def tst_from_imageset(self): from dxtbx.imageset import ImageSet, NullReader from dxtbx.model import Beam, Detector, Goniometer, Scan from dxtbx.model.crystal import crystal_model imageset = ImageSet(NullReader(["filename.cbf"])) imageset.set_beam(Beam(), 0) imageset.set_detector(Detector(), 0) crystal = crystal_model( (1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=0) experiments = ExperimentListFactory.from_imageset_and_crystal( imageset, crystal) assert(len(experiments) == 1) assert(experiments[0].imageset is not None) assert(experiments[0].beam is not None) assert(experiments[0].detector is not None) assert(experiments[0].crystal is not None) print 'OK'
def tst_from_pickle(self): from os.path import join import os os.environ['DIALS_REGRESSION'] = self.path # Get all the filenames filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json') # Read all the experiment lists in el1 = ExperimentListFactory.from_json_file(filename1) # Pickle then load again el2 = self.pickle_then_unpickle(el1) # All the experiment lists should be the same length assert(len(el1) == 1) assert(len(el1) == len(el2)) # Check all the models are the same for e1, e2 in zip(el1, el2): assert(e1.imageset is not None) assert(e1.beam is not None) assert(e1.detector is not None) assert(e1.goniometer is not None) assert(e1.scan is not None) assert(e1.crystal is not None) assert(e1.imageset == e2.imageset) assert(e1.beam == e2.beam) assert(e1.detector == e2.detector) assert(e1.goniometer == e2.goniometer) assert(e1.scan == e2.scan) assert(e1.crystal == e2.crystal) # test passed print 'OK'
# run_test_single(imageset, 10000000), # run_test_single(imageset, 50000000), # run_test_single(imageset, 100000000), # run_test_single(imageset, 250000000) # ] # outfile = open("profile.txt", "w") # for r in results: # print >>outfile, r[0], r[1] if __name__ == '__main__': import sys from dxtbx.model.experiment.experiment_list import ExperimentListFactory exlist = ExperimentListFactory.from_json_file(sys.argv[1]) if len(sys.argv) > 2: imageset = exlist[0].imageset[0:int(sys.argv[2])] else: imageset = exlist[0].imageset print len(imageset) # run_test(imageset) from time import time st = time() n = int(len(imageset) / 4) for i in range(0, n): image = imageset[i]
def __call__(self, params, options):
  # Import experiments from an XDS processing directory (self.args[0]):
  # locate the best XDS output file, build an ExperimentList from it,
  # optionally attach a scan-varying crystal from INTEGRATE.LP, and
  # print a per-experiment summary.
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  from dxtbx.model.experiment.experiment_list import ExperimentListDumper
  import os
  # Get the XDS.INP file
  xds_inp = os.path.join(self.args[0], 'XDS.INP')
  if params.input.xds_file is None:
    xds_file = XDSFileImporter.find_best_xds_file(self.args[0])
  else:
    xds_file = os.path.join(self.args[0], params.input.xds_file)
  # Check a file is given
  if xds_file is None:
    raise RuntimeError('No XDS file found')
  # Load the experiment list
  # NOTE(review): `unhandled` is never populated below, so the warning
  # block that follows is currently dead code.
  unhandled = []
  experiments = ExperimentListFactory.from_xds(xds_inp, xds_file)
  # Print out any unhandled files
  if len(unhandled) > 0:
    print '-' * 80
    print 'The following command line arguments were not handled:'
    for filename in unhandled:
      print ' %s' % filename
  # Print some general info
  print '-' * 80
  print 'Read %d experiments from %s' % (len(experiments), xds_file)
  # Attempt to create scan-varying crystal model if requested
  if params.read_varying_crystal:
    integrate_lp = os.path.join(self.args[0], 'INTEGRATE.LP')
    if os.path.isfile(integrate_lp):
      self.extract_varying_crystal(integrate_lp, experiments)
    else:
      print "No INTEGRATE.LP to extract varying crystal model. Skipping"
  # Loop through the data blocks
  for i, exp in enumerate(experiments):
    # Print some experiment info
    print "-" * 80
    print "Experiment %d" % i
    print " format: %s" % str(exp.imageset.reader().get_format_class())
    print " type: %s" % type(exp.imageset)
    print " num images: %d" % len(exp.imageset)
    # Print some model info
    if options.verbose > 1:
      print ""
      if exp.beam:
        print exp.beam
      else:
        print "no beam!"
      if exp.detector:
        print exp.detector
      else:
        print "no detector!"
      if exp.goniometer:
        print exp.goniometer
      else:
        print "no goniometer!"
      if exp.scan:
        print exp.scan
      else:
        print "no scan!"
      if exp.crystal:
        print exp.crystal
      else:
        print "no crystal!"
class TestExperimentListDumper(object):
  # Exercises ExperimentListDumper round-trips: JSON, split JSON, pickle,
  # empty sweeps, and external lookup (mask/gain/pedestal) handling.
  # NOTE(review): run() also calls tst_dump_with_bad_lookup() and the
  # comparisons use self.check(); both are defined outside this chunk.

  def __init__(self, path):
    # path: root of the dials_regression checkout
    self.path = path

  def run(self):
    self.tst_dump_formats()
    self.tst_dump_empty_sweep()
    self.tst_dump_with_lookup()
    self.tst_dump_with_bad_lookup()

  def tst_dump_formats(self):
    # Round-trip one experiment list through every supported dump format.
    from uuid import uuid4
    from os.path import join
    import os
    os.environ['DIALS_REGRESSION'] = self.path
    # Get all the filenames
    filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json')
    # Read all the experiment lists in
    elist1 = ExperimentListFactory.from_json_file(filename1)
    # Create the experiment list dumper
    dump = ExperimentListDumper(elist1)
    # Dump as JSON file and reload
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)
    elist2 = ExperimentListFactory.from_json_file(filename)
    self.check(elist1, elist2)
    # Dump as split JSON file and reload
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename, split=True)
    elist2 = ExperimentListFactory.from_json_file(filename)
    self.check(elist1, elist2)
    # Dump as pickle and reload
    filename = 'temp%s.pickle' % uuid4().hex
    dump.as_pickle(filename)
    elist2 = ExperimentListFactory.from_pickle_file(filename)
    self.check(elist1, elist2)

  def tst_dump_empty_sweep(self):
    # A sweep backed by a NullReader (no real images) must still survive
    # a JSON round-trip when reloaded with check_format=False.
    from dxtbx.imageset import ImageSweep, NullReader, SweepFileList
    from dxtbx.model import Beam, Detector, Goniometer, Scan
    from dxtbx.model.crystal import crystal_model
    from uuid import uuid4
    imageset = ImageSweep(NullReader(SweepFileList("filename%01d.cbf", (0, 3))))
    imageset.set_beam(Beam((1, 0, 0)))
    imageset.set_detector(Detector())
    imageset.set_goniometer(Goniometer())
    imageset.set_scan(Scan((1, 3), (0.0, 1.0)))
    crystal = crystal_model((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=1)
    experiments = ExperimentListFactory.from_imageset_and_crystal(
      imageset, crystal)
    dump = ExperimentListDumper(experiments)
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)
    experiments2 = ExperimentListFactory.from_json_file(filename,
                                                        check_format=False)
    self.check(experiments, experiments2)
    print 'OK'

  def tst_dump_with_lookup(self):
    # External lookup data (mask/gain/pedestal) must survive a JSON
    # round-trip with check_format=True.
    from dxtbx.imageset import ImageSweep, NullReader, SweepFileList
    from dxtbx.model import Beam, Detector, Goniometer, Scan
    from dxtbx.model.crystal import crystal_model
    from uuid import uuid4
    import libtbx.load_env
    import os
    from os.path import join
    try:
      dials_regression = libtbx.env.dist_path('dials_regression')
    except KeyError, e:
      print 'FAIL: dials_regression not configured'
      exit(0)
    filename = join(dials_regression, "centroid_test_data",
                    "experiments_with_lookup.json")
    experiments = ExperimentListFactory.from_json_file(
      filename, check_format=True)
    imageset = experiments[0].imageset
    assert imageset.external_lookup.mask.data is not None
    assert imageset.external_lookup.gain.data is not None
    assert imageset.external_lookup.pedestal.data is not None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.all_eq(True)
    assert imageset.external_lookup.gain.data.all_eq(1)
    assert imageset.external_lookup.pedestal.data.all_eq(0)
    dump = ExperimentListDumper(experiments)
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)
    # Reload and verify the lookup data is unchanged
    experiments = ExperimentListFactory.from_json_file(
      filename, check_format=True)
    imageset = experiments[0].imageset
    assert imageset.external_lookup.mask.data is not None
    assert imageset.external_lookup.gain.data is not None
    assert imageset.external_lookup.pedestal.data is not None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.all_eq(True)
    assert imageset.external_lookup.gain.data.all_eq(1)
    assert imageset.external_lookup.pedestal.data.all_eq(0)
      # NOTE(review): fragment starts mid-call -- the enclosing method's
      # opening lines are outside this chunk.
      self.n_sigma * self.sigma_m,
    )

    # Mask the foreground
    mask_foreground(
      refl["shoebox"],
      refl["s1"],
      refl["xyzcal.px"].parts()[2],
      refl["panel"])
    Command.end("Masked foreground for %d reflections" % len(refl))

    # Return the reflections
    return refl


if __name__ == "__main__":
  # Driver: simulate reflections with random intensity for the
  # centroid_test_data experiment (hard-coded local path).
  from math import pi
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  experiments = ExperimentListFactory.from_json_file(
    "/home/upc86896/Projects/cctbx/sources/dials_regression/centroid_test_data/experiments.json",
    check_format=False
  )
  # beam divergence / mosaicity, converted from degrees to radians
  sigma_b = 0.058 * pi / 180
  sigma_m = 0.157 * pi / 180
  n_sigma = 3
  N = 100   # number of reflections
  I = 1000  # mean intensity
  B = 10    # background level
  simulate = Simulator(experiments[0], sigma_b, sigma_m, n_sigma)
  simulate.with_random_intensity(N, I, B)
  # simulate(experiments[0], sigma_b, sigma_m, n_sigma, N, I, B)
    # NOTE(review): truncated fragment -- the `def tst_dump_with_bad_lookup
    # (self):` header above and the closing arguments of the final
    # from_json_file call below are missing from this chunk.
    # Verifies that missing external lookup files leave .data empty while
    # keeping the recorded filenames, across a JSON round-trip.
    from uuid import uuid4
    import libtbx.load_env
    import os
    from os.path import join
    try:
      dials_regression = libtbx.env.dist_path('dials_regression')
    except KeyError, e:
      print 'FAIL: dials_regression not configured'
      exit(0)
    filename = join(dials_regression, "centroid_test_data",
                    "experiments_with_bad_lookup.json")
    experiments = ExperimentListFactory.from_json_file(
      filename, check_format=False)
    imageset = experiments[0].imageset
    # data absent (files are "bad"), but filenames are still recorded
    assert imageset.external_lookup.mask.data is None
    assert imageset.external_lookup.gain.data is None
    assert imageset.external_lookup.pedestal.data is None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    dump = ExperimentListDumper(experiments)
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)
    experiments = ExperimentListFactory.from_json_file(
def experiment_list(infile, check_format=True):
  """Load an ExperimentList from a serialized file.

  :param infile: path to the serialized experiment list
  :param check_format: if True, verify the image format on load
  :return: the deserialized ExperimentList
  """
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  loader = ExperimentListFactory.from_serialized_format
  return loader(infile, check_format=check_format)
def reconstruct_rogues(params):
  # Match "rogue" reflections listed in an AIMLESS ROGUES file back to the
  # integrated reflections of each sweep (nearest-neighbour in calculated
  # pixel/frame space), optionally extract their shoeboxes, and write the
  # selected reflections out as pickle files.
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  import cPickle as pickle
  import dials # because WARNING:root:No profile class gaussian_rs registered
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1
  # single-crystal project: the loop just binds the one crystal/xname
  # NOTE(review): loop extent reconstructed from a collapsed source line --
  # confirm only the assignment belongs inside the loop.
  for xname in crystals:
    crystal = crystals[xname]
  scaler = crystal._get_scaler()
  epochs = scaler._sweep_handler.get_epochs()
  rogues = os.path.join(scaler.get_working_directory(),
                        xname, 'scale', 'ROGUES')
  rogue_reflections = munch_rogues(rogues)
  # map batch range -> (experiments, reflections, sweep name)
  batched_reflections = { }
  for epoch in epochs:
    si = scaler._sweep_handler.get_sweep_information(epoch)
    intgr = si.get_integrater()
    experiments = ExperimentListFactory.from_json_file(
      intgr.get_integrated_experiments())
    # NOTE(review): file handle from open() is never closed explicitly
    reflections = pickle.load(open(intgr.get_integrated_reflections()))
    batched_reflections[si.get_batch_range()] = (experiments, reflections,
                                                 si.get_sweep_name())
  # - look up reflection in reflection list, get bounding box
  # - pull pixels given from image set, flatten these, write out
  from dials.array_family import flex
  from annlib_ext import AnnAdaptor as ann_adaptor
  # bucket each rogue into the sweep whose batch range contains it
  reflections_run = { }
  for run in batched_reflections:
    reflections_run[run] = []
  for rogue in rogue_reflections:
    b = rogue[0]  # batch number of this rogue
    for run in batched_reflections:
      if b >= run[0] and b <= run[1]:
        reflections_run[run].append(rogue)
        break
  for run_no, run in enumerate(reflections_run):
    experiment = batched_reflections[run][0]
    reflections = batched_reflections[run][1]
    name = batched_reflections[run][2]
    rogues = reflections_run[run]
    # flatten calculated centroids (x, y, frame) for the ANN search
    reference = flex.double()
    scan = experiment.scans()[0]
    images = experiment.imagesets()[0]
    for xyz in reflections['xyzcal.px']:
      reference.append(xyz[0])
      reference.append(xyz[1])
      reference.append(xyz[2])
    # build query points: rogue (x, y) plus frame derived from phi
    search = flex.double()
    for rogue in rogues:
      search.append(rogue[1])
      search.append(rogue[2])
      search.append(scan.get_array_index_from_angle(rogue[3]))
    # nearest neighbour match in 3D (pixel, pixel, frame) space
    ann = ann_adaptor(data=reference, dim=3, k=1)
    ann.query(search)
    keep = flex.bool(len(reflections), False)
    for j, rogue in enumerate(rogues):
      keep[ann.nn[j]] = True
    reflections = reflections.select(keep==True)
    if params.extract:
      reflections["shoebox"] = flex.shoebox(
        reflections["panel"], reflections["bbox"], allocate=True)
      reflections.extract_shoeboxes(images, verbose=False)
    if len(reflections_run) > 1:
      # one output file per sweep, tagged with the sweep name
      output = params.output.reflections.replace(
        '.pickle', '-%s.pickle' % name)
      print 'Extracted %d rogue reflections for %s to %s' % \
        (len(reflections), name, output)
      reflections.as_pickle(output)
    else:
      output = params.output.reflections
      print 'Extracted %d rogue reflections to %s' % \
        (len(reflections), output)
      reflections.as_pickle(output)
def get_dials_coordinate_frame(experiments_json):
  """Return (beam direction, goniometer rotation axis) for the first
  experiment in the given experiments JSON file."""
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  first = ExperimentListFactory.from_json_file(experiments_json)[0]
  return first.beam.get_direction(), first.goniometer.get_rotation_axis()
def test4():
  '''Test group restraint with multiple crystals, and a stills refiner'''
  if not libtbx.env.has_module("dials_regression"):
    print "Skipping test2 in " + __file__ + " as dials_regression not present"
    return
  # The phil scope
  from dials.algorithms.refinement.refiner import phil_scope
  # NOTE(review): phil text reformatted from a collapsed source line;
  # phil parsing is whitespace-insensitive so the content is equivalent.
  user_phil = parse('''
  refinement
  {
    parameterisation
    {
      crystal
      {
        unit_cell
        {
          restraints
          {
            tie_to_group
            {
              sigmas=1,0,2,0,0,0
              apply_to_all=true
            }
          }
        }
      }
    }
  }
  ''')
  working_phil = phil_scope.fetch(source=user_phil)
  working_params = working_phil.extract()
  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression", test=os.path.isdir)
  # use the multi stills test data
  data_dir = os.path.join(dials_regression, "refinement_test_data",
                          "multi_stills")
  experiments_path = os.path.join(data_dir, "combined_experiments.json")
  pickle_path = os.path.join(data_dir, "combined_reflections.pickle")
  experiments = ExperimentListFactory.from_json_file(experiments_path,
                check_format=False)
  reflections = flex.reflection_table.from_pickle(pickle_path)
  refiner = RefinerFactory.from_parameters_data_experiments(working_params,
            reflections, experiments)
  # hack to extract the objects needed from the Refiner
  rp = refiner._target._restraints_parameterisation
  pred_param = refiner._pred_param
  # get analytical values and gradients
  vals, grads, weights = rp.get_residuals_gradients_and_weights()
  # get finite difference gradients
  p_vals = pred_param.get_param_vals()
  deltas = [1.e-7] * len(p_vals)
  fd_grad=[]
  # central finite differences: perturb each parameter +/- delta/2 in turn
  for i in range(len(deltas)):
    val = p_vals[i]
    p_vals[i] -= deltas[i] / 2.
    pred_param.set_param_vals(p_vals)
    rev_state, foo, bar = rp.get_residuals_gradients_and_weights()
    rev_state = flex.double(rev_state)
    p_vals[i] += deltas[i]
    pred_param.set_param_vals(p_vals)
    fwd_state, foo, bar = rp.get_residuals_gradients_and_weights()
    fwd_state = flex.double(fwd_state)
    # restore the unperturbed value in the list
    # NOTE(review): set_param_vals is not called again here, so the model
    # keeps the last forward-perturbed value until the next iteration.
    p_vals[i] = val
    fd = (fwd_state - rev_state) / deltas[i]
    fd_grad.append(fd)
  # for comparison, fd_grad is a list of flex.doubles, each of which corresponds
  # to the gradients of the residuals wrt to a single parameter.
  pnames = pred_param.get_param_names()
  for i, (pname, fd) in enumerate(zip(pnames, fd_grad)):
    # extract dense column from the sparse matrix
    an = grads.col(i).as_dense_vector()
    #print pname
    #print list(an.round(6))
    #print list(fd.round(6))
    #print
    assert approx_equal(an, fd, eps=1e-5)
  print "OK"
  return
  # NOTE(review): these two methods belong to a writer class whose header
  # is outside this chunk; both are currently no-op stubs.
  def _write_pickle(self, batch):
    pass

  def _write_predictions(self, predictions):
    pass


@contextmanager
def open_shoebox_writer(filename):
  # Context manager yielding a ShoeboxWriter for `filename`.
  # NOTE(review): no cleanup/close after the yield -- confirm ShoeboxWriter
  # needs none, or that it is handled elsewhere.
  writer = ShoeboxWriter(filename)
  yield writer


if __name__ == '__main__':
  # Driver: predict reflections for a hard-coded experiments file, drop
  # those with |zeta| < 0.05, and write their shoeboxes to a tar archive.
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  from dials.array_family import flex
  experiments = ExperimentListFactory.from_json_file(
    '/home/upc86896/Data/Data/i04-BAG-training/dials_processed/experiments.json')
  predictions = flex.reflection_table.from_predictions(experiments[0])
  predictions.compute_bbox(experiments[0], nsigma=3, sigma_d=0.024,
                           sigma_m=0.044)
  zeta = predictions.compute_zeta(experiments[0])
  # reflections too close to the rotation axis are unreliable; remove them
  mask = flex.abs(zeta) < 0.05
  predictions.del_selected(mask)
  with open_shoebox_writer("extracted.tar") as writer:
    writer.write(predictions, experiments[0].imageset)
def run(): from dxtbx.model.experiment.experiment_list import ExperimentListFactory from os.path import join import libtbx.load_env try: dials_regression = libtbx.env.dist_path('dials_regression') except KeyError, e: print 'FAIL: dials_regression not configured' exit(0) path = join(dials_regression, "nexus_test_data", "shared_models") filename_list = [ 'single', 'multiple_unrelated', 'multi_crystal', 'two_colour', 'multiple_sweeps', 'stills' ] for filename in filename_list: filename_in = join(path, "%s.json" % filename) filename_out = "%s.nxs" % filename experiments = ExperimentListFactory.from_json_file(filename_in) run_single(experiments, filename_out) if __name__ == '__main__': from dials.test import cd_auto with cd_auto(__file__): test_polarization_conversion() run()