  def tst_dump_formats(self):
    from uuid import uuid4
    from os.path import join
    import os

    os.environ['DIALS_REGRESSION'] = self.path

    # Get all the filenames
    filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json')

    # Read all the experiment lists in
    elist1 = ExperimentListFactory.from_json_file(filename1)

    # Create the experiment list dumper
    dump = ExperimentListDumper(elist1)

    # Dump as JSON file and reload
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)
    elist2 = ExperimentListFactory.from_json_file(filename)
    self.check(elist1, elist2)

    # Dump as split JSON file and reload
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename, split=True)
    elist2 = ExperimentListFactory.from_json_file(filename)
    self.check(elist1, elist2)

    # Dump as pickle and reload
    filename = 'temp%s.pickle' % uuid4().hex
    dump.as_pickle(filename)
    elist2 = ExperimentListFactory.from_pickle_file(filename)
    self.check(elist1, elist2)
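
A minimal sketch of the dump/reload round trip exercised above, with the temporary
file removed afterwards. It assumes ExperimentListDumper is importable alongside
ExperimentListFactory from dxtbx.model.experiment.experiment_list (the examples use
the dumper without showing its import) and uses a hypothetical input path:

import os
from uuid import uuid4
from dxtbx.model.experiment.experiment_list import (
    ExperimentListFactory, ExperimentListDumper)

elist1 = ExperimentListFactory.from_json_file('experiment_1.json')  # hypothetical input
filename = 'temp%s.json' % uuid4().hex
try:
    ExperimentListDumper(elist1).as_json(filename)            # write JSON
    elist2 = ExperimentListFactory.from_json_file(filename)   # reload it
    assert len(elist1) == len(elist2)
finally:
    if os.path.exists(filename):
        os.remove(filename)                                   # tidy up the temporary file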
Example #3
def test1():

    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "multi_stills")
    experiments_path = os.path.join(data_dir, "combined_experiments.json")
    reflections_path = os.path.join(data_dir, "combined_reflections.pickle")
    cmd = "dials.refine " + experiments_path + " " + reflections_path
    print cmd

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="tst_refine_multi_stills1")
    os.chdir(tmp_dir)
    try:
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()
        # load results
        reg_exp = ExperimentListFactory.from_json_file(os.path.join(
            data_dir, "regression_experiments.json"),
                                                       check_format=False)
        ref_exp = ExperimentListFactory.from_json_file(
            "refined_experiments.json", check_format=False)
    finally:
        os.chdir(cwd)
    print "OK"

    # compare results
    tol = 1e-5
    for b1, b2 in zip(reg_exp.beams(), ref_exp.beams()):
        assert b1.is_similar_to(b2,
                                wavelength_tolerance=tol,
                                direction_tolerance=tol,
                                polarization_normal_tolerance=tol,
                                polarization_fraction_tolerance=tol)
        s0_1 = matrix.col(b1.get_unit_s0())
        s0_2 = matrix.col(b2.get_unit_s0())
        assert s0_1.accute_angle(s0_2, deg=True) < 0.0057  # ~0.1 mrad
    for c1, c2 in zip(reg_exp.crystals(), ref_exp.crystals()):
        assert c1.is_similar_to(c2)

    for d1, d2 in zip(reg_exp.detectors(), ref_exp.detectors()):
        assert d1.is_similar_to(d2,
                                fast_axis_tolerance=1e-4,
                                slow_axis_tolerance=1e-4,
                                origin_tolerance=1e-2)

    print "OK"

    return
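
For reference, the 0.0057 degree beam-direction tolerance asserted above is just the
~0.1 mrad noted in the comment converted to degrees: 0.0001 rad * 180/pi ≈ 0.0057 degrees.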
def test1():

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  data_dir = os.path.join(dials_regression, "refinement_test_data",
                          "multi_stills")
  experiments_path = os.path.join(data_dir, "combined_experiments.json")
  reflections_path = os.path.join(data_dir, "combined_reflections.pickle")
  cmd = "dials.refine " + experiments_path + " " + reflections_path
  print cmd

  # work in a temporary directory
  cwd = os.path.abspath(os.curdir)
  tmp_dir = open_tmp_directory(suffix="tst_refine_multi_stills1")
  os.chdir(tmp_dir)
  try:
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()
    # load results
    reg_exp = ExperimentListFactory.from_json_file(
                os.path.join(data_dir, "regression_experiments.json"),
                check_format=False)
    ref_exp = ExperimentListFactory.from_json_file("refined_experiments.json",
                check_format=False)
  finally:
    os.chdir(cwd)
    # clean up tmp dir
    shutil.rmtree(tmp_dir)
  print "OK"

  # compare results
  tol = 1e-5
  for b1, b2 in zip(reg_exp.beams(), ref_exp.beams()):
    assert b1.is_similar_to(b2, wavelength_tolerance=tol,
                                direction_tolerance=tol,
                                polarization_normal_tolerance=tol,
                                polarization_fraction_tolerance=tol)
    s0_1 = matrix.col(b1.get_unit_s0())
    s0_2 = matrix.col(b2.get_unit_s0())
    assert s0_1.accute_angle(s0_2, deg=True) < 0.0057 # ~0.1 mrad
  for c1, c2 in zip(reg_exp.crystals(), ref_exp.crystals()):
    assert c1.is_similar_to(c2)

  for d1, d2 in zip(reg_exp.detectors(), ref_exp.detectors()):
    assert d1.is_similar_to(d2,
      fast_axis_tolerance=1e-4, slow_axis_tolerance=1e-4, origin_tolerance=1e-2)

  print "OK"

  return
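
Both variants of test1 above use the same "work in a temporary directory" idiom: remember
the current directory, chdir into a scratch directory, and restore (and optionally remove)
it in a finally block. A standard-library sketch of that idiom as a context manager, with
tempfile.mkdtemp standing in for libtbx's open_tmp_directory:

import contextlib
import os
import shutil
import tempfile

@contextlib.contextmanager
def temp_working_dir(suffix=""):
    cwd = os.path.abspath(os.curdir)
    tmp_dir = tempfile.mkdtemp(suffix=suffix)  # stand-in for open_tmp_directory
    os.chdir(tmp_dir)
    try:
        yield tmp_dir
    finally:
        os.chdir(cwd)           # always restore the working directory
        shutil.rmtree(tmp_dir)  # clean up, as the second variant above does

Usage would then read: with temp_working_dir(suffix="tst_refine_multi_stills1"): run the
refinement command inside the block.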
Example #5
class TestSummation(object):
    def __init__(self):
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        from dials.algorithms.profile_model.gaussian_rs import Model
        import libtbx.load_env
        from dials.array_family import flex
        from os.path import join
        from math import pi
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError, e:
            print 'FAIL: dials_regression not configured'
            exit(0)

        path = join(dials_regression, "centroid_test_data", "experiments.json")

        exlist = ExperimentListFactory.from_json_file(path)
        exlist[0].profile = Model(None,
                                  n_sigma=3,
                                  sigma_b=0.024 * pi / 180.0,
                                  sigma_m=0.044 * pi / 180.0)

        rlist = flex.reflection_table.from_predictions(exlist[0])
        rlist['id'] = flex.int(len(rlist), 0)
        self.rlist = rlist
        self.exlist = exlist
Example #6
    def run(self):
        from os.path import join
        from libtbx import easy_run
        from dials.algorithms.profile_model.factory import phil_scope
        from libtbx.phil import parse
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory

        # Call dials.create_profile_model
        easy_run.fully_buffered([
            'dials.create_profile_model',
            join(self.path, 'experiments.json'),
            join(self.path, 'indexed.pickle'),
        ]).raise_if_errors()

        experiments = ExperimentListFactory.from_json_file(
            "experiments_with_profile_model.json", check_format=False)
        sigma_b = experiments[0].profile.sigma_b(deg=True)
        sigma_m = experiments[0].profile.sigma_m(deg=True)
        eps = 1e-3
        try:
            assert (abs(sigma_b - 0.02195) < eps)
            assert (abs(sigma_m - 0.06833) < eps)
        except Exception:
            print sigma_b
            print sigma_m
            raise
        print 'OK'
  def tst_dump_empty_sweep(self):
    from dxtbx.imageset import ImageSweep, NullReader, SweepFileList
    from dxtbx.model import Beam, Detector, Goniometer, Scan
    from dxtbx.model.crystal import crystal_model
    from uuid import uuid4

    imageset = ImageSweep(NullReader(SweepFileList("filename%01d.cbf", (0, 3))))
    imageset.set_beam(Beam((1, 0, 0)))
    imageset.set_detector(Detector())
    imageset.set_goniometer(Goniometer())
    imageset.set_scan(Scan((1, 3), (0.0, 1.0)))

    crystal = crystal_model((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=1)

    experiments = ExperimentListFactory.from_imageset_and_crystal(
      imageset, crystal)

    dump = ExperimentListDumper(experiments)
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)
    experiments2 = ExperimentListFactory.from_json_file(filename,
                                                        check_format=False)
    self.check(experiments, experiments2)

    print 'OK'
def print_someting(experiments_argv):

    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    experiments = ExperimentListFactory.from_json_file(experiments_argv,
                                                       check_format=False)

    print "len(experiments)", len(experiments)
    print experiments[0]

    for exp in experiments:
        print "\n\n exp =", exp
        print "dir(exp) =", dir(exp), "\n\n"

        #print "dir(exp.crystal) =", dir(exp.crystal)

        print "exp.crystal.get_space_group =", exp.crystal.get_space_group()
        print "exp.crystal.get_unit_cell =", exp.crystal.get_unit_cell()

        #detc = exp.detector
        #scan = exp.scan
        #prof = exp.profile
        gonio = exp.goniometer

        #print "\n\n dir(detc) =", dir(detc)
        #print "\n\n dir(scan) =", dir(scan)
        #print "\n\n dir(prof) =", dir(prof)
        print "\n\n dir(goni) =", dir(gonio)

    print "Pass"
class Test(object):
    def __init__(self):
        from math import pi

        import libtbx.load_env
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError, e:
            print 'FAIL: dials_regression not configured'
            exit(0)

        import os

        filename = os.path.join(dials_regression, 'centroid_test_data',
                                'fake_long_experiments.json')

        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        from dxtbx.model.experiment.experiment_list import ExperimentList
        exlist = ExperimentListFactory.from_json_file(filename)
        assert (len(exlist) == 1)
        self.experiment = exlist[0]

        # Set the delta_divergence/mosaicity
        self.n_sigma = 5
        self.sigma_b = 0.060 * pi / 180
        self.sigma_m = 0.154 * pi / 180

        from dials.algorithms.profile_model.gaussian_rs import Model
        self.profile_model = Model(None, self.n_sigma, self.sigma_b,
                                   self.sigma_m)
        self.experiment.profile = self.profile_model
        self.experiments = ExperimentList()
        self.experiments.append(self.experiment)
  def run(self):
    from os.path import join
    from libtbx import easy_run
    from dials.algorithms.profile_model.factory import phil_scope
    from libtbx.phil import parse
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory

    # Call dials.create_profile_model
    easy_run.fully_buffered([
      'dials.create_profile_model',
      join(self.path, 'experiments.json'),
      join(self.path, 'indexed.pickle'),
    ]).raise_if_errors()


    experiments =  ExperimentListFactory.from_json_file(
      "experiments_with_profile_model.json",
      check_format=False)
    sigma_b = experiments[0].profile.sigma_b(deg=True)
    sigma_m = experiments[0].profile.sigma_m(deg=True)
    eps = 1e-3
    try:
      assert(abs(sigma_b - 0.02195) < eps)
      assert(abs(sigma_m - 0.06833) < eps)
    except Exception:
      print sigma_b
      print sigma_m
      raise
    print 'OK'
Example #12
class Test(object):
    def __init__(self):
        import libtbx.load_env
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError, e:
            print 'SKIP: dials_regression not configured'
            exit(0)

        import os
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        path = os.path.join(dials_regression, 'centroid_test_data',
                            'experiments.json')

        self.experiments = ExperimentListFactory.from_json_file(path)
        assert (len(self.experiments) == 1)
        self.experiments[0].imageset.set_beam(self.experiments[0].beam)
        self.experiments[0].imageset.set_detector(self.experiments[0].detector)
        self.experiments[0].imageset.set_goniometer(
            self.experiments[0].goniometer)
        self.experiments[0].imageset.set_scan(self.experiments[0].scan)

        reflection_filename = os.path.join(dials_regression,
                                           'prediction_test_data',
                                           'expected_reflections.pickle')

        from dials.array_family import flex
        self.reflections = flex.reflection_table.from_pickle(
            reflection_filename)
def test2():
  """Compare results of multiprocess vs single process refinement to ensure
  they are the same"""

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  data_dir = os.path.join(dials_regression, "refinement_test_data",
                          "multi_stills")
  experiments_path = os.path.join(data_dir, "combined_experiments.json")
  reflections_path = os.path.join(data_dir, "combined_reflections.pickle")
  cmd = "dials.refine " + experiments_path + " " + reflections_path + \
        " outlier.algorithm=null engine=LBFGScurvs output.reflections=None "
  cmd1 = cmd + "output.experiments=refined_experiments_nproc1.json nproc=1"
  print cmd1

  cmd2= cmd + "output.experiments=refined_experiments_nproc4.json nproc=4"
  print cmd2
  # work in a temporary directory
  cwd = os.path.abspath(os.curdir)
  tmp_dir = open_tmp_directory(suffix="tst_refine_multi_stills2")
  os.chdir(tmp_dir)
  try:
    result1 = easy_run.fully_buffered(command=cmd1).raise_if_errors()
    result2 = easy_run.fully_buffered(command=cmd2).raise_if_errors()
    # load results
    nproc1 = ExperimentListFactory.from_json_file(
      "refined_experiments_nproc1.json", check_format=False)
    nproc4 = ExperimentListFactory.from_json_file(
      "refined_experiments_nproc4.json", check_format=False)
  finally:
    os.chdir(cwd)
    # clean up tmp dir
    shutil.rmtree(tmp_dir)
  print "OK"

  # compare results
  for b1, b2 in zip(nproc1.beams(), nproc4.beams()):
    assert b1.is_similar_to(b2)
  for c1, c2 in zip(nproc1.crystals(), nproc4.crystals()):
    assert c1.is_similar_to(c2)
  for d1, d2 in zip(nproc1.detectors(), nproc4.detectors()):
    assert d1.is_similar_to(d2,
      fast_axis_tolerance=5e-5, slow_axis_tolerance=5e-5, origin_tolerance=5e-5)
  print "OK"
  return
Example #14
def test2():
    """Compare results of multiprocess vs single process refinement to ensure
  they are the same"""

    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "multi_stills")
    experiments_path = os.path.join(data_dir, "combined_experiments.json")
    reflections_path = os.path.join(data_dir, "combined_reflections.pickle")
    cmd = "dials.refine " + experiments_path + " " + reflections_path + \
          " outlier.algorithm=null engine=LBFGScurvs output.reflections=None "
    cmd1 = cmd + "output.experiments=refined_experiments_nproc1.json nproc=1"
    print cmd1

    cmd2 = cmd + "output.experiments=refined_experiments_nproc4.json nproc=4"
    print cmd2
    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="tst_refine_multi_stills2")
    os.chdir(tmp_dir)
    try:
        result1 = easy_run.fully_buffered(command=cmd1).raise_if_errors()
        result2 = easy_run.fully_buffered(command=cmd2).raise_if_errors()
        # load results
        nproc1 = ExperimentListFactory.from_json_file(
            "refined_experiments_nproc1.json", check_format=False)
        nproc4 = ExperimentListFactory.from_json_file(
            "refined_experiments_nproc4.json", check_format=False)
    finally:
        os.chdir(cwd)
    print "OK"

    # compare results
    for b1, b2 in zip(nproc1.beams(), nproc4.beams()):
        assert b1.is_similar_to(b2)
    for c1, c2 in zip(nproc1.crystals(), nproc4.crystals()):
        assert c1.is_similar_to(c2)
    for d1, d2 in zip(nproc1.detectors(), nproc4.detectors()):
        assert d1.is_similar_to(d2,
                                fast_axis_tolerance=5e-5,
                                slow_axis_tolerance=5e-5,
                                origin_tolerance=5e-5)
    print "OK"
    return
  def tst_from_json(self):
    from os.path import join
    import os

    os.environ['DIALS_REGRESSION'] = self.path

    # Get all the filenames
    filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json')
    filename2 = join(self.path, 'experiment_test_data', 'experiment_2.json')
    filename3 = join(self.path, 'experiment_test_data', 'experiment_3.json')
    filename4 = join(self.path, 'experiment_test_data', 'experiment_4.json')

    # Read all the experiment lists in
    el1 = ExperimentListFactory.from_json_file(filename1)
    el2 = ExperimentListFactory.from_json_file(filename2)
    el3 = ExperimentListFactory.from_json_file(filename3)
    el4 = ExperimentListFactory.from_json_file(filename4)

    # All the experiment lists should be the same length
    assert(len(el1) == 1)
    assert(len(el1) == len(el2))
    assert(len(el1) == len(el3))
    assert(len(el1) == len(el4))

    # Check all the models are the same
    for e in zip(el1, el2, el3, el4):
      e1 = e[0]
      assert(e1.imageset is not None)
      assert(e1.beam is not None)
      assert(e1.detector is not None)
      assert(e1.goniometer is not None)
      assert(e1.scan is not None)
      assert(e1.crystal is not None)
      for ee in e[1:]:
        assert(e1.imageset == ee.imageset)
        assert(e1.beam == ee.beam)
        assert(e1.detector == ee.detector)
        assert(e1.goniometer == ee.goniometer)
        assert(e1.scan == ee.scan)
        assert(e1.crystal == ee.crystal)

    # test passed
    print 'OK'
Example #16
def test1():

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  # use the i04_weak_data for this test
  data_dir = os.path.join(dials_regression, "refinement_test_data", "i04_weak_data")
  experiments_path = os.path.join(data_dir, "experiments.json")
  pickle_path = os.path.join(data_dir, "indexed_strong.pickle")

  for pth in (experiments_path, pickle_path):
    assert os.path.exists(pth)

  # set close_to_spindle_cutoff to old default
  cmd = "dials.refine close_to_spindle_cutoff=0.05 reflections_per_degree=100 " + \
        experiments_path + " " + pickle_path
  print cmd

  # work in a temporary directory
  cwd = os.path.abspath(os.curdir)
  tmp_dir = open_tmp_directory(suffix="test_dials_refine")
  os.chdir(tmp_dir)
  try:
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()
    # load results
    reg_exp = ExperimentListFactory.from_json_file(
                os.path.join(data_dir, "regression_experiments.json"),
                check_format=False)[0]
    ref_exp = ExperimentListFactory.from_json_file("refined_experiments.json",
                check_format=False)[0]
  finally:
    os.chdir(cwd)
    # clean up tmp dir
    shutil.rmtree(tmp_dir)

  # test refined models against expected
  assert reg_exp.crystal == ref_exp.crystal
  assert reg_exp.detector == ref_exp.detector
  assert reg_exp.beam == ref_exp.beam

  print "OK"
  return
  def tst_from_json(self):
    from os.path import join
    import os

    os.environ['DIALS_REGRESSION'] = self.path

    # Get all the filenames
    filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json')
    filename2 = join(self.path, 'experiment_test_data', 'experiment_2.json')
    filename3 = join(self.path, 'experiment_test_data', 'experiment_3.json')
    filename4 = join(self.path, 'experiment_test_data', 'experiment_4.json')

    # Read all the experiment lists in
    el1 = ExperimentListFactory.from_json_file(filename1)
    #el2 = ExperimentListFactory.from_json_file(filename2)
    el3 = ExperimentListFactory.from_json_file(filename3)
    el4 = ExperimentListFactory.from_json_file(filename4)

    # All the experiment lists should be the same length
    assert(len(el1) == 1)
    #assert(len(el1) == len(el2))
    assert(len(el1) == len(el3))
    assert(len(el1) == len(el4))

    # Check all the models are the same
    for e in zip(el1, el3, el4):
      e1 = e[0]
      assert(e1.imageset is not None)
      assert(e1.beam is not None)
      assert(e1.detector is not None)
      assert(e1.goniometer is not None)
      assert(e1.scan is not None)
      assert(e1.crystal is not None)
      for ee in e[1:]:
        assert(e1.imageset == ee.imageset)
        assert(e1.beam == ee.beam)
        assert(e1.detector == ee.detector)
        assert(e1.goniometer == ee.goniometer)
        assert(e1.scan == ee.scan)
        assert(e1.crystal == ee.crystal)

    # test passed
    print 'OK'
Example #18
def test1():

    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    # use the i04_weak_data for this test
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "i04_weak_data")
    experiments_path = os.path.join(data_dir, "experiments.json")
    pickle_path = os.path.join(data_dir, "indexed_strong.pickle")

    for pth in (experiments_path, pickle_path):
        assert os.path.exists(pth)

    # set some old defaults
    cmd = "dials.refine close_to_spindle_cutoff=0.05 reflections_per_degree=100 " + \
          "outlier.separate_blocks=False " + experiments_path + " " + pickle_path
    print cmd

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_refine")
    os.chdir(tmp_dir)
    try:
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()
        # load results
        reg_exp = ExperimentListFactory.from_json_file(os.path.join(
            data_dir, "regression_experiments.json"),
                                                       check_format=False)[0]
        ref_exp = ExperimentListFactory.from_json_file(
            "refined_experiments.json", check_format=False)[0]
    finally:
        os.chdir(cwd)

    # test refined models against expected
    assert reg_exp.crystal == ref_exp.crystal
    assert reg_exp.detector == ref_exp.detector
    assert reg_exp.beam == ref_exp.beam

    print "OK"
    return
  def _refine(self):
    """Do refinement and load the results"""

    # turn off outlier rejection so that test takes about 4s rather than 10s
    # set close_to_spindle_cutoff to old default
    cmd = ("dials.refine combined_experiments.json combined_reflections.pickle"
           " outlier.algorithm=null close_to_spindle_cutoff=0.05")
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()

    self._refined_experiments = ExperimentListFactory.from_json_file(
      "refined_experiments.json", check_format=False)
    return
Example #20
    def _refine(self):
        """Do refinement and load the results"""

        # turn off outlier rejection so that test takes about 4s rather than 10s
        # set close_to_spindle_cutoff to old default
        cmd = (
            "dials.refine combined_experiments.json combined_reflections.pickle"
            " outlier.algorithm=null close_to_spindle_cutoff=0.05")
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()

        self._refined_experiments = ExperimentListFactory.from_json_file(
            "refined_experiments.json", check_format=False)
        return
Example #21
 def from_string(self, s):
   from dxtbx.model.experiment.experiment_list import ExperimentListFactory
   from os.path import exists
   from libtbx.utils import Sorry
   if s is None:
     return None
   if s not in self.cache:
     if not exists(s):
       raise Sorry('File %s does not exist' % s)
     self.cache[s] = FilenameDataWrapper(s,
       ExperimentListFactory.from_json_file(s,
         check_format=self._check_format))
   return self.cache[s]
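
The from_string converter above parses each experiments file at most once by caching on the
file path. The same idea, sketched without the libtbx FilenameDataWrapper/Sorry wrappers and
assuming only the ExperimentListFactory import used throughout these examples:

import os
from dxtbx.model.experiment.experiment_list import ExperimentListFactory

class ExperimentListCache(object):
    """Parse each experiment JSON at most once, keyed by its path."""

    def __init__(self, check_format=False):
        self._check_format = check_format
        self._cache = {}

    def get(self, path):
        if path is None:
            return None
        if path not in self._cache:
            if not os.path.exists(path):
                raise IOError('File %s does not exist' % path)
            self._cache[path] = ExperimentListFactory.from_json_file(
                path, check_format=self._check_format)
        return self._cache[path]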
Example #22
 def select_importer(self, args):
   from os.path import split
   from dxtbx.model.experiment.experiment_list import ExperimentListFactory
   import libtbx.load_env
   path, filename = split(args[0])
   if filename == 'SPOT.XDS':
     return SpotXDSImporter(args[0])
   elif filename == 'INTEGRATE.HKL':
     assert(len(args) == 2)
     experiments = ExperimentListFactory.from_json_file(args[1])
     assert(len(experiments) == 1)
     return IntegrateHKLImporter(args[0], experiments[0])
   else:
     raise RuntimeError('expected (SPOT.XDS|INTEGRATE.HKL), got %s' % filename)
class Test(object):
    def __init__(self):
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        import libtbx.load_env
        from os.path import join
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError, e:
            print 'FAIL: dials_regression not configured'
            exit(0)

        path = join(dials_regression, "centroid_test_data", "experiments.json")

        self.experiments = ExperimentListFactory.from_json_file(path)
Example #24
 def from_string(self, s):
     from dxtbx.model.experiment.experiment_list import ExperimentListFactory
     from os.path import exists
     from libtbx.utils import Sorry
     if s is None:
         return None
     if s not in self.cache:
         if not exists(s):
             raise Sorry('File %s does not exist' % s)
         self.cache[s] = FilenameDataWrapper(
             s,
             ExperimentListFactory.from_json_file(
                 s, check_format=self._check_format))
     return self.cache[s]
Example #25
    def regression(self):
        """Check results are as expected"""

        regression_experiments = ExperimentListFactory.from_json_file(
            os.path.join(self._data_dir, "regression_experiments.json"),
            check_format=False)

        for e1, e2 in zip(self._refined_experiments, regression_experiments):
            assert e1.crystal.is_similar_to(e2.crystal)
            # FIXME need is_similar_to for detector that checks geometry
            #assert e1.detector == e2.detector
            s0_1 = matrix.col(e1.beam.get_unit_s0())
            s0_2 = matrix.col(e2.beam.get_unit_s0())
            assert s0_1.accute_angle(s0_2, deg=True) < 0.0057  # ~0.1 mrad
        print "OK"
        return
  def regression(self):
    """Check results are as expected"""

    regression_experiments = ExperimentListFactory.from_json_file(
      os.path.join(self._data_dir, "regression_experiments.json"),
      check_format=False)

    for e1, e2 in zip(self._refined_experiments, regression_experiments):
      assert e1.crystal.is_similar_to(e2.crystal)
      # FIXME need is_similar_to for detector that checks geometry
      #assert e1.detector == e2.detector
      s0_1 = matrix.col(e1.beam.get_unit_s0())
      s0_2 = matrix.col(e2.beam.get_unit_s0())
      assert s0_1.accute_angle(s0_2, deg=True) < 0.0057 # ~0.1 mrad
    print "OK"
    return
Example #27
 def _create_profile_model(self):
   info("\nCreating profile model...")
   command = [ "dials.create_profile_model", "experiments.json", "indexed.pickle" ]
   result = run_process(command, print_stdout=False, debug=procrunner_debug)
   debug("result = %s" % self._prettyprint_dictionary(result))
   if result['exitcode'] == 0:
     from dxtbx.model.experiment.experiment_list import ExperimentListFactory
     db = ExperimentListFactory.from_json_file('experiments_with_profile_model.json')[0]
     self._num_images = db.imageset.get_scan().get_num_images()
     self._oscillation = db.imageset.get_scan().get_oscillation()[1]
     self._sigma_m = db.profile.sigma_m()
     info("%d images, %s deg. oscillation, sigma_m=%.3f" % (self._num_images, str(self._oscillation), self._sigma_m))
     info("Successfully completed (%.1f sec)" % result['runtime'])
     return True
   else:
     warn("Failed with exit code %d" % result['exitcode'])
     return False
def test1():

    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    # use multiple scan small molecule data for this test
    data_dir = os.path.join(dials_regression, "xia2-28")
    prefix = ["20", "25", "30", "35"]
    exp_path = [e + "_integrated_experiments.json" for e in prefix]
    exp_path = [os.path.join(data_dir, e) for e in exp_path]
    pkl_path = [e + "_integrated.pickle" for e in prefix]
    pkl_path = [os.path.join(data_dir, e) for e in pkl_path]

    for pth in exp_path + pkl_path:
        assert os.path.exists(pth), "%s missing" % pth

    cmd = "dials.two_theta_refine " + " ".join(exp_path) + " " + " ".join(
        pkl_path) + " cif=refined_cell.cif"
    print cmd

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_two_theta_refine")
    os.chdir(tmp_dir)
    try:
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()
        ref_exp = ExperimentListFactory.from_json_file("refined_cell.json",
                                                       check_format=False)
    finally:
        os.chdir(cwd)

    xls = ref_exp.crystals()
    assert len(xls) == 1  # crystal models should have been combined
    xl = xls[0]

    # test refined crystal model against expected values
    assert approx_equal(
        xl.get_unit_cell().parameters(),
        (5.428022880, 8.144145476, 12.039666971, 90.0, 90.0, 90.0))
    assert approx_equal(xl.get_cell_parameter_sd(),
                        (9.58081e-5, 0.000149909, 0.000215765, 0, 0, 0))
    assert approx_equal(xl.get_cell_volume_sd(), 0.0116254298)

    print "OK"
    return
Example #29
  def load_reference_geometry(self):
    if self.params.input.reference_geometry is None: return

    try:
      ref_datablocks = DataBlockFactory.from_json_file(self.params.input.reference_geometry, check_format=False)
    except Exception:
      ref_datablocks = None
    if ref_datablocks is None:
      from dxtbx.model.experiment.experiment_list import ExperimentListFactory
      try:
        ref_experiments = ExperimentListFactory.from_json_file(self.params.input.reference_geometry, check_format=False)
      except Exception:
        raise Sorry("Couldn't load geometry file %s"%self.params.input.reference_geometry)
      assert len(ref_experiments.detectors()) == 1
      self.reference_detector = ref_experiments.detectors()[0]
    else:
      assert len(ref_datablocks) == 1 and len(ref_datablocks[0].unique_detectors()) == 1
      self.reference_detector = ref_datablocks[0].unique_detectors()[0]
def print_someting(experiments_argv):

    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    experiments = ExperimentListFactory.from_json_file(experiments_argv,
                                                       check_format=False)

    print "len(experiments)", len(experiments)
    print experiments[0]

    for exp in experiments:
        print "\n exp =", exp
        #print "dir(exp) =", dir(exp), "\n"

        #print "dir(exp.crystal) =", dir(exp.crystal)

        print "exp.crystal.get_space_group =", exp.crystal.get_space_group()
        print "exp.crystal.get_unit_cell =", exp.crystal.get_unit_cell()

        #detc = exp.detector
        #scan = exp.scan
        #prof = exp.profile
        gonio = exp.goniometer

        #print "\n\n dir(detc) =", dir(detc)
        #print "\n\n dir(scan) =", dir(scan)
        #print "\n\n dir(prof) =", dir(prof)

        print "\n dir(gonio) =", dir(gonio), "\n"
        '''
        print "gonio.get_fixed_rotation() =", gonio.get_fixed_rotation()
        print "gonio.get_rotation_axis() =", gonio.get_rotation_axis()
        print "gonio.get_rotation_axis_datum() =", gonio.get_rotation_axis_datum()
        print "gonio.get_setting_rotation() =", gonio.get_setting_rotation()
        '''

        #get_U().elems

        print "\nexp.crystal.get_U() =\n", exp.crystal.get_U().elems
        print "\nexp.crystal.get_A() =\n", exp.crystal.get_A().elems
        print "\nexp.crystal.get_B() =\n", exp.crystal.get_B().elems

    print "Pass 01"
Example #31
def update_crystal(experiments_path):

    dat = CrystalData()

    try:

        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        experiments = ExperimentListFactory.from_json_file(experiments_path,
                                                           check_format=False)

        print "len(experiments)", len(experiments)

        exp = experiments[0]
        unit_cell = exp.crystal.get_unit_cell()
        dat.a, dat.b, dat.c, dat.alpha, dat.beta, dat.gamma = unit_cell.parameters()

    except:
        print "Unable to find cell data"

    return dat
Example #32
class Test(object):
    def __init__(self):
        import libtbx.load_env
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError, e:
            print 'FAIL: dials_regression not configured'
            exit(0)

        import dials
        import os

        filename = os.path.join(dials_regression, 'centroid_test_data',
                                'experiments.json')

        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        self.exlist = ExperimentListFactory.from_json_file(filename)
        assert (len(self.exlist) == 1)

        from dials.array_family import flex
        self.rlist = flex.reflection_table.from_predictions_multi(self.exlist)
class Test(object):

  def __init__(self):
    import libtbx.load_env
    try:
      dials_regression = libtbx.env.dist_path('dials_regression')
    except KeyError, e:
      print 'SKIP: dials_regression not configured'
      exit(0)

    import os
    path = os.path.join(
      dials_regression,
      'prediction_test_data',
      'experiments_scan_varying_crystal.json')

    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    self.experiments = ExperimentListFactory.from_json_file(path)
    assert(len(self.experiments) == 1)
    assert(self.experiments[0].crystal.num_scan_points ==
           self.experiments[0].scan.get_num_images() + 1)
Example #34
def test1():

    dials_regression = libtbx.env.find_in_repositories(relative_path="dials_regression", test=os.path.isdir)

    # use multiple scan small molecule data for this test
    data_dir = os.path.join(dials_regression, "xia2-28")
    prefix = ["20", "25", "30", "35"]
    exp_path = [e + "_integrated_experiments.json" for e in prefix]
    exp_path = [os.path.join(data_dir, e) for e in exp_path]
    pkl_path = [e + "_integrated.pickle" for e in prefix]
    pkl_path = [os.path.join(data_dir, e) for e in pkl_path]

    for pth in exp_path + pkl_path:
        assert os.path.exists(pth), "%s missing" % pth

    cmd = "dials.two_theta_refine " + " ".join(exp_path) + " " + " ".join(pkl_path) + " cif=refined_cell.cif"
    print cmd

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_two_theta_refine")
    os.chdir(tmp_dir)
    try:
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()
        ref_exp = ExperimentListFactory.from_json_file("refined_cell.json", check_format=False)
    finally:
        os.chdir(cwd)

    xls = ref_exp.crystals()
    assert len(xls) == 1  # crystal models should have been combined
    xl = xls[0]

    # test refined crystal model against expected values
    assert approx_equal(xl.get_unit_cell().parameters(), (5.428022880, 8.144145476, 12.039666971, 90.0, 90.0, 90.0))
    assert approx_equal(xl.get_cell_parameter_sd(), (9.58081e-5, 0.000149909, 0.000215765, 0, 0, 0))
    assert approx_equal(xl.get_cell_volume_sd(), 0.0116254298)

    print "OK"
    return
Example #35
    def load_reference_geometry(self):
        if self.params.input.reference_geometry is None: return

        try:
            ref_datablocks = DataBlockFactory.from_json_file(
                self.params.input.reference_geometry, check_format=False)
        except Exception:
            ref_datablocks = None
        if ref_datablocks is None:
            from dxtbx.model.experiment.experiment_list import ExperimentListFactory
            try:
                ref_experiments = ExperimentListFactory.from_json_file(
                    self.params.input.reference_geometry, check_format=False)
            except Exception:
                raise Sorry("Couldn't load geometry file %s" %
                            self.params.input.reference_geometry)
            assert len(ref_experiments.detectors()) == 1
            self.reference_detector = ref_experiments.detectors()[0]
        else:
            assert len(ref_datablocks) == 1 and len(
                ref_datablocks[0].unique_detectors()) == 1
            self.reference_detector = ref_datablocks[0].unique_detectors()[0]
Example #36
def test1():

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  # use the i04_weak_data for this test
  data_dir = os.path.join(dials_regression, "refinement_test_data", "i04_weak_data")
  experiments_path = os.path.join(data_dir, "experiments.json")
  pickle_path = os.path.join(data_dir, "indexed_strong.pickle")

  for pth in (experiments_path, pickle_path):
    assert os.path.exists(pth)

  cmd = "dials.slice_sweep " + experiments_path + " " + pickle_path + \
  ' "scan_range=1 20"'
  print cmd

  # work in a temporary directory
  cwd = os.path.abspath(os.curdir)
  tmp_dir = open_tmp_directory(suffix="test_dials_slice_sweep")
  os.chdir(tmp_dir)
  try:
    result = easy_run.fully_buffered(command=cmd).raise_if_errors()
    # load results
    sliced_exp = ExperimentListFactory.from_json_file("experiments_1_20.json",
                check_format=False)[0]
    with open("indexed_strong_1_20.pickle", "r") as f:
      sliced_refs = pickle.load(f)
  finally:
    os.chdir(cwd)

  # simple test of results
  assert sliced_exp.scan.get_image_range() == (1, 20)
  assert len(sliced_refs) == 3670

  print "OK"
  return
Example #37
def test1():

    dials_regression = libtbx.env.find_in_repositories(
        relative_path="dials_regression", test=os.path.isdir)

    # use the i04_weak_data for this test
    data_dir = os.path.join(dials_regression, "refinement_test_data",
                            "i04_weak_data")
    experiments_path = os.path.join(data_dir, "experiments.json")
    pickle_path = os.path.join(data_dir, "indexed_strong.pickle")

    for pth in (experiments_path, pickle_path):
        assert os.path.exists(pth)

    cmd = "dials.slice_sweep " + experiments_path + " " + pickle_path + \
    ' "scan_range=1 20"'
    print cmd

    # work in a temporary directory
    cwd = os.path.abspath(os.curdir)
    tmp_dir = open_tmp_directory(suffix="test_dials_slice_sweep")
    os.chdir(tmp_dir)
    try:
        result = easy_run.fully_buffered(command=cmd).raise_if_errors()
        # load results
        sliced_exp = ExperimentListFactory.from_json_file(
            "experiments_1_20.json", check_format=False)[0]
        with open("indexed_strong_1_20.pickle", "r") as f:
            sliced_refs = pickle.load(f)
    finally:
        os.chdir(cwd)

    # simple test of results
    assert sliced_exp.scan.get_image_range() == (1, 20)
    assert len(sliced_refs) == 3670

    print "OK"
    return
  def tst_from_pickle(self):
    from os.path import join
    import os

    os.environ['DIALS_REGRESSION'] = self.path

    # Get all the filenames
    filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json')

    # Read all the experiment lists in
    el1 = ExperimentListFactory.from_json_file(filename1)

    # Pickle then load again
    el2 = self.pickle_then_unpickle(el1)

    # All the experiment lists should be the same length
    assert(len(el1) == 1)
    assert(len(el1) == len(el2))

    # Check all the models are the same
    for e1, e2 in zip(el1, el2):
      assert(e1.imageset is not None)
      assert(e1.beam is not None)
      assert(e1.detector is not None)
      assert(e1.goniometer is not None)
      assert(e1.scan is not None)
      assert(e1.crystal is not None)
      assert(e1.imageset == e2.imageset)
      assert(e1.beam == e2.beam)
      assert(e1.detector == e2.detector)
      assert(e1.goniometer == e2.goniometer)
      assert(e1.scan == e2.scan)
      assert(e1.crystal == e2.crystal)

    # test passed
    print 'OK'
  try:
    dials_regression = libtbx.env.dist_path('dials_regression')
  except KeyError, e:
    print 'FAIL: dials_regression not configured'
    exit(0)
  path = join(dials_regression, "centroid_test_data")
  import sys
  assert(len(sys.argv) == 1)
  sys.argv.append(join(path, "experiments.json"))
  sys.argv.append(join(path, "profile.phil"))

  parser = OptionParser(phil=phil_scope)
  params, options, args = parser.parse_args()
  assert(len(args) == 1)

  exlist = ExperimentListFactory.from_json_file(args[0])
  assert(len(exlist) == 1)


  profile_model = ProfileModelList.load(params)

  rlist = flex.reflection_table.from_predictions_multi(exlist)
  rlist.compute_bbox(exlist, profile_model)
  rlist['shoebox'] = flex.shoebox(rlist['panel'], rlist['bbox'])
  rlist['shoebox'].allocate()

  rlist.extract_shoeboxes(exlist[0].imageset)

  show_reflection(rlist[len(rlist)//2])
  #show_reflection(rlist[len(rlist)//2], orient = "porTrait")
  #show_reflection(rlist[len(rlist)//2], orient = "lanDscape")
Example #41
            self.n_sigma * self.sigma_m,
        )

        # Mask the foreground
        mask_foreground(refl["shoebox"], refl["s1"], refl["xyzcal.px"].parts()[2], refl["panel"])
        Command.end("Masked foreground for %d reflections" % len(refl))

        # Return the reflections
        return refl


if __name__ == "__main__":

    from math import pi
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory

    experiments = ExperimentListFactory.from_json_file(
        "/home/upc86896/Projects/cctbx/sources/dials_regression/centroid_test_data/experiments.json", check_format=False
    )
    sigma_b = 0.058 * pi / 180
    sigma_m = 0.157 * pi / 180
    n_sigma = 3

    N = 100
    I = 1000
    B = 10

    simulate = Simulator(experiments[0], sigma_b, sigma_m, n_sigma)
    simulate.with_random_intensity(N, I, B)
#  simulate(experiments[0], sigma_b, sigma_m, n_sigma, N, I, B)
    from uuid import uuid4
    import libtbx.load_env
    import os
    from os.path import join

    try:
      dials_regression = libtbx.env.dist_path('dials_regression')
    except KeyError, e:
      print 'FAIL: dials_regression not configured'
      exit(0)

    filename = join(dials_regression, "centroid_test_data",
                    "experiments_with_bad_lookup.json")

    experiments = ExperimentListFactory.from_json_file(
      filename,
      check_format=False)

    imageset = experiments[0].imageset
    assert imageset.external_lookup.mask.data is None
    assert imageset.external_lookup.gain.data is None
    assert imageset.external_lookup.pedestal.data is None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None

    dump = ExperimentListDumper(experiments)
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)

    experiments = ExperimentListFactory.from_json_file(
Example #43
def get_dials_matrix(experiments_json):
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    experiments = ExperimentListFactory.from_json_file(experiments_json)
    return experiments[0].crystal.get_A()
Example #44
def get_dials_coordinate_frame(experiments_json):
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    experiments = ExperimentListFactory.from_json_file(experiments_json)
    return experiments[0].beam.get_direction(), \
      experiments[0].goniometer.get_rotation_axis()
def get_dials_coordinate_frame(experiments_json):
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory

    experiments = ExperimentListFactory.from_json_file(experiments_json)
    return experiments[0].beam.get_direction(), experiments[0].goniometer.get_rotation_axis()
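
The beam direction and rotation axis returned by get_dials_coordinate_frame above are plain
3-vectors, so a caller needs nothing beyond the standard library to work with them. An
illustrative helper (not from the original source) computing the angle between them:

import math

def beam_axis_angle_deg(experiments_json):
    # angle between the beam direction and the goniometer rotation axis, in degrees
    beam, axis = get_dials_coordinate_frame(experiments_json)
    dot = sum(b * a for b, a in zip(beam, axis))
    norm = math.sqrt(sum(b * b for b in beam)) * math.sqrt(sum(a * a for a in axis))
    return math.degrees(math.acos(max(-1.0, min(1.0, dot / norm))))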
Example #46
class Test(object):
    def __init__(self):
        from os.path import join, isfile
        import libtbx.load_env
        try:
            dials_regression = libtbx.env.dist_path('dials_regression')
        except KeyError, e:
            print 'SKIP: dials_regression not configured'
            exit(0)

        # The base path
        path = join(dials_regression, 'integration_test_data', 'simulated')

        # Set the experiment filename
        expr_filename = join(path, 'experiments.json')

        # The reference spots filesname
        reference_filename = join(path, 'simulated_n10000_i10000_b0.pickle')

        # The paths to the reflection files
        self.refl_filenames = [
            #join(path, 'simulated_n10000_i0_b10.pickle'),
            #join(path, 'simulated_n10000_i0_b100.pickle'),
            #join(path, 'simulated_n10000_i0_b1000.pickle'),
            #join(path, 'simulated_r_n10000_i0_b1000.pickle'),

            #join(path, 'simulated_n10000_i10_b0.pickle'),
            #join(path, 'simulated_n10000_i100_b0.pickle'),
            #join(path, 'simulated_n10000_i1000_b0.pickle'),
            #join(path, 'simulated_r_n10000_i10000_b0.pickle'),
            join(path, 'simulated_n10000_i10_b10.pickle'),
            join(path, 'simulated_n10000_i100_b10.pickle'),
            join(path, 'simulated_n10000_i1000_b10.pickle'),
            join(path, 'simulated_n10000_i10000_b10.pickle'),
            join(path, 'simulated_r_n10000_i10000_b10.pickle'),
            join(path, 'simulated_n10000_i10_b100.pickle'),
            join(path, 'simulated_n10000_i100_b100.pickle'),
            join(path, 'simulated_n10000_i1000_b100.pickle'),
            join(path, 'simulated_n10000_i10000_b100.pickle'),
            join(path, 'simulated_r_n10000_i10000_b100.pickle'),
        ]

        # Check the files exist
        for filename in self.refl_filenames:
            if not isfile(filename):
                print 'SKIP: simulated test data does not exist'
                print 'Generate by running the following commands:'
                print ' cd dials_regression/integration_test_data/simulated'
                print ' ./simulate'
                exit(0)

        # Load the experiments
        from dxtbx.model.experiment.experiment_list import ExperimentListFactory
        experiments = ExperimentListFactory.from_json_file(expr_filename,
                                                           check_format=False)
        assert (len(experiments) == 1)
        self.experiments = experiments

        from dials.algorithms.profile_model.gaussian_rs import Model as ProfileModel
        from dials.algorithms.profile_model.factory import phil_scope
        from libtbx.phil import parse
        params = phil_scope.fetch(parse('')).extract()
        self.experiments[0].profile = ProfileModel(params.profile,
                                                   sigma_b=0.024,
                                                   sigma_m=0.044,
                                                   n_sigma=3,
                                                   deg=True)

        # Load the reference spots
        from dials.array_family import flex
        self.reference = flex.reflection_table.from_pickle(reference_filename)
        self.reference.compute_partiality(self.experiments)
        mask = flex.bool(len(self.reference), True)
        value = self.reference.flags.reference_spot
        self.reference.set_flags(mask, value)
        self.reference.set_flags(mask, self.reference.flags.integrated_sum)
class TestExperimentListDumper(object):

  def __init__(self, path):
    self.path = path

  def run(self):
    self.tst_dump_formats()
    self.tst_dump_empty_sweep()
    self.tst_dump_with_lookup()
    self.tst_dump_with_bad_lookup()

  def tst_dump_formats(self):
    from uuid import uuid4
    from os.path import join
    import os

    os.environ['DIALS_REGRESSION'] = self.path

    # Get all the filenames
    filename1 = join(self.path, 'experiment_test_data', 'experiment_1.json')

    # Read all the experiment lists in
    elist1 = ExperimentListFactory.from_json_file(filename1)

    # Create the experiment list dumper
    dump = ExperimentListDumper(elist1)

    # Dump as JSON file and reload
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)
    elist2 = ExperimentListFactory.from_json_file(filename)
    self.check(elist1, elist2)

    # Dump as split JSON file and reload
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename, split=True)
    elist2 = ExperimentListFactory.from_json_file(filename)
    self.check(elist1, elist2)

    # Dump as pickle and reload
    filename = 'temp%s.pickle' % uuid4().hex
    dump.as_pickle(filename)
    elist2 = ExperimentListFactory.from_pickle_file(filename)
    self.check(elist1, elist2)

  def tst_dump_empty_sweep(self):
    from dxtbx.imageset import ImageSweep, NullReader, SweepFileList
    from dxtbx.model import Beam, Detector, Goniometer, Scan
    from dxtbx.model.crystal import crystal_model
    from uuid import uuid4

    imageset = ImageSweep(NullReader(SweepFileList("filename%01d.cbf", (0, 3))))
    imageset.set_beam(Beam((1, 0, 0)))
    imageset.set_detector(Detector())
    imageset.set_goniometer(Goniometer())
    imageset.set_scan(Scan((1, 3), (0.0, 1.0)))

    crystal = crystal_model((1, 0, 0), (0, 1, 0), (0, 0, 1), space_group_symbol=1)

    experiments = ExperimentListFactory.from_imageset_and_crystal(
      imageset, crystal)

    dump = ExperimentListDumper(experiments)
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)
    experiments2 = ExperimentListFactory.from_json_file(filename,
                                                        check_format=False)
    self.check(experiments, experiments2)

    print 'OK'

  def tst_dump_with_lookup(self):
    from dxtbx.imageset import ImageSweep, NullReader, SweepFileList
    from dxtbx.model import Beam, Detector, Goniometer, Scan
    from dxtbx.model.crystal import crystal_model
    from uuid import uuid4
    import libtbx.load_env
    import os
    from os.path import join

    try:
      dials_regression = libtbx.env.dist_path('dials_regression')
    except KeyError, e:
      print 'FAIL: dials_regression not configured'
      exit(0)

    filename = join(dials_regression, "centroid_test_data",
                    "experiments_with_lookup.json")

    experiments = ExperimentListFactory.from_json_file(
      filename,
      check_format=True)

    imageset = experiments[0].imageset
    assert imageset.external_lookup.mask.data is not None
    assert imageset.external_lookup.gain.data is not None
    assert imageset.external_lookup.pedestal.data is not None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.all_eq(True)
    assert imageset.external_lookup.gain.data.all_eq(1)
    assert imageset.external_lookup.pedestal.data.all_eq(0)

    dump = ExperimentListDumper(experiments)
    filename = 'temp%s.json' % uuid4().hex
    dump.as_json(filename)

    experiments = ExperimentListFactory.from_json_file(
      filename,
      check_format=True)

    imageset = experiments[0].imageset
    assert imageset.external_lookup.mask.data is not None
    assert imageset.external_lookup.gain.data is not None
    assert imageset.external_lookup.pedestal.data is not None
    assert imageset.external_lookup.mask.filename is not None
    assert imageset.external_lookup.gain.filename is not None
    assert imageset.external_lookup.pedestal.filename is not None
    assert imageset.external_lookup.mask.data.all_eq(True)
    assert imageset.external_lookup.gain.data.all_eq(1)
    assert imageset.external_lookup.pedestal.data.all_eq(0)
Example #48
def run():

  from dials.util.nexus import dump, load
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  from dials.array_family import flex
  from os.path import join
  import libtbx.load_env
  try:
    dials_regression = libtbx.env.dist_path('dials_regression')
  except KeyError, e:
    print 'FAIL: dials_regression not configured'
    exit(0)
  path = join(dials_regression, "nexus_test_data")

  # Read the experiments
  experiments1 = ExperimentListFactory.from_json_file(
    join(path, "refined_experiments.json"))

  # Read the reflections
  reflections1 = flex.reflection_table.from_pickle(
    join(path, "integrated.pickle"))

  # Delete some columns for the test
  del reflections1['s1']
  del reflections1['zeta']
  del reflections1['background.mse']

  # Dump the reflections
  dump(experiments1, reflections1, "hklout.nxs")

  # Load them again
  experiments2, reflections2 = load("hklout.nxs")
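  # Illustrative follow-up (not in the original snippet): confirm the round trip
  # through hklout.nxs preserved the number of experiments and reflections.
  assert len(experiments2) == len(experiments1)
  assert len(reflections2) == len(reflections1)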
Example #49
def load_sweeps_with_common_indexing():
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  import dials # required for gaussian_rs warning
  from xia2.Wrappers.Dials.Reindex import Reindex
  Citations.cite('dials')

  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  import cPickle as pickle
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1
  crystal = next(crystals.itervalues())
  working_directory = Environment.generate_directory([crystal.get_name(), 'analysis'])
  os.chdir(working_directory)

  scaler = crystal._get_scaler()

  epoch_to_batches = {}
  epoch_to_integrated_intensities = {}
  epoch_to_sweep_name = {}

  # Aimless only
  epochs = scaler._sweep_handler.get_epochs()

  reference_cell = None
  reference_lattice = None
  reference_vectors = None
  reference_wavelength = None

  # Reindex each sweep to same setting
  all_miller_indices = flex.miller_index()
  all_two_thetas = flex.double()

  for epoch in epochs:
    si = scaler._sweep_handler.get_sweep_information(epoch)
    Chatter.smallbanner(si.get_sweep_name(), True)
    Debug.smallbanner(si.get_sweep_name(), True)

    intgr = si.get_integrater()
    experiments_filename = intgr.get_integrated_experiments()
    reflections_filename = intgr.get_integrated_reflections()
    refiner = intgr.get_integrater_refiner()
    Debug.write('experiment: %s' % experiments_filename)
    Debug.write('reflection: %s' % reflections_filename)

    # Use setting of first sweep as reference
    if reference_vectors is None:
      reference_vectors = experiments_filename

    # Assume that all sweeps have the same lattice system
    if reference_lattice is None:
      reference_lattice = refiner.get_refiner_lattice()
    else:
      assert reference_lattice == refiner.get_refiner_lattice()
    Debug.write("lattice: %s" % refiner.get_refiner_lattice())

    # Read .json file for sweep
    db = ExperimentListFactory.from_json_file(experiments_filename)

    # Assume that each file only contains a single experiment
    assert (len(db) == 1)
    db = db[0]

    # Get beam vector
    s0 = db.beam.get_unit_s0()

    # Use the unit cell of the first sweep as reference
    if reference_cell is None:
      reference_cell = db.crystal.get_unit_cell()
      Debug.write("Reference cell: %s" % str(reference_cell))

    dials_reindex = Reindex()
    dials_reindex.set_working_directory(working_directory)
    dials_reindex.set_cb_op("auto")
    dials_reindex.set_reference_filename(reference_vectors)
    dials_reindex.set_experiments_filename(experiments_filename)
    dials_reindex.set_indexed_filename(reflections_filename)
    auto_logfiler(dials_reindex)
    dials_reindex.run()

    # Assume that all data are collected at same wavelength
    if reference_wavelength is None:
      reference_wavelength = intgr.get_wavelength()
    else:
      assert abs(reference_wavelength - intgr.get_wavelength()) < 0.01
    Debug.write("wavelength: %f A" % intgr.get_wavelength())
    Debug.write("distance: %f mm" % intgr.get_distance())

    # Get integrated reflection data
    import dials
    with open(dials_reindex.get_reindexed_reflections_filename(), 'rb') as fh:
      reflections = pickle.load(fh)

    selection = reflections.get_flags(reflections.flags.used_in_refinement)
    Chatter.write("Found %d reflections used in refinement (out of %d entries)" % (selection.count(True), len(reflections['miller_index'])))
    reflections = reflections.select(selection)

    # Filter bad reflections
    selection = reflections['intensity.sum.variance'] <= 0
    if selection.count(True) > 0:
      reflections.del_selected(selection)
      print 'Removing %d reflections with non-positive variance' % \
        selection.count(True)

    if 'intensity.prf.variance' in reflections:
      selection = reflections['intensity.prf.variance'] <= 0
      if selection.count(True) > 0:
        reflections.del_selected(selection)
        print 'Removing %d profile reflections with non-positive variance' % \
          selection.count(True)

    # Find the observed 2theta angles
    miller_indices = flex.miller_index()
    two_thetas_obs = flex.double()
    for pixel, panel, hkl in zip(reflections['xyzobs.px.value'], reflections['panel'], reflections['miller_index']):
      assert hkl != (0, 0, 0)
      two_thetas_obs.append(db.detector[panel].get_two_theta_at_pixel(s0, pixel[0:2]))
      miller_indices.append(hkl)

    # Convert observed 2theta angles from radians to degrees
    import math
    two_thetas_obs = two_thetas_obs * 180 / math.pi
    Chatter.write("Remaining %d reflections are in 2theta range %.3f - %.3f deg" % (len(miller_indices), min(two_thetas_obs), max(two_thetas_obs)))

    all_miller_indices.extend(miller_indices)
    all_two_thetas.extend(two_thetas_obs)

  return all_miller_indices, all_two_thetas, reference_cell, reference_lattice, reference_wavelength
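
# A small reusable sketch of the variance filter used in the loop above
# (assumes the reflection table supports del_selected() and the standard
# intensity.*.variance columns, as in the snippet).
def remove_nonpositive_variance(reflections, column='intensity.sum.variance'):
  if column not in reflections:
    return reflections
  selection = reflections[column] <= 0
  if selection.count(True) > 0:
    reflections.del_selected(selection)
    print 'Removing %d reflections with non-positive %s' % \
      (selection.count(True), column)
  return reflections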
Exemple #50
def reconstruct_rogues(params):
  import os
  assert os.path.exists('xia2.json')
  from xia2.Schema.XProject import XProject
  xinfo = XProject.from_json(filename='xia2.json')

  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  import cPickle as pickle
  import dials # because WARNING:root:No profile class gaussian_rs registered
  crystals = xinfo.get_crystals()
  assert len(crystals) == 1

  for xname in crystals:
    crystal = crystals[xname]

  scaler = crystal._get_scaler()

  epochs = scaler._sweep_handler.get_epochs()

  rogues = os.path.join(scaler.get_working_directory(),
                        xname, 'scale', 'ROGUES')

  rogue_reflections = munch_rogues(rogues)

  batched_reflections = { }

  for epoch in epochs:
    si = scaler._sweep_handler.get_sweep_information(epoch)
    intgr = si.get_integrater()
    experiments = ExperimentListFactory.from_json_file(
      intgr.get_integrated_experiments())
    with open(intgr.get_integrated_reflections(), 'rb') as fh:
      reflections = pickle.load(fh)
    batched_reflections[si.get_batch_range()] = (experiments, reflections,
                                                 si.get_sweep_name())

  # - look up reflection in reflection list, get bounding box
  # - pull pixels given from image set, flatten these, write out

  from dials.array_family import flex
  from annlib_ext import AnnAdaptor as ann_adaptor

  reflections_run = { }
  for run in batched_reflections:
    reflections_run[run] = []

  for rogue in rogue_reflections:
    b = rogue[0]
    for run in batched_reflections:
      if b >= run[0] and b <= run[1]:
        reflections_run[run].append(rogue)
        break

  for run_no, run in enumerate(reflections_run):
    experiment = batched_reflections[run][0]
    reflections = batched_reflections[run][1]
    name = batched_reflections[run][2]
    rogues = reflections_run[run]
    reference = flex.double()
    scan = experiment.scans()[0]
    images = experiment.imagesets()[0]
    for xyz in reflections['xyzcal.px']:
      reference.append(xyz[0])
      reference.append(xyz[1])
      reference.append(xyz[2])

    search = flex.double()
    for rogue in rogues:
      search.append(rogue[1])
      search.append(rogue[2])
      search.append(scan.get_array_index_from_angle(rogue[3]))

    ann = ann_adaptor(data=reference, dim=3, k=1)
    ann.query(search)

    keep = flex.bool(len(reflections), False)

    for j, rogue in enumerate(rogues):
      keep[ann.nn[j]] = True

    reflections = reflections.select(keep)

    if params.extract:
      reflections["shoebox"] = flex.shoebox(
        reflections["panel"],
        reflections["bbox"],
        allocate=True)
      reflections.extract_shoeboxes(images, verbose=False)

    if len(reflections_run) > 1:
      output = params.output.reflections.replace(
          '.pickle', '-%s.pickle' % name)
      print 'Extracted %d rogue reflections for %s to %s' % \
        (len(reflections), name, output)
      reflections.as_pickle(output)
    else:
      output = params.output.reflections
      print 'Extracted %d rogue reflections to %s' % \
        (len(reflections), output)
      reflections.as_pickle(output)
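
# An isolated sketch of the nearest-neighbour matching used above: given
# predicted centroids (reference) and rogue positions (search), AnnAdaptor
# returns in ann.nn, for each search point, the index of its closest
# reference point. (Assumes the annlib_ext API as used in reconstruct_rogues.)
def match_rogues(reference_xyz, search_xyz):
  from annlib_ext import AnnAdaptor as ann_adaptor
  from dials.array_family import flex
  reference = flex.double()
  for xyz in reference_xyz:
    reference.extend(flex.double(xyz))
  search = flex.double()
  for xyz in search_xyz:
    search.extend(flex.double(xyz))
  ann = ann_adaptor(data=reference, dim=3, k=1)
  ann.query(search)
  return list(ann.nn)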
Exemple #51
  def _write_pickle(self, batch):
    pass

  def _write_predictions(self, predictions):
    pass


from contextlib import contextmanager

@contextmanager
def open_shoebox_writer(filename):
  writer = ShoeboxWriter(filename)
  yield writer


if __name__ == '__main__':
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  from dials.array_family import flex

  experiments = ExperimentListFactory.from_json_file(
    '/home/upc86896/Data/Data/i04-BAG-training/dials_processed/experiments.json')

  predictions = flex.reflection_table.from_predictions(experiments[0])
  predictions.compute_bbox(experiments[0], nsigma=3, sigma_d=0.024,
                           sigma_m=0.044)

  zeta = predictions.compute_zeta(experiments[0])
  mask = flex.abs(zeta) < 0.05
  predictions.del_selected(mask)

  with open_shoebox_writer("extracted.tar") as writer:
    writer.write(predictions, experiments[0].imageset)
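
  # Hedged follow-up (not in the original snippet): report how many predictions
  # survived the |zeta| < 0.05 deletion and were written out.
  print 'Wrote %d predicted shoeboxes to extracted.tar' % len(predictions)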
Exemple #52
    def run(self):
      import os
      from xia2.Handlers.Streams import Chatter, Debug

      if self._reindexing_operator:
        Debug.write('Reindexing sweeps for dials.two_theta_refine')
        from xia2.lib.bits import auto_logfiler
        from xia2.Wrappers.Dials.Reindex import Reindex
        self._reindexed_experiments, self._reindexed_reflections = [], []
        for e, p in zip(self._experiments, self._pickles):
          reindexer = Reindex()
          reindexer.set_cb_op(self._reindexing_operator)
          reindexer.set_experiments_filename(e)
          reindexer.set_indexed_filename(p)
          reindexer.set_working_directory(self.get_working_directory())
          auto_logfiler(reindexer)
          reindexer.run()
          self._reindexed_experiments.append(reindexer.get_reindexed_experiments_filename())
          self._reindexed_reflections.append(reindexer.get_reindexed_reflections_filename())

      Debug.write('Running dials.two_theta_refine')

      self._output_cif = os.path.join(
        self.get_working_directory(),
        '%s_dials.two_theta_refine.cif' % self.get_xpid())
      self._output_mmcif = os.path.join(
        self.get_working_directory(),
        '%s_dials.two_theta_refine.mmcif' % self.get_xpid())
      self._output_correlation_plot = os.path.join(
        self.get_working_directory(),
        '%s_dials.two_theta_refine.png' % self.get_xpid())
      self._output_experiments = os.path.join(
        self.get_working_directory(),
        '%s_refined_cell.json' % self.get_xpid())

      self.clear_command_line()

      if self._reindexing_operator:
        for experiment in self._reindexed_experiments:
          self.add_command_line(experiment)
        for pickle in self._reindexed_reflections:
          self.add_command_line(pickle)
      else:
        for experiment in self._experiments:
          self.add_command_line(experiment)
        for pickle in self._pickles:
          self.add_command_line(pickle)
      self.add_command_line('output.cif=%s' % self._output_cif)
      self.add_command_line('output.mmcif=%s' % self._output_mmcif)
      if self._output_correlation_plot is not None:
        self.add_command_line(
          'output.correlation_plot.filename=%s' % self._output_correlation_plot)
      if self._output_experiments is not None:
        self.add_command_line(
          'output.experiments=%s' % self._output_experiments)
      if self._phil_file is not None:
        self.add_command_line('%s' %self._phil_file)

      self.start()
      self.close_wait()

      if not os.path.isfile(self._output_cif):
        Chatter.write(
          "TwoTheta refinement failed, see log file for more details:\n  %s" % self.get_log_file())
        raise RuntimeError, 'unit cell not refined'

      self.check_for_errors()

      from dxtbx.model.experiment.experiment_list import ExperimentListFactory
      experiments = ExperimentListFactory.from_json_file(self.get_output_experiments())
      self._crystal = experiments.crystals()[0]
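
      # Hedged follow-up (not in the original wrapper): report the refined
      # unit cell; get_unit_cell()/parameters() are standard cctbx/dxtbx
      # crystal methods.
      cell = self._crystal.get_unit_cell().parameters()
      Debug.write('Refined cell: %.4f %.4f %.4f %.3f %.3f %.3f' % cell)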
  def run(self):
    '''Execute the script.'''
    import os, math
    from cctbx.crystal import symmetry
    from scitbx.array_family import flex
    from libtbx import table_utils, easy_pickle
    from xfel.command_line.cspad_cbf_metrology import find_files
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory
    table_header = ["","","","I","IsigI","N >","RMSD","Cutoff"]
    table_header2 = ["Bin","Resolution Range","Completeness","","","cutoff","(um)",""]

    # Parse the command line
    params, options, all_paths = self.parser.parse_args(show_diff_phil=False, return_unhandled=True)
    exp_paths = []
    refl_paths = []
    for path in all_paths:
      exps, refs = find_files(path, "integrated")
      exp_paths.extend(exps)
      refl_paths.extend(refs)
    assert len(exp_paths) == len(refl_paths)

    best_data = {}
    best_limits = flex.double()
    for exp_path, refl_path in zip(exp_paths, refl_paths):
      experiments = ExperimentListFactory.from_json_file(exp_path)
      reflections = easy_pickle.load(refl_path)
      exp_name = os.path.basename(exp_path)
      if exp_name.startswith("idx-") and exp_name.endswith("_refined_experiments.json"):
        # use slicing, not lstrip/rstrip, which strip character sets rather
        # than prefixes/suffixes
        tag = exp_name[len("idx-"):-len("_refined_experiments.json")]
      else:
        tag = "%s, %s"%(exp_path, refl_path)

      for exp_id, experiment in enumerate(experiments):
        print "*"*80
        print "Data table for", tag
        table_data = []
        table_data.append(table_header)
        table_data.append(table_header2)

        crystal = experiment.crystal
        refls = reflections.select(reflections['id'] == exp_id)
        sym = symmetry(unit_cell = crystal.get_unit_cell(), space_group = crystal.get_space_group())
        d = crystal.get_unit_cell().d(refls['miller_index'])
        mset = sym.miller_set(indices = refls['miller_index'].select(d>=params.d_min), anomalous_flag=False)
        binner = mset.setup_binner(n_bins=params.n_bins)
        acceptable_resolution_bins = []
        for i in binner.range_used():
          d_max, d_min = binner.bin_d_range(i)
          sel = (d <= d_max) & (d > d_min)
          sel &= refls['intensity.sum.value'] > 0
          bin_refls = refls.select(sel)
          n_refls = len(bin_refls)
          avg_i = flex.mean(bin_refls['intensity.sum.value']) if n_refls > 0 else 0
          avg_i_sigi = flex.mean(bin_refls['intensity.sum.value'] /
                                 flex.sqrt(bin_refls['intensity.sum.variance'])) if n_refls > 0 else 0
          acceptable_resolution_bins.append(avg_i_sigi >= params.sig_filter_sigma)

          bright_refls = bin_refls.select((bin_refls['intensity.sum.value']/flex.sqrt(bin_refls['intensity.sum.variance'])) >= params.sig_filter_sigma)
          n_bright = len(bright_refls)

          rmsd_obs = 1000*math.sqrt((bright_refls['xyzcal.mm']-bright_refls['xyzobs.mm.value']).sum_sq()/n_bright) if n_bright > 0 else 0

          table_row = []
          table_row.append("%3d"%i)
          table_row.append("%-13s"%binner.bin_legend(i_bin=i,show_bin_number=False,show_bin_range=False,
                                                     show_d_range=True, show_counts=False))
          table_row.append("%13s"%binner.bin_legend(i_bin=i,show_bin_number=False,show_bin_range=False,
                                                    show_d_range=False, show_counts=True))

          table_row.append("%.1f"%(avg_i))
          table_row.append("%.1f"%(avg_i_sigi))
          table_row.append("%3d"%n_bright)
          table_row.append("%.1f"%(rmsd_obs))
          table_data.append(table_row)

        acceptable_resolution_bins = [acceptable_resolution_bins[i] for i in xrange(len(acceptable_resolution_bins))
                                      if False not in acceptable_resolution_bins[:i+1]]

        for b, row in zip(acceptable_resolution_bins, table_data[2:]):
          if b:
            row.append("X")
        print table_utils.format(table_data,has_header=2,justify='center',delim=" ")

        if any(acceptable_resolution_bins):
          best_index = acceptable_resolution_bins.count(True)-1
          best_row = table_data[best_index+2]
          d_min = binner.bin_d_range(binner.range_used()[best_index])[1]
          if len(best_limits) < params.best_count:
            best_limits.append(d_min)
            best_data[tag] = d_min, best_row
          elif (d_min < best_limits).count(True) > 0:
            worst_d_min = flex.max(best_limits)
            for tag, data in best_data.iteritems():
              if worst_d_min == data[0]:
                best_data[tag] = d_min, best_row
                best_limits[flex.first_index(best_limits, worst_d_min)] = d_min
                break
          print tag, "best row:", " ".join(best_row)
        else:
          print "Data didn't pass cutoff"
    if len(best_limits) > 0:
      print "*"*80
      print "Top", len(best_limits)
      for tag, data in best_data.iteritems():
        print tag, " ".join(data[1])
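
# A hedged helper distilled from the per-bin statistics above (assumes the
# standard intensity.sum.value / intensity.sum.variance columns).
def mean_i_over_sigma(refls):
  from scitbx.array_family import flex
  refls = refls.select(refls['intensity.sum.variance'] > 0)
  if len(refls) == 0:
    return 0.0
  return flex.mean(refls['intensity.sum.value'] /
                   flex.sqrt(refls['intensity.sum.variance']))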
def test4():
  '''Test group restraint with multiple crystals, and a stills refiner'''

  # Imports needed by this snippet; module paths are assumed from the DIALS
  # code base of the same vintage as these examples.
  import os
  import libtbx.load_env
  from libtbx.phil import parse
  from libtbx.test_utils import approx_equal
  from dials.array_family import flex
  from dials.algorithms.refinement.refiner import RefinerFactory
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory

  if not libtbx.env.has_module("dials_regression"):
    print "Skipping test2 in " + __file__ + " as dials_regression not present"
    return

  # The phil scope
  from dials.algorithms.refinement.refiner import phil_scope
  user_phil = parse('''
  refinement
  {
    parameterisation
    {
      crystal
      {
        unit_cell
        {
          restraints
          {
            tie_to_group
            {
              sigmas=1,0,2,0,0,0
              apply_to_all=true
            }
          }
        }
      }
    }
  }
  ''')

  working_phil = phil_scope.fetch(source=user_phil)
  working_params = working_phil.extract()

  dials_regression = libtbx.env.find_in_repositories(
    relative_path="dials_regression",
    test=os.path.isdir)

  # use the multi stills test data
  data_dir = os.path.join(dials_regression, "refinement_test_data", "multi_stills")
  experiments_path = os.path.join(data_dir, "combined_experiments.json")
  pickle_path = os.path.join(data_dir, "combined_reflections.pickle")

  experiments = ExperimentListFactory.from_json_file(experiments_path,
                check_format=False)
  reflections = flex.reflection_table.from_pickle(pickle_path)

  refiner = RefinerFactory.from_parameters_data_experiments(working_params,
        reflections, experiments)

  # hack to extract the objects needed from the Refiner
  rp = refiner._target._restraints_parameterisation
  pred_param = refiner._pred_param

  # get analytical values and gradients
  vals, grads, weights = rp.get_residuals_gradients_and_weights()

  # get finite difference gradients
  p_vals = pred_param.get_param_vals()
  deltas = [1.e-7] * len(p_vals)

  fd_grad=[]
  for i in range(len(deltas)):

    val = p_vals[i]

    p_vals[i] -= deltas[i] / 2.
    pred_param.set_param_vals(p_vals)

    rev_state, foo, bar = rp.get_residuals_gradients_and_weights()
    rev_state = flex.double(rev_state)

    p_vals[i] += deltas[i]
    pred_param.set_param_vals(p_vals)

    fwd_state, foo, bar = rp.get_residuals_gradients_and_weights()
    fwd_state = flex.double(fwd_state)

    p_vals[i] = val

    fd = (fwd_state - rev_state) / deltas[i]
    fd_grad.append(fd)

  # for comparison, fd_grad is a list of flex.doubles, each of which
  # corresponds to the gradients of the residuals wrt a single parameter
  pnames = pred_param.get_param_names()
  for i, (pname, fd) in enumerate(zip(pnames, fd_grad)):
    # extract dense column from the sparse matrix
    an = grads.col(i).as_dense_vector()

    #print pname
    #print list(an.round(6))
    #print list(fd.round(6))
    #print
    assert approx_equal(an, fd, eps=1e-5)

  print "OK"
  return
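
# A compact sketch of the central-difference scheme used in test4 (assumes a
# callable returning the residuals as a sequence of doubles and a
# parameterisation exposing get_param_vals()/set_param_vals(), as above).
def finite_difference_gradient(residuals_fn, pred_param, i, delta=1.e-7):
  from dials.array_family import flex
  p_vals = pred_param.get_param_vals()
  val = p_vals[i]
  p_vals[i] = val - delta / 2.
  pred_param.set_param_vals(p_vals)
  rev_state = flex.double(residuals_fn())
  p_vals[i] = val + delta / 2.
  pred_param.set_param_vals(p_vals)
  fwd_state = flex.double(residuals_fn())
  p_vals[i] = val
  pred_param.set_param_vals(p_vals)
  return (fwd_state - rev_state) / delta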
def get_dials_matrix(experiments_json):
    from dxtbx.model.experiment.experiment_list import ExperimentListFactory

    experiments = ExperimentListFactory.from_json_file(experiments_json)
    return experiments[0].crystal.get_A()
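
# Hedged usage sketch (not part of the original helper): get_dials_matrix()
# returns the crystal's A = UB setting matrix, which in the usual dxtbx
# convention maps a Miller index onto its reciprocal-lattice point.
if __name__ == '__main__':
    import sys
    print 'A = UB matrix for %s:' % sys.argv[1]
    print get_dials_matrix(sys.argv[1])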
Exemple #56
#     run_test_single(imageset, 10000000),
#     run_test_single(imageset, 50000000),
#     run_test_single(imageset, 100000000),
#     run_test_single(imageset, 250000000)
#   ]

#   outfile = open("profile.txt", "w")
#   for r in results:
#     print >>outfile, r[0], r[1]


if __name__ == '__main__':
  import sys
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory

  exlist = ExperimentListFactory.from_json_file(sys.argv[1])

  if len(sys.argv) > 2:
    imageset = exlist[0].imageset[0:int(sys.argv[2])]
  else:
    imageset = exlist[0].imageset

  print len(imageset)

  # run_test(imageset)

  from time import time
  st = time()
  n = int(len(imageset) / 4)
  for i in range(0, n):
    image = imageset[i]
    from os.path import join

    # path = '/home/upc86896/Projects/cctbx/sources/dials_regression/centroid_test_data'
    path = "/home/upc86896/Data/Data/i04-BAG-training/dials_processed/"

    experiment_list_filename = join(path, "experiments.json")

    if len(sys.argv) > 1:
        nproc = int(sys.argv[1])
    else:
        nproc = 1

    print("Reading Experiments")
    from math import pi

    experiments = ExperimentListFactory.from_json_file(
        experiment_list_filename)

    profile_model = ProfileModelList()
    profile_model.append(
        ProfileModel(n_sigma=3,
                     sigma_b=0.024 * pi / 180,
                     sigma_m=0.044 * pi / 180))

    print("Predicting Reflections")
    rlist = flex.reflection_table.from_predictions(experiments[0])
    rlist["id"] = flex.int(len(rlist), 0)
    rlist.compute_bbox(experiments, profile_model)
    rlist.compute_zeta_multi(experiments)
    rlist.compute_d(experiments)
    print("")
def run():
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  from os.path import join
  import libtbx.load_env
  try:
    dials_regression = libtbx.env.dist_path('dials_regression')
  except KeyError, e:
    print 'FAIL: dials_regression not configured'
    exit(0)
  path = join(dials_regression, "nexus_test_data", "shared_models")
  filename_list = [
    'single',
    'multiple_unrelated',
    'multi_crystal',
    'two_colour',
    'multiple_sweeps',
    'stills'
  ]
  for filename in filename_list:
    filename_in = join(path, "%s.json" % filename)
    filename_out = "%s.nxs" % filename
    experiments = ExperimentListFactory.from_json_file(filename_in)
    run_single(experiments, filename_out)
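
# Hypothetical sketch (run_single itself is not shown in this snippet): a
# minimal report of the shared models in each loaded experiment list, using
# the ExperimentList accessors seen elsewhere in these examples.
def describe_shared_models(experiments):
  print len(experiments), 'experiment(s):', \
    len(experiments.beams()), 'beam(s),', \
    len(experiments.detectors()), 'detector(s),', \
    len(experiments.crystals()), 'crystal(s)'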

if __name__ == '__main__':
  from dials.test import cd_auto
  with cd_auto(__file__):
    test_polarization_conversion()
    run()
Exemple #59


if __name__ == '__main__':

  from dials.array_family import flex
  from dxtbx.model.experiment.experiment_list import ExperimentListFactory
  import os.path
  path = "/home/upc86896/Projects/cctbx/sources/dials_regression/centroid_test_data"

  rlist_filename = os.path.join(path, "integrated.pickle")
  exlist_filename = os.path.join(path, "experiments.json")

  rlist = flex.reflection_table.from_pickle(rlist_filename)
  exlist = ExperimentListFactory.from_json_file(exlist_filename)

  panel = rlist['panel']
  bbox = rlist['bbox']

  rlist['shoebox'] = flex.shoebox(panel, bbox)
  rlist['shoebox'].allocate()

  rlist.fill_shoeboxes(exlist[0].imageset)
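
  # A hedged follow-up sketch (not in the original): after fill_shoeboxes()
  # the pixel data live in each Shoebox's .data array, so the first few can
  # be summarised like this.
  for i in range(min(5, len(rlist))):
    sbox = rlist['shoebox'][i]
    print i, sbox.bbox, sbox.data.all()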