Example 1
    def reprocess(self):
        """Main routine, chain together last few steps of processing i.e.
        pointgroup, scale and merge."""

        write("Running on: %s" % str(os.getenv("HOSTNAME")).split(".")[0])

        # check input frame limits

        start, end = map(int, self._xds_inp["DATA_RANGE"].split())
        osc = float(self._xds_inp["OSCILLATION_RANGE"])
        osc_start = float(self._xds_inp["STARTING_ANGLE"])

        if self._first_image is not None:
            if start < self._first_image:
                osc_start += osc * (self._first_image - start)
                start = self._first_image
                self._xds_inp["STARTING_ANGLE"] = str(osc_start)
                self._xds_inp["STARTING_FRAME"] = str(start)

        if self._last_image is not None:
            end = min(end, self._last_image)

        self._xds_inp["DATA_RANGE"] = "%s %s" % (start, end)

        step_time = time.time()

        write("Processing images: %d -> %d" % (start, end))

        osc_end = osc_start + (end - start + 1) * osc
        write("Rotation range: %.2f -> %.2f" % (osc_start, osc_end))

        template = self._xds_inp["NAME_TEMPLATE_OF_DATA_FRAMES"]

        write("Template: %s" % os.path.split(template)[-1].replace("?", "#"))
        write("Wavelength: %.5f" % float(self._xds_inp["X-RAY_WAVELENGTH"]))
        write("Working in: %s" % os.getcwd())

        # just for information for the user, print all options for indexing
        # FIXME should be able to run the same from CORRECT.LP which would
        # work better....

        from fast_dp.xds_reader import read_xds_idxref_lp
        from fast_dp.cell_spacegroup import spacegroup_to_lattice

        results = read_xds_idxref_lp("IDXREF.LP")
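        # integer keys of 'results' are candidate spacegroup numbers; the
        # second element of each entry holds the corresponding cell constants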

        write("For reference, all indexing results:")
        write("%3s %6s %6s %6s %6s %6s %6s" %
              ("Lattice", "a", "b", "c", "alpha", "beta", "gamma"))

        for r in sorted((r for r in results if isinstance(r, int)),
                        reverse=True):
            cell = results[r][1]
            write("%7s %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f" % (
                spacegroup_to_lattice(r),
                cell[0],
                cell[1],
                cell[2],
                cell[3],
                cell[4],
                cell[5],
            ))

        try:
            metadata = copy.deepcopy(self._xds_inp)

            cell, sg_num, resol = decide_pointgroup(
                self._p1_unit_cell,
                metadata,
                input_spacegroup=self._input_spacegroup)
            self._unit_cell = cell
            self._space_group_number = sg_num

            if not self._resolution_high:
                self._resolution_high = resol

        except RuntimeError:
            write("Pointgroup determination failed")
            raise

        try:
            if self._params.get("atom", None):
                self._xds_inp["FRIEDEL'S_LAW"] = "FALSE"
            else:
                self._xds_inp["FRIEDEL'S_LAW"] = "TRUE"
            self._unit_cell, self._space_group, self._nref, beam_pixels = scale(
                self._unit_cell,
                self._xds_inp,
                self._space_group_number,
                self._resolution_high,
            )
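            # convert the refined beam centre from detector pixels to mm using
            # the XDS pixel sizes QX / QY (note the swapped axis order)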
            self._refined_beam = (
                beam_pixels[1] * float(self._xds_inp["QY"]),
                beam_pixels[0] * float(self._xds_inp["QX"]),
            )

        except RuntimeError:
            write("Scaling failed")
            raise

        try:
            self._scaling_statistics = merge(hklout="fast_rdp.mtz",
                                             aimless_log="aimless_rerun.log")
        except RuntimeError:
            write("Merging failed")
            raise

        write("Merging point group: %s" % self._space_group)
        write("Unit cell: %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f" %
              self._unit_cell)

        duration = time.time() - step_time
        write("Reprocessing took %s (%d s) [%d reflections]" % (
            time.strftime("%Hh %Mm %Ss", time.gmtime(duration)),
            duration,
            self._nref,
        ))

        # write out json and xml
        for func, filename in [
            (fast_dp.output.write_json, "fast_rdp.json"),
            (fast_dp.output.write_ispyb_xml, "fast_rdp.xml"),
        ]:
            func(
                self._commandline,
                self._space_group,
                self._unit_cell,
                self._scaling_statistics,
                self._start_image,
                self._refined_beam,
                filename=filename,
            )
Example 2
def main():
    """Main routine for fast_rdp."""

    from optparse import OptionParser

    commandline = " ".join(sys.argv)

    parser = OptionParser()

    parser.add_option("-a", "--atom", dest="atom", help="Atom type (e.g. Se)")

    parser.add_option(
        "-c",
        "--cell",
        dest="cell",
        help="Cell constants for processing, needs spacegroup",
    )
    parser.add_option(
        "-s",
        "--spacegroup",
        dest="spacegroup",
        help="Spacegroup for scaling and merging",
    )

    parser.add_option("-1",
                      "--first-image",
                      dest="first_image",
                      help="First image for processing")
    parser.add_option("-N",
                      "--last-image",
                      dest="last_image",
                      help="Last image for processing")

    parser.add_option("-r",
                      "--resolution-high",
                      dest="resolution_high",
                      help="High resolution limit")
    parser.add_option("-R",
                      "--resolution-low",
                      dest="resolution_low",
                      help="Low resolution limit")

    parser.add_option(
        "--version",
        dest="version",
        action="store_true",
        default=False,
        help="Print fast_dp version",
    )

    (options, args) = parser.parse_args()

    if options.version:
        print("Fast_RDP version %s" % fast_dp.__version__)
        sys.exit(0)

    # if an arg is given then assume that this is a directory with a fast_dp
    # job in it, but where $user does not have write access - so first
    # copy the files needed across

    if len(args) == 1:
        if not os.path.isdir(args[0]):
            raise RuntimeError("in this mode, provide /path/to/fast_dp/dir")
        import shutil

        from_dir = args[0]
        for filename in os.listdir(from_dir):
            if os.path.isdir(os.path.join(from_dir, filename)):
                continue
            shutil.copyfile(os.path.join(from_dir, filename),
                            os.path.join(os.getcwd(), filename))
    else:
        from_dir = None

    try:
        write("Fast_RDP version %s" % fast_dp.__version__)
        fast_rdp = FastRDP()
        fast_rdp._commandline = commandline
        write("Working in: %s" % os.getcwd())
        if from_dir:
            write("Working from: %s" % from_dir)

        if options.atom:
            fast_rdp.set_atom(options.atom)

        if options.first_image:
            first_image = int(options.first_image)
            fast_rdp.set_first_image(first_image)

        if options.last_image:
            last_image = int(options.last_image)
            fast_rdp.set_last_image(last_image)

        if options.resolution_low:
            fast_rdp.set_resolution_low(float(options.resolution_low))

        if options.resolution_high:
            fast_rdp.set_resolution_high(float(options.resolution_high))

        # the spacegroup must be given before the cell, as unpacking of the
        # unit cell depends on the centering operation...

        if options.spacegroup:
            try:
                spacegroup = check_spacegroup_name(options.spacegroup)
                fast_rdp.set_input_spacegroup(spacegroup)
                write("Set spacegroup: %s" % spacegroup)
            except RuntimeError:
                write("Spacegroup %s not recognised: ignoring" %
                      options.spacegroup)

        if options.cell:
            assert options.spacegroup
            cell = check_split_cell(options.cell)
            write("Set cell: %.2f %.2f %.2f %.2f %.2f %.2f" % cell)
            fast_rdp.set_input_cell(cell)

        fast_rdp.reprocess()

    except Exception as e:
        with open("fast_rdp.error", "w") as fh:
            traceback.print_exc(file=fh)
        write("Fast RDP error: %s" % str(e))
        sys.exit(1)
Example 3
def autoindex(xds_inp, input_cell=None):
    """Perform the autoindexing, using metatdata, get a list of possible
    lattices and record / return the triclinic cell constants (get these from
    XPARM.XDS)."""

    assert xds_inp

    xds_inp = add_spot_range(xds_inp)

    with open("AUTOINDEX.INP", "w") as fout:
        for k in sorted(xds_inp):
            if "SEGMENT" in k:
                continue
            v = xds_inp[k]
            if isinstance(v, list):
                for _v in v:
                    fout.write("%s=%s\n" % (k, _v))
            else:
                fout.write("%s=%s\n" % (k, v))

        fout.write("%s\n" % segment_text(xds_inp))

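        # if an input cell was supplied, pass it to IDXREF as a triclinic
        # (P1) target cell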
        if input_cell:
            fout.write("SPACE_GROUP_NUMBER=1\n")
            fout.write("UNIT_CELL_CONSTANTS=%f %f %f %f %f %f\n" %
                       tuple(input_cell))

        fout.write("JOB=XYCORR INIT COLSPOT IDXREF\n")
        fout.write("REFINE(IDXREF)=CELL AXIS ORIENTATION POSITION BEAM\n")
        fout.write("MAXIMUM_ERROR_OF_SPOT_POSITION= 2.0\n")
        fout.write("MINIMUM_FRACTION_OF_INDEXED_SPOTS= 0.5\n")

    shutil.copyfile("AUTOINDEX.INP", "XDS.INP")

    log = run_job("xds_par")

    with open("autoindex.log", "w") as fout:
        fout.write("".join(log))

    # sequentially check for errors... XYCORR INIT COLSPOT IDXREF

    for step in ["XYCORR", "INIT", "COLSPOT", "IDXREF"]:
        with open("%s.LP" % step) as fin:
            lastrecord = fin.readlines()[-1]
        if "!!! ERROR !!!" in lastrecord:
            raise RuntimeError(
                "error in %s: %s" %
                (step, lastrecord.replace("!!! ERROR !!!", "").strip()))

    results = read_xds_idxref_lp("IDXREF.LP")

    # FIXME if input cell was given, verify that this is an allowed
    # permutation. If it was not, raise a RuntimeError. This remains to be
    # fixed

    write("All autoindexing results:")
    write("%3s %6s %6s %6s %6s %6s %6s" %
          ("Lattice", "a", "b", "c", "alpha", "beta", "gamma"))

    for r in sorted((r for r in results if isinstance(r, int)), reverse=True):
        cell = results[r][1]
        write("%7s %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f" % (
            spacegroup_to_lattice(r),
            cell[0],
            cell[1],
            cell[2],
            cell[3],
            cell[4],
            cell[5],
        ))

    # should probably print this for debugging

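    # the triclinic solution (spacegroup 1) should always be present;
    # return its cell constants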
    try:
        return results[1][1]
    except Exception:
        raise RuntimeError("getting P1 cell for autoindex")
Example 4
def main():
    """Main routine for fast_dp."""

    commandline = " ".join(sys.argv)

    parser = OptionParser(usage="fast_dp [options] imagefile")

    parser.add_option("-?", action="help", help=SUPPRESS_HELP)

    parser.add_option("-b",
                      "--beam",
                      dest="beam",
                      help="Beam centre: x, y (mm)")

    parser.add_option("-d",
                      "--distance",
                      dest="distance",
                      help="Detector distance: d (mm)")

    parser.add_option("-a", "--atom", dest="atom", help="Atom type (e.g. Se)")

    parser.add_option(
        "-j",
        "--number-of-jobs",
        dest="number_of_jobs",
        help="Number of jobs for integration",
    )
    parser.add_option(
        "-k",
        "--number-of-cores",
        dest="number_of_cores",
        help="Number of cores for integration",
    )
    parser.add_option(
        "-J",
        "--maximum-number-of-jobs",
        dest="maximum_number_of_jobs",
        help="Maximum number of jobs for integration",
    )
    parser.add_option(
        "-e",
        "--execution-hosts",
        dest="execution_hosts",
        help="names for execution hosts for forkxds",
    )

    parser.add_option(
        "-c",
        "--cell",
        dest="cell",
        help="Cell constants for processing, needs spacegroup",
    )
    parser.add_option(
        "-s",
        "--spacegroup",
        dest="spacegroup",
        help="Spacegroup for scaling and merging",
    )

    parser.add_option("-1",
                      "--first-image",
                      dest="first_image",
                      help="First image for processing")
    parser.add_option("-N",
                      "--last-image",
                      dest="last_image",
                      help="Last image for processing")

    parser.add_option("-r",
                      "--resolution-high",
                      dest="resolution_high",
                      help="High resolution limit")
    parser.add_option("-R",
                      "--resolution-low",
                      dest="resolution_low",
                      help="Low resolution limit")

    parser.add_option(
        "-l",
        "--lib-name",
        dest="lib_name",
        help="HDF5 reader library (i.e. neggia etc.)",
    )

    parser.add_option(
        "--version",
        dest="version",
        action="store_true",
        default=False,
        help="Print fast_dp version",
    )

    (options, args) = parser.parse_args()

    if options.version:
        print("Fast_DP version %s" % fast_dp.__version__)
        sys.exit(0)

    if len(args) != 1:
        parser.error("You must point to one image of the dataset to process")

    image = args[0]

    xia2_format = re.match(r"^(.*):(\d+):(\d+)$", image)
    if xia2_format:
        # Image can be given in xia2-style format, i.e.
        #   set_of_images_00001.cbf:1:5000
        # to select images 1 to 5000. Resolve any conflicts
        # with -1/-N in favour of the explicit arguments.
        image = xia2_format.group(1)
        if not options.first_image:
            options.first_image = xia2_format.group(2)
        if not options.last_image:
            options.last_image = xia2_format.group(3)

    if options.lib_name:
        fast_dp.image_readers.set_lib_name(options.lib_name)

    try:
        write("Fast_DP version %s" % fast_dp.__version__)
        finst = FastDP()
        finst._commandline = commandline
        write("Starting image: %s" % image)
        missing = finst.set_start_image(image)
        if options.beam:
            x, y = tuple(map(float, options.beam.split(",")))
            finst.set_beam((x, y))

        if options.distance:
            finst.set_distance(float(options.distance))

        if options.atom:
            finst.set_atom(options.atom)

        if options.maximum_number_of_jobs:
            finst.set_max_n_jobs(int(options.maximum_number_of_jobs))

        if options.execution_hosts:
            finst.set_execution_hosts(options.execution_hosts.split(","))
            write("Execution hosts: %s" %
                  " ".join(finst.get_execution_hosts()))

        if options.number_of_jobs:
            if options.maximum_number_of_jobs:
                n_jobs = min(int(options.number_of_jobs),
                             int(options.maximum_number_of_jobs))
            else:
                n_jobs = int(options.number_of_jobs)
            finst.set_n_jobs(n_jobs)

        if options.number_of_cores:
            n_cores = int(options.number_of_cores)
            finst.set_n_cores(n_cores)

        if options.first_image:
            first_image = int(options.first_image)
            missing = [m for m in missing if m >= first_image]
            finst.set_first_image(first_image)

        if options.last_image:
            last_image = int(options.last_image)
            missing = [m for m in missing if m <= last_image]
            finst.set_last_image(last_image)

        if missing:
            raise RuntimeError("images missing: %s" %
                               " ".join(map(str, missing)))

        if options.resolution_low:
            finst.set_resolution_low(float(options.resolution_low))

        if options.resolution_high:
            finst.set_resolution_high(float(options.resolution_high))

        # the spacegroup must be given before the cell, as unpacking of the
        # unit cell depends on the centering operation...

        if options.spacegroup:
            try:
                spacegroup = check_spacegroup_name(options.spacegroup)
                finst.set_input_spacegroup(spacegroup)
                write("Set spacegroup: %s" % spacegroup)
            except RuntimeError:
                write("Spacegroup %s not recognised: ignoring" %
                      options.spacegroup)

        if options.cell:
            assert options.spacegroup
            cell = check_split_cell(options.cell)
            write("Set cell: %.2f %.2f %.2f %.2f %.2f %.2f" % cell)
            finst.set_input_cell(cell)

        finst.process()

    except Exception as e:
        with open("fast_dp.error", "w") as fh:
            traceback.print_exc(file=fh)
        write("Fast DP error: %s" % str(e))
        sys.exit(1)

    finally:
        json_stuff = {}
        ignore = []
        for prop in dir(finst):
            if not prop.startswith("_") or prop.startswith("__"):
                continue
            if prop in ignore:
                continue
            json_stuff[prop] = getattr(finst, prop)
        with open("fast_dp.state", "w") as fh:
            json.dump(json_stuff, fh)
Example 5
    def process(self):
        """Main routine, chain together all of the steps imported from
        autoindex, integrate, pointgroup, scale and merge."""

        write("Running on: %s" % str(os.getenv("HOSTNAME")).split(".")[0])

        # check input frame limits

        start, end = map(int, self._xds_inp["DATA_RANGE"].split())
        osc = float(self._xds_inp["OSCILLATION_RANGE"])
        osc_start = float(self._xds_inp["STARTING_ANGLE"])

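        # an oscillation width of zero means still / grid-scan images, which
        # cannot be processed here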
        if osc == 0.0:
            raise RuntimeError("grid scan data")

        if self._first_image is not None:
            if start < self._first_image:
                osc_start += osc * (self._first_image - start)
                start = self._first_image
                self._xds_inp["STARTING_ANGLE"] = str(osc_start)
                self._xds_inp["STARTING_FRAME"] = str(start)

        if self._last_image is not None:
            end = min(end, self._last_image)

        self._xds_inp["DATA_RANGE"] = "%s %s" % (start, end)

        # first, if the number of jobs was set to 0, decide something sensible:
        # jobs of a minimum of 5 degrees and at least 10 frames each.

        wedge = max(10, int(round(5.0 / osc)))
        wedge = min(wedge, end - start)
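        # e.g. with 0.1 degree frames: wedge = max(10, round(5.0 / 0.1)) = 50,
        # so a 3600-image sweep gives round(3600 / 50) = 72 jobs before capping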

        self._xds_inp["BACKGROUND_RANGE"] = "%d %d" % (start, start + wedge)

        if self._n_jobs == 0:
            frames = end - start + 1
            n_jobs = int(round(frames / wedge))
            if self._max_n_jobs > 0:
                if n_jobs > self._max_n_jobs:
                    n_jobs = self._max_n_jobs
            self.set_n_jobs(n_jobs)

        write("Number of jobs: %d" % self._n_jobs)
        write("Number of cores: %d" % self._n_cores)

        step_time = time.time()

        write("Processing images: %d -> %d" % (start, end))
        osc_end = osc_start + (end - start + 1) * osc
        write("Rotation range: %.2f -> %.2f" % (osc_start, osc_end))

        template = self._xds_inp["NAME_TEMPLATE_OF_DATA_FRAMES"]

        write("Template: %s" % os.path.split(template)[-1].replace("?", "#"))
        write("Wavelength: %.5f" % float(self._xds_inp["X-RAY_WAVELENGTH"]))
        write("Working in: %s" % os.getcwd())

        try:
            self._p1_unit_cell = autoindex(self._xds_inp,
                                           input_cell=self._input_cell_p1)
        except Exception:
            write("Autoindexing failed")
            raise

        try:
            mosaics = integrate(
                self._xds_inp,
                self._p1_unit_cell,
                self._resolution_low,
                self._n_jobs,
                self._n_cores,
            )
            write("Mosaic spread: %.2f < %.2f < %.2f" % tuple(mosaics))
        except RuntimeError:
            write("Integration failed")
            raise

        try:
            metadata = copy.deepcopy(self._xds_inp)

            cell, sg_num, resol = decide_pointgroup(
                self._p1_unit_cell,
                metadata,
                input_spacegroup=self._input_spacegroup)
            self._unit_cell = cell
            self._space_group_number = sg_num

            if not self._resolution_high:
                self._resolution_high = resol
        except RuntimeError:
            write("Pointgroup determination failed")
            raise

        try:
            if self._params.get("atom", None):
                self._xds_inp["FRIEDEL'S_LAW"] = "FALSE"
            else:
                self._xds_inp["FRIEDEL'S_LAW"] = "TRUE"
            self._unit_cell, self._space_group, self._nref, beam_pixels = scale(
                self._unit_cell,
                self._xds_inp,
                self._space_group_number,
                self._resolution_high,
            )
            self._refined_beam = (
                beam_pixels[1] * float(self._xds_inp["QY"]),
                beam_pixels[0] * float(self._xds_inp["QX"]),
            )
        except RuntimeError:
            write("Scaling failed")
            raise

        try:
            self._scaling_statistics = merge()
        except RuntimeError:
            write("Merging failed")
            raise

        write("Merging point group: %s" % self._space_group)
        write("Unit cell: %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f" %
              self._unit_cell)

        duration = time.time() - step_time
        write("Processing took %s (%d s) [%d reflections]" % (
            time.strftime("%Hh %Mm %Ss", time.gmtime(duration)),
            duration,
            self._nref,
        ))
        write("RPS: %.1f" % (float(self._nref) / duration))

        # write out json and xml
        for func in (fast_dp.output.write_json,
                     fast_dp.output.write_ispyb_xml):
            func(
                self._commandline,
                self._space_group,
                self._unit_cell,
                self._scaling_statistics,
                self._start_image,
                self._refined_beam,
            )
Example 6
def parse_aimless_log(log):
    for record in log:
        if "Low resolution limit  " in record:
            lres = tuple(map(float, record.split()[-3:]))
        elif "High resolution limit  " in record:
            hres = tuple(map(float, record.split()[-3:]))
        elif "Rmerge  (within I+/I-)  " in record:
            rmerge = tuple(map(float, record.split()[-3:]))
        elif "Rmeas (all I+ & I-) " in record:
            rmeas = tuple(map(float, record.split()[-3:]))
        elif "Mean((I)/sd(I))  " in record:
            isigma = tuple(map(float, record.split()[-3:]))
        elif "Completeness  " in record:
            comp = tuple(map(float, record.split()[-3:]))
        elif "Multiplicity  " in record:
            mult = tuple(map(float, record.split()[-3:]))
        elif "Anomalous completeness  " in record:
            acomp = tuple(map(float, record.split()[-3:]))
        elif "Anomalous multiplicity  " in record:
            amult = tuple(map(float, record.split()[-3:]))
        elif "Mid-Slope of Anom Normal Probability  " in record:
            slope = float(record.split()[-3])
        elif "Total number of observations" in record:
            nref = tuple(map(int, record.split()[-3:]))
        elif "Total number unique" in record:
            nuniq = tuple(map(int, record.split()[-3:]))
        elif "DelAnom correlation between half-sets" in record:
            ccanom = tuple(map(float, record.split()[-3:]))
        elif "Mn(I) half-set correlation CC(1/2)" in record:
            cchalf = tuple(map(float, record.split()[-3:]))

    scaling_statistics = {
        shell: {
            "anom_completeness": acomp[index],
            "anom_multiplicity": amult[index],
            "cc_anom": ccanom[index],
            "cc_half": cchalf[index],
            "completeness": comp[index],
            "mean_i_sig_i": isigma[index],
            "multiplicity": mult[index],
            "n_tot_obs": nref[index],
            "n_tot_unique_obs": nuniq[index],
            "r_meas_all_iplusi_minus": rmeas[index],
            "r_merge": rmerge[index],
            "res_lim_high": hres[index],
            "res_lim_low": lres[index],
        }
        for index, shell in enumerate(("overall", "innerShell", "outerShell"))
    }

    # compute some additional results
    df_f, di_sigdi = anomalous_signals("fast_dp.mtz")

    # print out the results...
    write(80 * "-")

    write("%20s " % "Low resolution" + "%6.2f %6.2f %6.2f" % lres)
    write("%20s " % "High resolution" + "%6.2f %6.2f %6.2f" % hres)
    write("%20s " % "Rmerge" + "%6.3f %6.3f %6.3f" % rmerge)
    write("%20s " % "I/sigma" + "%6.2f %6.2f %6.2f" % isigma)
    write("%20s " % "Completeness" + "%6.1f %6.1f %6.1f" % comp)
    write("%20s " % "Multiplicity" + "%6.1f %6.1f %6.1f" % mult)
    write("%20s " % "CC 1/2" + "%6.3f %6.3f %6.3f" % cchalf)
    write("%20s " % "Anom. Completeness" + "%6.1f %6.1f %6.1f" % acomp)
    write("%20s " % "Anom. Multiplicity" + "%6.1f %6.1f %6.1f" % amult)
    write("%20s " % "Anom. Correlation" + "%6.3f %6.3f %6.3f" % ccanom)
    write("%20s " % "Nrefl" + "%6d %6d %6d" % nref)
    write("%20s " % "Nunique" + "%6d %6d %6d" % nuniq)
    write("%20s " % "Mid-slope" + "%6.3f" % slope)
    write("%20s " % "dF/F" + "%6.3f" % df_f)
    write("%20s " % "dI/sig(dI)" + "%6.3f" % di_sigdi)

    write(80 * "-")

    return scaling_statistics
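
For reference, a minimal standalone sketch (not fast_dp code, with illustrative values only) of the parsing pattern used above: each matched AIMLESS summary record carries three trailing numbers, which map onto the overall, inner-shell and outer-shell entries of the statistics dictionary.

# minimal sketch: map the three trailing numbers of one summary record
# onto the overall / innerShell / outerShell entries
record = "Completeness                              99.5   99.9   97.3"
values = tuple(map(float, record.split()[-3:]))
stats = {
    shell: {"completeness": values[index]}
    for index, shell in enumerate(("overall", "innerShell", "outerShell"))
}
print(stats["outerShell"]["completeness"])  # -> 97.3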