示例#1
0
def run_dials_sequence(filename_template, prefix, nr_range, wdir, known_xs,
                       overrides, scan_varying, nproc):
    """Run the full DIALS pipeline for one sweep inside *wdir*.

    Sequence: import -> find_spots -> index (trying several method /
    assignment combinations until one succeeds) -> optional scan-varying
    refinement -> integrate -> export -> pointless/aimless, finally dumping
    merging statistics to kamo_dials.pkl.  Returns None; all results are
    written as files under *wdir*.

    filename_template: image file template (with '?' placeholders)
    prefix:            dataset prefix; its dirname is used as the data dir
    nr_range:          (first, last) frame numbers
    known_xs:          crystal symmetry to enforce during indexing, or None
    overrides:         header override dict (only orgx/orgy handled here)
    scan_varying:      if true, run scan-varying dials.refine before integration
    nproc:             number of processors for spot finding / integration
    """
    log_out = open(os.path.join(wdir, "dials_sequence.log"), "w")
    pointless_log = os.path.join(wdir, "pointless.log")

    # Prepare: collect the image files that actually exist on disk.
    img_files = find_existing_files_in_template(
        filename_template,
        nr_range[0],
        nr_range[1],
        datadir=os.path.dirname(prefix),
        check_compressed=True)
    if len(img_files) == 0:
        mylog.error("No files found for %s %s" % (filename_template, nr_range))
        log_out.close()
        return

    nproc_str = "nproc=%d" % nproc

    log_out.write("Importing %s range=%s\n" % (img_files, nr_range))
    log_out.write(" Overrides: %s\n" % overrides)
    log_out.flush()

    override_str = ""  # TODO support other stuff.. (wavelength, distance, osc_range, rotation_axis,..)
    if "orgx" in overrides and "orgy" in overrides:
        # DIALS takes the beam centre as slow,fast - i.e. (orgy, orgx).
        override_str += "slow_fast_beam_centre=%.2f,%.2f " % (
            overrides["orgy"], overrides["orgx"])

    if len(img_files) == 1 and img_files[0].endswith(".h5"):
        # Single HDF5 master file: import by filename.
        util.call('dials.import "%s" %s image_range=%d,%d' %
                  (img_files[0], override_str, nr_range[0], nr_range[1]),
                  wdir=wdir,
                  stdout=log_out,
                  expects_out=[os.path.join(wdir, "datablock.json")])
    else:
        # Image series: import by template (dials uses '#' as the wildcard).
        util.call('dials.import %s template="%s" image_range=%d,%d' %
                  (override_str, filename_template.replace(
                      "?", "#"), nr_range[0], nr_range[1]),
                  wdir=wdir,
                  stdout=log_out,
                  expects_out=[os.path.join(wdir, "datablock.json")])

    util.call(
        "dials.find_spots datablock.json filter.d_max=30 %s" %
        nproc_str,  # global_threshold=200
        wdir=wdir,
        stdout=log_out,
        expects_out=[os.path.join(wdir, "strong.pickle")])

    util.call("dials.export strong.pickle format=xds xds.directory=.",
              wdir=wdir,
              stdout=log_out)

    # Try indexing strategies in order of preference; stop at first success.
    index_ok = False
    for index_meth in ("fft3d", "fft1d", "real_space_grid_search"):
        if index_ok:
            break
        for index_assi in ("local", "simple"):
            cmd = "dials.index datablock.json strong.pickle verbosity=3 "
            cmd += "indexing.method=%s index_assignment.method=%s " % (
                index_meth, index_assi)
            if known_xs is not None:  # not in (known.space_group, known.unit_cell):
                cmd += "unit_cell=%s space_group=%d " % (
                    ",".join(
                        map(lambda x: "%.3f" % x,
                            known_xs.unit_cell().parameters())),
                    known_xs.space_group().type().number())
            elif index_meth == "real_space_grid_search":
                # real_space_grid_search needs a known cell; skip this method.
                break

            log_out.write(
                "Trying indexing.method=%s index_assignment.method=%s\n" %
                (index_meth, index_assi))
            log_out.flush()
            util.call(cmd, wdir=wdir, stdout=log_out)
            if os.path.isfile(os.path.join(wdir, "experiments.json")):
                index_ok = True
                break
            else:
                # Move failed logs aside so the next attempt starts clean.
                for f in ("dials.index.log", "dials.index.debug.log"):
                    util.rotate_file(os.path.join(wdir, f))

    if not index_ok:
        log_out.close()
        return

    files_for_integration = "experiments.json indexed.pickle"

    if scan_varying:
        util.call(
            "dials.refine experiments.json indexed.pickle scan_varying=true",
            wdir=wdir,
            stdout=log_out)
        if os.path.isfile(os.path.join(wdir, "refined.pickle")):
            files_for_integration = "refined_experiments.json refined.pickle"
        else:
            # Refinement failed; fall back to the indexing results.
            log_out.write("dials.refine failed. using indexed results.\n")

    util.call("dials.integrate %s min_spots.per_degree=10 %s" %
              (files_for_integration, nproc_str),
              wdir=wdir,
              stdout=log_out)
    util.call(
        "dials.export integrated.pickle integrated_experiments.json mtz.hklout=integrated.mtz",
        wdir=wdir,
        stdout=log_out)
    pointless_out = open(pointless_log, "w")
    util.call("pointless integrated.mtz hklout pointless.mtz",
              wdir=wdir,
              stdin="SETTING SYMMETRY-BASED\ntolerance 10\n",
              stdout=pointless_out)
    pointless_out.close()
    util.call(
        "dials.export integrated_experiments.json integrated.pickle format=xds_ascii xds_ascii.hklout=DIALS.HKL",
        wdir=wdir,
        stdout=log_out)
    aimless_out = open(os.path.join(wdir, "aimless.log"), "w")
    util.call("aimless hklin pointless.mtz hklout aimless.mtz",
              wdir=wdir,
              stdin="output UNMERGED\n",
              stdout=aimless_out)
    aimless_out.close()

    #job_str += "touch dials_job_finished\n"

    ret = calc_merging_stats(os.path.join(wdir, "aimless_unmerged.mtz"))
    ret["symm"] = get_most_possible_symmetry(wdir)

    pickle.dump(ret, open(os.path.join(wdir, "kamo_dials.pkl"), "w"), -1)
    log_out.close()
示例#2
0
def run(params):#data_dir, wdir, use_normalized=False):
    if (params.space_group, params.cell).count(None) == 1:
        print "If you want to specify cell or symmetry, give both."
        return

    if params.cell is not None and len(params.cell) > 6:
        print "Too many parameters for unit cell:", params.cell
        return

    xs = None if params.space_group is None else create_crystal_symmetry(params.space_group, params.cell)

    logobjects = pickle.load(open(params.pklin))

    # set topdir
    topdir = os.path.dirname(os.path.commonprefix(map(lambda x: os.path.dirname(x[0]), logobjects)))

    for log, logobj, spot_stats in logobjects:
        relpath = os.path.relpath(log, topdir)
        if relpath.startswith(os.sep) or relpath.startswith(".."):
            print "Outside:", log
            continue

        if len(logobj.jobs) > 1:
            print "##############################################################"
            print relpath
            print "Why %d jobs!?" % len(logobj.jobs)
            print "May be overwritten? Additional images?? Need to be careful!!" 
            print "Currently, I just pick up the last information."
            print "##############################################################"

        job, spotstat = logobj.jobs[-1], spot_stats[-1]
        range_inlog = job.get_frame_num_range()
        if None in range_inlog: dirname = "xds_%s" % (job.prefix)
        else: dirname = "xds_%s_%.4d-%.4d" % ((job.prefix,)+job.get_frame_num_range())
        wd = os.path.join(params.workdir, os.path.dirname(relpath), dirname)

        if os.path.isfile(os.path.join(wd, "XDS.INP")) and params.dont_overwrite:
            continue

        print wd, len(job.images)
        data_range = 1, job.n_images
        if params.min_spots_for_ends is not None:
            data_range = decide_data_range_using_nspots(spotstat, params.min_spots_for_ends)
            if None in data_range or data_range[1]-data_range[0] < 3:
                print "  Oh no!! not useful data!"
                if None in data_range:
                    print "  No images contains more than %d spots." % params.min_spots_for_ends
                else:
                    print "  Only %d images contains more than %d spots." % (data_range[1]-data_range[0]+1,
                                                                             params.min_spots_for_ends)
                continue
            print " Data range:", data_range

        # How about okkake sekibun?
        img_files = find_existing_files_in_template(job.filename, data_range[0], data_range[1],
                                                    datadir=os.path.dirname(log), check_compressed=True)

        os.makedirs(wd)
        xdsinp_str = xds_inp.generate_xds_inp(img_files=img_files,
                                              inp_dir=os.path.abspath(wd),
                                              reverse_phi=True, anomalous=params.anomalous,
                                              spot_range=params.frames_for_index, minimum=False,
                                              crystal_symmetry=xs,
                                              integrate_nimages=params.integrate_nimages)
        ofs = open(os.path.join(wd, "XDS.INP"), "w")
        ofs.write(xdsinp_str)

        if spotstat != []:
            make_shikalog(spotstat, open(os.path.join(wd, "shika.log"), "w"))
示例#3
0
def run(params, topdirs):
    import sys

    LogClass = dict(sp8=BssJobLog, pf=None)

    out_root = os.path.dirname(params.pklout)
    out = multi_out()
    out.register("log",
                 open(os.path.join(out_root, "find_datasets.log"), "w"),
                 atexit_send_to=None)
    out.register("stdout", sys.stdout)

    if params.find_spots:
        shikalog = open(os.path.join(out_root, "shika.log"), "w")
        shikalog.write("prefix idx nspots\n")

    config_mgr = spot_finder_gui.ConfigManager(use_cuda=params.use_cuda)

    logobjects = []

    for topdir in topdirs:
        print >> out, "Looking into %s\n" % topdir
        for root, dirnames, filenames in os.walk(topdir, followlinks=True):
            logs = filter(lambda x: x.endswith(".log"), filenames)
            for log in logs:
                log = os.path.join(root, log)

                for logtype in params.logtype:
                    if 1:  #try:
                        try:
                            logobj = LogClass[logtype](log)
                        except:
                            print >> out, traceback.format_exc()
                            print >> out, "\nException raised when parsing %s\n" % log
                            raise

                        logobj.jobs = filter(lambda x: x.job_mode != "XAFS",
                                             logobj.jobs)
                        if len(logobj.jobs) > 0:
                            print >> out, "Found job log:", log
                            spots = []
                            for job in logobj.jobs:
                                if params.find_spots:
                                    spots.append(
                                        search_spots(
                                            os.path.join(
                                                root,
                                                os.path.basename(
                                                    job.filename)),
                                            job.n_images, config_mgr))
                                    for idx, f, stat in spots[-1]:
                                        if stat is None: continue
                                        n_spots = stat.spots.get_n_spots(
                                            "hi_pass_resolution_spots")
                                        shikalog.write(
                                            "%s %6d %4d\n" % (os.path.join(
                                                root,
                                                os.path.basename(
                                                    job.filename)), idx,
                                                              n_spots))
                                        shikalog.flush()
                                else:
                                    spots.append([])
                            logobjects.append([log, logobj, spots])
                    #except:
                    #    pass

    print >> out

    logobjects.sort(key=lambda x: x[0])

    for log, logobj, spots in logobjects:
        print log
        for job, spt in zip(logobj.jobs, spots):
            jobtype = "?"
            if job.advanced_centering == {} or test_gonio_coords_equal(
                    job.advanced_centering.get("centers", [])):
                jobtype = "Single  "
            elif job.advanced_centering.get("mode", "") == "vector_centering":
                jobtype = "Helical "
            elif job.advanced_centering.get("mode",
                                            "") == "multiple_centering":
                jobtype = "MultiCen"
            else:
                print >> out, "WARNING:: WHY REACH HERE?", job.advanced_centering

            osc_range = job.osc_end - job.osc_start
            #nfound = check_files_exist(job.filename, job.n_images, os.path.dirname(log))
            nfound = len(
                dataset.find_existing_files_in_template(
                    job.filename,
                    1,
                    job.n_images,
                    datadir=os.path.dirname(log),
                    check_compressed=True))

            print >> out, " %s osc_step=%6.3f osc_range=%5.1f found=%d/%d %s beam_size=%s" % (
                job.beamline, job.osc_step, osc_range, nfound, job.n_images,
                jobtype, "x".join(map(lambda x: "%.1f" % x, job.beam_size)))
            if params.find_spots:
                n_spots = [
                    stat.spots.get_n_spots("hi_pass_resolution_spots")
                    for idx, f, stat in spt if stat is not None
                ]
                gt20 = len(filter(lambda x: x >= 20, n_spots))
                print >> out, " spots < 5A: %d..%d (>=20 spots: %d frames)" % (
                    min(n_spots), max(n_spots), gt20)

        print >> out

    pickle.dump(logobjects, open(params.pklout, "w"), -1)
示例#4
0
def run(params, topdirs):
    import sys

    LogClass = dict(sp8=BssJobLog,
                    pf=None)

    out_root = os.path.dirname(params.pklout)
    out = multi_out()
    out.register("log", open(os.path.join(out_root, "find_datasets.log"), "w"), atexit_send_to=None)
    out.register("stdout", sys.stdout)

    if params.find_spots:
        shikalog = open(os.path.join(out_root, "shika.log"), "w")
        shikalog.write("prefix idx nspots\n")

    config_mgr = spot_finder_gui.ConfigManager(use_cuda=params.use_cuda)

    logobjects = []

    for topdir in topdirs:
        print >>out, "Looking into %s\n" % topdir
        for root, dirnames, filenames in os.walk(topdir, followlinks=True):
            logs = filter(lambda x: x.endswith(".log"), filenames)
            for log in logs:
                log = os.path.join(root, log)

                for logtype in params.logtype:
                    if 1:#try:
                        try:
                            logobj = LogClass[logtype](log)
                        except:
                            print >>out, traceback.format_exc()
                            print >>out, "\nException raised when parsing %s\n"%log
                            raise

                        logobj.jobs = filter(lambda x: x.job_mode != "XAFS", logobj.jobs)
                        if len(logobj.jobs) > 0:
                            print >>out, "Found job log:", log
                            spots = []
                            for job in logobj.jobs:
                                if params.find_spots:
                                    spots.append(search_spots(os.path.join(root, os.path.basename(job.filename)),
                                                              job.n_images, config_mgr))
                                    for idx, f, stat in spots[-1]:
                                        if stat is None: continue
                                        n_spots = stat.spots.get_n_spots("hi_pass_resolution_spots")
                                        shikalog.write("%s %6d %4d\n" % (os.path.join(root,
                                                                                      os.path.basename(job.filename)),
                                                                         idx, n_spots))
                                        shikalog.flush()
                                else:
                                    spots.append([])
                            logobjects.append([log, logobj, spots])
                    #except:
                    #    pass

    print >>out

    logobjects.sort(key=lambda x:x[0])

    for log, logobj, spots in logobjects:
        print log
        for job, spt in zip(logobj.jobs, spots):
            jobtype = "?"
            if job.advanced_centering == {} or test_gonio_coords_equal(job.advanced_centering.get("centers",[])):
                jobtype = "Single  " 
            elif job.advanced_centering.get("mode", "") == "vector_centering":
                jobtype = "Helical "
            elif job.advanced_centering.get("mode", "") == "multiple_centering":
                jobtype = "MultiCen"
            else:
                print >>out, "WARNING:: WHY REACH HERE?", job.advanced_centering
                
            osc_range = job.osc_end - job.osc_start
            #nfound = check_files_exist(job.filename, job.n_images, os.path.dirname(log))
            nfound = len(dataset.find_existing_files_in_template(job.filename, 1, job.n_images,
                                                                 datadir=os.path.dirname(log),
                                                                 check_compressed=True))

            print >>out, " %s osc_step=%6.3f osc_range=%5.1f found=%d/%d %s beam_size=%s" % (job.beamline, job.osc_step, osc_range,
                                                   nfound, job.n_images, jobtype,
                                                   "x".join(map(lambda x:"%.1f"%x,job.beam_size)))
            if params.find_spots:
                n_spots = [stat.spots.get_n_spots("hi_pass_resolution_spots") for idx, f, stat in spt if stat is not None]
                gt20 = len(filter(lambda x: x>=20, n_spots))
                print >>out, " spots < 5A: %d..%d (>=20 spots: %d frames)" % (min(n_spots), max(n_spots), gt20)

        print >>out

    pickle.dump(logobjects, open(params.pklout, "w"), -1)
示例#5
0
def run(params):  #data_dir, wdir, use_normalized=False):
    if (params.space_group, params.cell).count(None) == 1:
        print "If you want to specify cell or symmetry, give both."
        return

    if params.cell is not None and len(params.cell) > 6:
        print "Too many parameters for unit cell:", params.cell
        return

    xs = None if params.space_group is None else create_crystal_symmetry(
        params.space_group, params.cell)

    logobjects = pickle.load(open(params.pklin))

    # set topdir
    topdir = os.path.dirname(
        os.path.commonprefix(map(lambda x: os.path.dirname(x[0]), logobjects)))

    for log, logobj, spot_stats in logobjects:
        relpath = os.path.relpath(log, topdir)
        if relpath.startswith(os.sep) or relpath.startswith(".."):
            print "Outside:", log
            continue

        if len(logobj.jobs) > 1:
            print "##############################################################"
            print relpath
            print "Why %d jobs!?" % len(logobj.jobs)
            print "May be overwritten? Additional images?? Need to be careful!!"
            print "Currently, I just pick up the last information."
            print "##############################################################"

        job, spotstat = logobj.jobs[-1], spot_stats[-1]
        range_inlog = job.get_frame_num_range()
        if None in range_inlog: dirname = "xds_%s" % (job.prefix)
        else:
            dirname = "xds_%s_%.4d-%.4d" % (
                (job.prefix, ) + job.get_frame_num_range())
        wd = os.path.join(params.workdir, os.path.dirname(relpath), dirname)

        if os.path.isfile(os.path.join(wd,
                                       "XDS.INP")) and params.dont_overwrite:
            continue

        print wd, len(job.images)
        data_range = 1, job.n_images
        if params.min_spots_for_ends is not None:
            data_range = decide_data_range_using_nspots(
                spotstat, params.min_spots_for_ends)
            if None in data_range or data_range[1] - data_range[0] < 3:
                print "  Oh no!! not useful data!"
                if None in data_range:
                    print "  No images contains more than %d spots." % params.min_spots_for_ends
                else:
                    print "  Only %d images contains more than %d spots." % (
                        data_range[1] - data_range[0] + 1,
                        params.min_spots_for_ends)
                continue
            print " Data range:", data_range

        # How about okkake sekibun?
        img_files = find_existing_files_in_template(
            job.filename,
            data_range[0],
            data_range[1],
            datadir=os.path.dirname(log),
            check_compressed=True)

        os.makedirs(wd)
        xdsinp_str = xds_inp.generate_xds_inp(
            img_files=img_files,
            inp_dir=os.path.abspath(wd),
            reverse_phi=True,
            anomalous=params.anomalous,
            spot_range=params.frames_for_index,
            minimum=False,
            crystal_symmetry=xs,
            integrate_nimages=params.integrate_nimages)
        ofs = open(os.path.join(wd, "XDS.INP"), "w")
        ofs.write(xdsinp_str)

        if spotstat != []:
            make_shikalog(spotstat, open(os.path.join(wd, "shika.log"), "w"))