Code example #1
def getEquipInt(tags, equip, hightag=201901):
    '''
    returns data as integer from the detector for the specified tags and hightag
    input:
        tags: tuple of integer tag values
        equip: equipment name
        hightag: high tag integer value
    output: detector value across tags as integer
    '''
    return dbpy.read_syncdatalist(equip, hightag, tags)
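
A minimal usage sketch (not part of the original example): it assumes the SACLA dbpy module is importable on a facility analysis node, and the tag numbers below are placeholders; real tags would come from dbpy.read_taglist_byrun().

# Hypothetical usage; dbpy is only available on SACLA analysis nodes.
tags = (123456789, 123456790, 123456791)     # placeholder tag numbers
status = getEquipInt(tags, "xfel_bl_3_shutter_1_open_valid/status", hightag=201901)
print(status)  # one value per tag, in the same order as tags
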
Code example #2
def get_energies(tags, taghi, bl):
    if not tags: return []

    valid_tags = filter(lambda x: type(x) is int, tags)

    if valid_tags:
        ene = dbpy.read_syncdatalist("xfel_bl_%d_tc_spec_1/energy"%bl, taghi, tuple(valid_tags))
        assert len(ene) == len(valid_tags)
    else:
        ene = ()

    tag_ene = dict(zip(valid_tags, ene))

    ret = []
    for tag in tags:
        e = tag_ene.get(tag)
        try: e = float(e)
        except (TypeError, ValueError): e = float("nan")
        ret.append(e)
    return ret            
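
The NaN fallback at the end of get_energies can be exercised without dbpy; this self-contained sketch (made-up tags and energy strings, not from the original project) mirrors the same mapping.

import math

tags = [1001, "bad", 1003]               # hypothetical tag list with one non-integer entry
tag_ene = {1001: "7.012", 1003: None}    # what dict(zip(valid_tags, ene)) might look like

ret = []
for tag in tags:
    e = tag_ene.get(tag)
    try:
        e = float(e)
    except (TypeError, ValueError):
        e = float("nan")
    ret.append(e)

print(ret)                               # [7.012, nan, nan]
print(any(math.isnan(x) for x in ret))   # True
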
Code example #3
def run(opts):
    assert opts.runid is not None
    assert opts.bl is not None

    # Beamline specific constants
    if opts.bl == 2:
        sensor_spec = "xfel_bl_2_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_2_shutter_1_open_valid/status"
    elif opts.bl == 3:
        sensor_spec = "xfel_bl_3_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_3_shutter_1_open_valid/status"
    else:
        error_status("BadBeamline")
        return -1

    # Get run info
    try:
        run_info = dbpy.read_runinfo(opts.bl, opts.runid)
    except:
        error_status("BadRunID")
        return -1

    high_tag = dbpy.read_hightagnumber(opts.bl, opts.runid)
    start_tag = run_info['start_tagnumber']
    end_tag = run_info['end_tagnumber']

    tag_list = numpy.array(dbpy.read_taglist_byrun(opts.bl, opts.runid))
    print "# Run %d: HighTag %d, Tags %d (inclusive) to %d (exclusive), thus %d tags" % (opts.runid, high_tag, start_tag, end_tag, len(tag_list))
    comment = dbpy.read_comment(opts.bl, opts.runid)
    print "# Comment: %s" % comment

    # Get shutter status and find images
    try:
        shutter = numpy.array(map(str2float, dbpy.read_syncdatalist(sensor_shutter, high_tag, tuple(tag_list))))
    except:
        print traceback.format_exc()
        error_status("NoShutterStatus")
        return -1

    # XXX
    valid_tags = tag_list[shutter==1] # [tag for tag, is_open in zip(tag_list, shutter) if is_open == 1]
    if 0:
        tag_offset = 3
        tag_list = tag_list[tag_offset:]
        valid_tags = tag_list[numpy.arange(1, len(tag_list)+1)%6==0]
        
    if valid_tags.size == 0:
        error_status("NoValidTags")
        return -1

    # Get PD values
    pd1_values, pd2_values, pd3_values = map(lambda y: map(lambda x: float("nan"), xrange(len(valid_tags))), xrange(3))
    if opts.pd1_sensor_name:
        pd1_values = numpy.array(map(str2float, dbpy.read_syncdatalist(opts.pd1_sensor_name, high_tag, tuple(valid_tags))))
    if opts.pd2_sensor_name:
        pd2_values = numpy.array(map(str2float, dbpy.read_syncdatalist(opts.pd2_sensor_name, high_tag, tuple(valid_tags))))
    if opts.pd3_sensor_name:
        pd3_values = numpy.array(map(str2float, dbpy.read_syncdatalist(opts.pd3_sensor_name, high_tag, tuple(valid_tags))))

    print "tag pd1 pd2 pd3"
    for tag, pd1, pd2, pd3 in zip(valid_tags, pd1_values, pd2_values, pd3_values):
        print tag, pd1, pd2, pd3

    for i in xrange(len(valid_tags)):
        bad = []
        if (opts.pd1_threshold != 0 and
            not (opts.pd1_threshold > 0 and opts.pd1_threshold <= pd1_values[i]) and
            not (opts.pd1_threshold < 0 and -opts.pd1_threshold > pd1_values[i])): bad.append(1)
        if (opts.pd2_threshold != 0 and
            not (opts.pd2_threshold > 0 and opts.pd2_threshold <= pd2_values[i]) and
            not (opts.pd2_threshold < 0 and -opts.pd2_threshold > pd2_values[i])): bad.append(2)
        if (opts.pd3_threshold != 0 and
            not (opts.pd3_threshold > 0 and opts.pd3_threshold <= pd3_values[i]) and
            not (opts.pd3_threshold < 0 and -opts.pd3_threshold > pd3_values[i])): bad.append(3)

        if bad:
            print "# Bad tag=%d BadPD=%s" %(valid_tags[i], bad)
Code example #4
File: cheetah_marccd.py  Project: alfred-f-yu/cheetah
def run(opts):
    eltime_from = time.time()
    print("#\n#Configurations:")
    print("# runNumber (-r/--run):         %d" % opts.runid)
    print("# output H5 file (-o/--output): %s (default = run######.h5)" %
          opts.outputH5)
    print("# beamline (--bl):              %d (default = 3)" % opts.bl)
    print("# img root (--rayonix-root):    %s" % opts.rayonix_root)
    print("# distance (--clen):            %s" % opts.clen)
    print("# beam center (--beam-x/y):     %s,%s" % (opts.beam_x, opts.beam_y))
    print("# Cheetah settings")
    print("#  --dmin, --dmax:              %s,%s" % (opts.d_min, opts.d_max))
    print("#  --adc-threshold:             %s" % opts.ADCthresh)
    print("#  --min-snr:                   %s" % opts.MinSNR)
    print("#  --min/max-pixcount:          %s,%s" %
          (opts.MinPixCount, opts.MaxPixCount))
    print("#  --local-bgradius:            %s" % opts.LocalBGRadius)
    print("#  --min-peaksep:               %s" % opts.MinPeakSeparation)
    print("#  --min-spots:                 %s" % opts.min_spots)
    print("#  --algorithm:                 %s" % opts.algorithm)
    print("# PD1 threshold (--pd1_thresh): %.3f (default = 0; ignore.)" %
          opts.pd1_threshold)
    print("# PD2 threshold (--pd2_thresh): %.3f (default = 0; ignore.)" %
          opts.pd2_threshold)
    print("# PD3 threshold (--pd3_thresh): %.3f (default = 0; ignore.)" %
          opts.pd3_threshold)
    print("# PD1 sensor name (--pd1_name): %s)" % opts.pd1_sensor_name)
    print("# PD2 sensor name (--pd2_name): %s)" % opts.pd2_sensor_name)
    print("# PD3 sensor name (--pd3_name): %s)" % opts.pd3_sensor_name)
    print(
        "# nFrame after light:           %d (default = -1; accept all image. -2; accept all dark images)"
        % opts.light_dark)
    print(
        "# parallel_block:               %d (default = -1; no parallelization)"
        % opts.parallel_block)
    print("# nproc:                        %d (default = 1)" % opts.nproc)
    print("")

    assert opts.algorithm in (6, 8)
    assert opts.runid is not None
    assert opts.bl is not None

    # Beamline specific constants
    if opts.bl == 2:
        sensor_spec = "xfel_bl_2_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_2_shutter_1_open_valid/status"
    elif opts.bl == 3:
        sensor_spec = "xfel_bl_3_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_3_shutter_1_open_valid/status"
    else:
        error_status("BadBeamline")
        return -1

    # Get run info
    try:
        run_info = dbpy.read_runinfo(opts.bl, opts.runid)
    except:
        error_status("BadRunID")
        return -1

    high_tag = dbpy.read_hightagnumber(opts.bl, opts.runid)
    start_tag = run_info['start_tagnumber']
    end_tag = run_info['end_tagnumber']

    tag_list = numpy.array(dbpy.read_taglist_byrun(opts.bl, opts.runid))
    print "# Run %d: HighTag %d, Tags %d (inclusive) to %d (exclusive), thus %d tags" % (
        opts.runid, high_tag, start_tag, end_tag, len(tag_list))
    comment = dbpy.read_comment(opts.bl, opts.runid)
    print "# Comment: %s" % comment
    print

    # Get shutter status and find images
    try:
        shutter = numpy.array(
            map(
                str2float,
                dbpy.read_syncdatalist(sensor_shutter, high_tag,
                                       tuple(tag_list))))
    except:
        print traceback.format_exc()
        error_status("NoShutterStatus")
        return -1

    # XXX
    valid_tags = tag_list[shutter == 1]  # [tag for tag, is_open in zip(tag_list, shutter) if is_open == 1]
    print "# DEBUG:: shutter=", shutter
    print "# DEBUG:: valid_tags=", valid_tags
    if 0:
        tag_offset = 3
        tag_list = tag_list[tag_offset:]
        valid_tags = tag_list[numpy.arange(1, len(tag_list) + 1) % 6 == 0]

    if valid_tags.size == 0:
        error_status("NoValidTags")
        return -1

    # Find images
    img_files = sorted(
        glob.glob(
            os.path.join(opts.rayonix_root, str(opts.runid), "data_*.img")))
    print "# DEBUG:: img_files=%d valid_tags=%d" % (len(img_files),
                                                    len(valid_tags))
    if len(img_files) + 1 != len(valid_tags):  # last valid tag is not saved.
        print "# WARNING!! img_files and valid_tag number mismatch"

        img_numbers = map(lambda x: int(x[x.rindex("_") + 1:-4]), img_files)
        dropped_frames = sorted(
            set(range(1, len(valid_tags))).difference(img_numbers))
        print "# Unsaved frame numbers =", tuple(dropped_frames)
        print "# DEBUG::", len(img_files) - len(dropped_frames) + 1, len(
            valid_tags)
        if len(img_files) + len(dropped_frames) + 1 == len(valid_tags):
            print "#  %d unsaved img files found, which explains number mismatch" % len(
                dropped_frames)
            valid_tags = numpy.delete(valid_tags,
                                      numpy.array(dropped_frames) - 1)
            assert len(img_files) + 1 == len(valid_tags)
        else:
            print "# Assuming last %d img files are generated after stopping run.." % (
                len(img_files) - len(valid_tags) + 1)
            img_files = img_files[:len(valid_tags) - 1]
            assert len(img_files) + 1 == len(valid_tags)

    # Get photon energies
    photon_energies_in_keV = numpy.array([
        str2float(s) for s in dbpy.read_syncdatalist(sensor_spec, high_tag,
                                                     tuple(valid_tags))
    ])
    mean_photon_energy = numpy.mean(photon_energies_in_keV[
        photon_energies_in_keV ==
        photon_energies_in_keV])  # XXX if no valid data?
    print "# Photon energies obtained: %d valid numbers, %d invalid, average=%f sd=%f" % (
        len(photon_energies_in_keV),
        sum(photon_energies_in_keV != photon_energies_in_keV),
        mean_photon_energy,
        numpy.std(photon_energies_in_keV[photon_energies_in_keV ==
                                         photon_energies_in_keV]))
    photon_energies_in_keV[
        photon_energies_in_keV != photon_energies_in_keV] = mean_photon_energy

    # Get PD values
    pd1_values, pd2_values, pd3_values = [], [], []
    if opts.pd1_threshold != 0:
        pd1_values = numpy.array(
            map(
                str2float,
                dbpy.read_syncdatalist(opts.pd1_sensor_name, high_tag,
                                       tuple(valid_tags))))
    if opts.pd2_threshold != 0:
        pd2_values = numpy.array(
            map(
                str2float,
                dbpy.read_syncdatalist(opts.pd2_sensor_name, high_tag,
                                       tuple(valid_tags))))
    if opts.pd3_threshold != 0:
        pd3_values = numpy.array(
            map(
                str2float,
                dbpy.read_syncdatalist(opts.pd3_sensor_name, high_tag,
                                       tuple(valid_tags))))

    # Identify bad tags
    # XXX not tested!! this feature must not be used. tags with bad PDs must be detected after experiment.
    frame_after_light = 9999
    bad_tag_idxes = []
    for i in xrange(len(valid_tags)):
        light = True
        if (opts.pd1_threshold != 0
                and not (opts.pd1_threshold > 0
                         and opts.pd1_threshold <= pd1_values[i])
                and not (opts.pd1_threshold < 0
                         and -opts.pd1_threshold > pd1_values[i])):
            light = False
        if (opts.pd2_threshold != 0
                and not (opts.pd2_threshold > 0
                         and opts.pd2_threshold <= pd2_values[i])
                and not (opts.pd2_threshold < 0
                         and -opts.pd2_threshold > pd2_values[i])):
            light = False
        if (opts.pd3_threshold != 0
                and not (opts.pd3_threshold > 0
                         and opts.pd3_threshold <= pd3_values[i])
                and not (opts.pd3_threshold < 0
                         and -opts.pd3_threshold > pd3_values[i])):
            light = False

        if light:
            frame_after_light = 0
        else:
            frame_after_light += 1

        if ((opts.light_dark >= 0 and frame_after_light != opts.light_dark) or
            (opts.light_dark == PD_DARK_ANY and frame_after_light == 0)):
            print "# PD check: %d is bad tag!" % valid_tags[i]
            bad_tag_idxes.append(i)

    if bad_tag_idxes:
        valid_tags = numpy.delete(valid_tags, numpy.array(bad_tag_idxes))
        for i in reversed(bad_tag_idxes):
            del img_files[i]

    # Debug code; this takes too much time!
    try:
        if 0 and opts.parallel_block == 0:
            tag_timestamp = map(
                lambda x: datetime.datetime.fromtimestamp(
                    dbpy.read_timestamp_fromtag(high_tag, x, sensor_shutter)).
                strftime('%Y-%m-%d %H:%M:%S.%f'), valid_tags)
            img_timestamp = map(
                lambda x: marccd.MarCCD(x).acquire_time.strftime(
                    '%Y-%m-%d %H:%M:%S.%f'), img_files)
            ofs = open("tag_file_time.dat", "w")
            ofs.write("run tag file tag.time file.time\n")
            for i in xrange(len(img_files)):
                ofs.write('%d %d %s "%s" "%s"\n' %
                          (opts.runid, valid_tags[i], img_files[i],
                           tag_timestamp[i], img_timestamp[i]))
            ofs.close()
    except:
        pass

    # block spliting
    # TODO db query may be slow, which may need to be done only once?
    if opts.parallel_block >= 0:
        width = len(valid_tags) // parallel_size
        i_start = opts.parallel_block * width
        i_end = (opts.parallel_block + 1
                 ) * width if opts.parallel_block < parallel_size - 1 else None
        valid_tags = valid_tags[i_start:i_end]
        photon_energies_in_keV = photon_energies_in_keV[i_start:i_end]
        img_files = img_files[i_start:i_end]
        print "# parallel_block=%d: %d tags will be processed (%d..%d)" % (
            opts.parallel_block, len(valid_tags), valid_tags[0],
            valid_tags[-1])

    make_geom(img_files[0],
              opts.output_geom,
              beam_x=opts.beam_x,
              beam_y=opts.beam_y,
              clen=opts.clen)

    # Hit-finding
    results = process_images(img_files, mean_photon_energy, opts)
    file_tag_ene = []
    for frame, tag, ene in zip(sorted(results), valid_tags,
                               photon_energies_in_keV):
        if len(results[frame]["spots"]) < opts.min_spots:
            continue
        file_tag_ene.append((frame, tag, ene))

    # TODO on-the-fly status updating
    open("status.txt", "w").write("""\
# Cheetah status
Update time: %(ctime)s
Elapsed time: %(eltime)f sec
Status: Total=%(ntotal)d,Processed=%(ntotal)d,LLFpassed=%(ntotal)d,Hits=%(nhits)d,Status=WritingH5
Frames processed: %(ntotal)d
Number of hits: %(nhits)d
""" % dict(ctime=time.ctime(),
           eltime=time.time() - eltime_from,
           ntotal=len(img_files),
           nhits=len(file_tag_ene)))

    # Save h5
    # TODO implement on-the-fly h5 file writing in hit-finding to avoid reading img file twice.
    make_h5(out=opts.outputH5, file_tag_ene=file_tag_ene, comment=comment)

    open("status.txt", "w").write("""\
# Cheetah status
Update time: %(ctime)s
Elapsed time: %(eltime)f sec
Status: Total=%(ntotal)d,Processed=%(ntotal)d,LLFpassed=%(ntotal)d,Hits=%(nhits)d,Status=Finished
Frames processed: %(ntotal)d
Number of hits: %(nhits)d
""" % dict(ctime=time.ctime(),
           eltime=time.time() - eltime_from,
           ntotal=len(img_files),
           nhits=len(file_tag_ene)))

    ofs = open("cheetah.dat", "w")
    ofs.write("file tag nspots total_snr\n")
    for frame, tag in zip(sorted(results), valid_tags):
        ret = results[frame]
        n_spots = len(ret["spots"])
        total_snr = sum(map(lambda x: x[2], ret["spots"]))
        ofs.write("%s %d %6d %.3e\n" % (frame, tag, n_spots, total_snr))
    ofs.close()

    if opts.gen_adx:
        for frame in sorted(results):
            ret = results[frame]
            adx_out = open(os.path.basename(frame) + ".adx", "w")
            for x, y, snr, d in ret["spots"]:
                adx_out.write("%6d %6d %.2e\n" % (x, y, snr))
            adx_out.close()
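
The photon-energy clean-up above relies on NaN comparing unequal to itself; a self-contained numpy sketch of the same trick (toy readings, not measured data):

import numpy

e = numpy.array([7.01, float("nan"), 7.03, float("nan"), 6.99])  # toy readings in keV

valid = e == e                 # NaN != NaN, so this is True only for real readings
mean_e = numpy.mean(e[valid])  # note: still NaN if *every* reading is invalid (the XXX above)
e[~valid] = mean_e             # fill gaps with the mean, as the script does
print(e)                       # [ 7.01  7.01  7.03  7.01  6.99]
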
Code example #5
def run(runid, bl=3, clen=50.0):
    # Beamline specific constants
    if bl == 2:
        sensor_spec = "xfel_bl_2_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_2_shutter_1_open_valid/status"
    elif bl == 3:
        sensor_spec = "xfel_bl_3_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_3_shutter_1_open_valid/status"
    else:
        log_error("BadBeamline")
        sys.exit(-1)

    # Get Run info
    try:
        run_info = dbpy.read_runinfo(bl, runid)
    except:
        log_error("BadRunID")
        sys.exit(-1)
    high_tag = dbpy.read_hightagnumber(bl, runid)
    start_tag = run_info['start_tagnumber']
    end_tag = run_info['end_tagnumber']

    tag_list = dbpy.read_taglist_byrun(bl, runid)
    tag = tag_list[0]
    print "Run %d: HighTag %d, Tags %d (inclusive) to %d (exclusive), thus %d images" % (
        runid, high_tag, start_tag, end_tag, len(tag_list))
    comment = dbpy.read_comment(bl, runid)
    print "Comment: %s" % comment
    print

    # Find detectors
    det_IDs = dbpy.read_detidlist(bl, runid)
    print "Detector IDs: " + " ".join(det_IDs)
    det_IDs = sorted([x for x in det_IDs if re.match("^MPCCD-8.*-[1-8]$", x)])
    if len(det_IDs) != 8:
        log_error("NoSupportedDetectorFound")
        sys.exit(-1)
    print "MPCCD Octal IDs to use: " + " ".join(det_IDs)
    print

    # Get shutter status and find dark images
    try:
        shutter = [
            str2float(s)
            for s in dbpy.read_syncdatalist(sensor_shutter, high_tag, tag_list)
        ]
    except:
        log_error("NoShutterStatus")
        sys.exit(-1)
    dark_tags = [
        tag for tag, is_open in zip(tag_list, shutter) if is_open == 0
    ]

    if bl == 2 and runid >= 32348:  # and runid <= 33416:
        # 2018 Feb: Unreliable shutter status. Should use PD and take darks only at the beginning of a run
        print "The shutter status was unreliable for runs in 2018 Feb."
        print "The number of tags with shutter closed:", len(dark_tags)
        print "Since the above value is not reliable, we use X-ray PD values instead."
        xray_pd = "xfel_bl_2_st_3_bm_1_pd/charge"
        pd_values = [
            str2float(s)
            for s in dbpy.read_syncdatalist(xray_pd, high_tag, tag_list)
        ]
        dark_tags = []
        is_head = True
        for tag, pd in zip(tag_list, pd_values):
            if pd is None and is_head:
                dark_tags.append(tag)
            else:
                is_head = False
        print "Number of tags without X-ray:", len(
            [1 for pd_val in pd_values if pd_val is None])
        print "But we use only tags at the beginning of a run."

    if len(dark_tags) == 0:
        log_error("NoDarkImage")
        sys.exit(-1)
    print "Number of dark images to average: %d" % len(dark_tags)
    print

    # Setup buffer readers
    try:
        readers = [
            stpy.StorageReader(det_id, bl, (runid, )) for det_id in det_IDs
        ]
    except:
        log_error("FailedOn_create_streader")
        sys.exit(-1)
    try:
        buffers = [stpy.StorageBuffer(reader) for reader in readers]
    except:
        log_error("FailedOn_create_stbuf")
        sys.exit(-1)

    # Read first image to get detector info
    det_infos = []
    for reader, buf in zip(readers, buffers):
        try:
            reader.collect(buf, dark_tags[0])
        except:
            log_error("FailedOn_collect_data")
            sys.exit(-1)
    det_infos = [buf.read_det_info(0) for buf in buffers]
    for i, det_info in enumerate(det_infos):
        det_info['id'] = det_IDs[i]

    # Collect pulse energies
    config_photon_energy = 1000.0 * dbpy.read_config_photonenergy(bl, runid)
    config_photon_energy_sensible = True
    if config_photon_energy < 5000 or config_photon_energy > 14000:
        print "WARNING: dbpy.read_config_photonenergy returned %f eV, which is absurd!" % config_photon_energy
        print "         Report this to SACLA DAQ team."
        print "         This is not problematic unless the inline spectrometer is also broken."
        config_photon_energy_sensible = False

    pulse_energies_in_keV = [
        str2float(s) for s in dbpy.read_syncdatalist(sensor_spec, high_tag,
                                                     tuple(dark_tags))
    ]
    pulse_energies = []
    for tag, energy in zip(dark_tags, pulse_energies_in_keV):
        if energy is not None and energy > 0:
            pulse_energies.append(energy * 1000.0)
        else:
            print "WARNING: The wavelength from the inline spectrometer does not look sensible for tag %d." % tag
            if config_photon_energy_sensible:
                pulse_energies.append(config_photon_energy)
                print "         Used the accelerator config value instead."
            else:
                pulse_energies.append(7000.0)
                print "         The accelerator config value is also broken; assumed 7 keV as a last resort!"

    print
    mean_energy = np.mean(pulse_energies)
    print "Mean photon energy: %f eV" % mean_energy
    print "Configured photon energy: %f eV" % config_photon_energy
    print

    # Create geometry files
    write_crystfel_geom("%d.geom" % runid, det_infos, mean_energy, clen)
    write_cheetah_geom("%d-geom.h5" % runid, det_infos)

    # Write metadata
    write_metadata("%d.h5" % runid, det_infos, clen, comment)

    # Create dark average
    print
    print "Calculating a dark average:"
    num_added = 0
    sum_buffer = np.zeros((YSIZE * NPANELS, XSIZE), dtype=np.float64)
    gains = [det_info['mp_absgain'] for det_info in det_infos]

    for j, tag_id in enumerate(dark_tags):
        print "Processing tag %d (%2.1f%% done)" % (tag_id, 100.0 *
                                                    (j + 1) / len(dark_tags))
        if (j % 5 == 0):
            with open("status.txt", "w") as status:
                status.write(
                    "Status: Total=%d,Processed=%d,Status=DarkAveraging\n" %
                    (len(dark_tags), j + 1))
        num_added += add_image(sum_buffer, readers, buffers, gains, tag_id,
                               pulse_energies[j])
    print "\nDone. Averaged %d frames." % num_added

    if (num_added < 1):
        return -1

    sum_buffer /= num_added
    ushort_max = np.iinfo(np.uint16).max
    print " #neg (< 0) %d, #overflow (> %d) %d" % (np.sum(
        sum_buffer < 0), ushort_max, np.sum(sum_buffer > ushort_max))
    sum_buffer[sum_buffer < 0] = 0
    sum_buffer[sum_buffer > ushort_max] = ushort_max
    averaged = sum_buffer.astype(np.uint16)

    # In the Phase 3 detector, some pixels average to negative values.
    # Most are around -0.1 and all those below -1 are at panel edges that will be masked.
    # So we don't have to worry about them.

    f = h5py.File("%d-dark.h5" % runid, "w")
    f.create_dataset("/data/data",
                     data=averaged,
                     compression="gzip",
                     shuffle=True)
    #    f.create_dataset("/data/raw", data=sum_buffer, compression="gzip", shuffle=True)
    f.close()
    print "Dark average was written to %s" % ("%d-dark.h5" % runid)
Code example #6
def run(runid, bl=3, clen=50.0, dry_run=False):
    # Beamline specific constants
    if bl == 2:
        sensor_spec = "xfel_bl_2_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_2_shutter_1_open_valid/status"
    elif bl == 3:
        sensor_spec = "xfel_bl_3_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_3_shutter_1_open_valid/status"
    else:
        log_error("BadBeamline")
        sys.exit(-1)

    # Get Run info
    try:
        run_info = dbpy.read_runinfo(bl, runid)
    except:
        log_error("BadRunID")
        sys.exit(-1)
    high_tag = dbpy.read_hightagnumber(bl, runid)
    start_tag = run_info['start_tagnumber']
    end_tag = run_info['end_tagnumber']

    tag_list = dbpy.read_taglist_byrun(bl, runid)
    tag = tag_list[0]
    print "Run %d: HighTag %d, Tags %d (inclusive) to %d (exclusive), thus %d images" % (runid, high_tag, start_tag, end_tag, len(tag_list))
    comment = dbpy.read_comment(bl, runid)
    print "Comment: %s" % comment
    print

    # Find detectors
    det_IDs = dbpy.read_detidlist(bl, runid)
    print "Detector IDs: " + " ".join(det_IDs)
    det_IDs = sorted([x for x in det_IDs if re.match("^MPCCD-8.*-[1-8]$", x)])
    if len(det_IDs) != 8:
        log_error("NoSupportedDetectorFound")
        sys.exit(-1)
    print "MPCCD Octal IDs to use: " + " ".join(det_IDs)
    print

    # Get shutter status and find dark images
    try:
        shutter = [str2float(s) for s in dbpy.read_syncdatalist(sensor_shutter, high_tag, tag_list)]
    except:
        log_error("NoShutterStatus")
        sys.exit(-1)
    dark_tags = [tag for tag, is_open in zip(tag_list, shutter) if is_open == 0]
    
    if bl == 2 and runid >= 32348: # and runid <= 33416:
        # 2018 Feb: Unreliable shutter status. We should use BM1 PD and take darks only at the beginning of a run
        print "The shutter status was unreliable for runs since 2018 Feb."
        print "The number of tags with shutter closed:", len(dark_tags)
        print "Since the above value is not reliable, we use X-ray PD values instead."
        xray_pd = "xfel_bl_2_st_3_bm_1_pd/charge"
        pd_values = [str2float(s) for s in dbpy.read_syncdatalist(xray_pd, high_tag, tag_list)]
        dark_tags = []
        is_head = True
        for tag, pd in zip(tag_list, pd_values):
            if math.isnan(pd) and is_head:
                dark_tags.append(tag)
            else:
                is_head = False
        print "Number of tags without X-ray:", len([1 for pd_val in pd_values if math.isnan(pd_val)])
        print "But we use only tags at the beginning of a run."

    if len(dark_tags) == 0:
        log_error("NoDarkImage")
        sys.exit(-1)
    print "Number of dark images to average: %d" % len(dark_tags)
    print

    # Setup buffer readers
    try:
        readers = [stpy.StorageReader(det_id, bl, (runid,)) for det_id in det_IDs]
    except:
        log_error("FailedOn_create_streader")
        sys.exit(-1)
    try:
        buffers = [stpy.StorageBuffer(reader) for reader in readers]
    except:
        log_error("FailedOn_create_stbuf")
        sys.exit(-1)
    
    # Read first image to get detector info
    det_infos = []
    for reader, buf in zip(readers, buffers):
        try:
            reader.collect(buf, dark_tags[0])
        except:
            log_error("FailedOn_collect_data")
            sys.exit(-1)
    det_infos = [buf.read_det_info(0) for buf in buffers]
    for i, det_info in enumerate(det_infos):
        det_info['id'] = det_IDs[i]

    # Collect pulse energies
    config_photon_energy = 1000.0 * dbpy.read_config_photonenergy(bl, runid)
    config_photon_energy_sensible = True
    if config_photon_energy < 5000 or config_photon_energy > 14000:
        print "WARNING: dbpy.read_config_photonenergy returned %f eV, which is absurd!" % config_photon_energy
        print "         Report this to SACLA DAQ team."
        print "         This is not problematic unless the inline spectrometer is also broken." 
        config_photon_energy_sensible = False

    pulse_energies_in_keV  = [str2float(s) for s in dbpy.read_syncdatalist(sensor_spec, high_tag, tuple(dark_tags))]
    pulse_energies = []
    for tag, energy in zip(dark_tags, pulse_energies_in_keV):
        if energy is not None and energy > 0:
            pulse_energies.append(energy * 1000.0)
        else:
            print "WARNING: The wavelength from the inline spectrometer does not look sensible for tag %d." % tag
            if config_photon_energy_sensible:
                pulse_energies.append(config_photon_energy)
                print "         Used the accelerator config value instead."
            else:
                pulse_energies.append(7000.0)
                print "         The accelerator config value is also broken; assumed 7 keV as a last resort!"

    print
    mean_energy = np.mean(pulse_energies)
    print "Mean photon energy: %f eV" % mean_energy
    print "Configured photon energy: %f eV" % config_photon_energy
    print

    # Create geometry files
    write_crystfel_geom("%d.geom" % runid, det_infos, mean_energy, clen)
    write_cheetah_geom("%d-geom.h5" % runid, det_infos)

    # Write metadata
    write_metadata("%d.h5" % runid, det_infos, clen, comment)

    if (dry_run): return
 
    # Create dark average
    print
    print "Calculating a dark average:"
    num_added = 0
    sum_buffer = np.zeros((YSIZE * NPANELS, XSIZE), dtype=np.float64)
    gains = [det_info['mp_absgain'] for det_info in det_infos]

    for j, tag_id in enumerate(dark_tags):
        print "Processing tag %d (%2.1f%% done)" % (tag_id, 100.0 * (j + 1) / len(dark_tags))
        if (j % 5 == 0):
            with open("status.txt", "w") as status:
                status.write("Status: Total=%d,Processed=%d,Status=DarkAveraging\n" % (len(dark_tags), j + 1))
        num_added += add_image(sum_buffer, readers, buffers, gains, tag_id, pulse_energies[j])
    print "\nDone. Averaged %d frames." % num_added
  
    if (num_added < 1):
        return -1

    sum_buffer /= num_added
    ushort_max = np.iinfo(np.uint16).max
    print " #neg (< 0) %d, #overflow (> %d) %d" % (np.sum(sum_buffer < 0), ushort_max, np.sum(sum_buffer > ushort_max))
    sum_buffer[sum_buffer < 0] = 0
    sum_buffer[sum_buffer > ushort_max] = ushort_max
    averaged = sum_buffer.astype(np.uint16)

    # In the Phase 3 detector, some pixels average to negative values.
    # Most are around -0.1 and all those below -1 are at panel edges that will be masked.
    # So we don't have to worry about them.

    f = h5py.File("%d-dark.h5" % runid, "w")
    f.create_dataset("/data/data", data=averaged, compression="gzip", shuffle=True)
#    f.create_dataset("/data/raw", data=sum_buffer, compression="gzip", shuffle=True)
    f.close()
    print "Dark average was written to %s" % ("%d-dark.h5" % runid)
Code example #7
def getEquipInt(tags, equip, hightag=201802):
    return dbpy.read_syncdatalist(equip, hightag, tags)
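
In practice the high tag is looked up per run rather than taken from the hard-coded default, as the longer examples do; a hedged sketch with hypothetical beamline/run numbers, using the same dbpy calls that appear elsewhere in these examples:

bl, runid = 3, 700000                        # hypothetical beamline and run number
hightag = dbpy.read_hightagnumber(bl, runid)
tags = tuple(dbpy.read_taglist_byrun(bl, runid))
energies = getEquipInt(tags, "xfel_bl_3_tc_spec_1/energy", hightag=hightag)
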
Code example #8
def run(opts):
    eltime_from = time.time()

    assert opts.runid is not None
    assert opts.bl is not None

    # Beamline specific constants
    if opts.bl == 2:
        sensor_spec = "xfel_bl_2_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_2_shutter_1_open_valid/status"
    elif opts.bl == 3:
        sensor_spec = "xfel_bl_3_tc_spec_1/energy"
        sensor_shutter = "xfel_bl_3_shutter_1_open_valid/status"
    else:
        error_status("BadBeamline")
        return -1

    # Get run info
    try:
        run_info = dbpy.read_runinfo(opts.bl, opts.runid)
    except:
        error_status("BadRunID")
        return -1

    high_tag = dbpy.read_hightagnumber(opts.bl, opts.runid)
    start_tag = run_info['start_tagnumber']
    end_tag = run_info['end_tagnumber']

    tag_list = numpy.array(dbpy.read_taglist_byrun(opts.bl, opts.runid))
    print "# Run %d: HighTag %d, Tags %d (inclusive) to %d (exclusive), thus %d images" % (opts.runid, high_tag, start_tag, end_tag, len(tag_list))
    comment = dbpy.read_comment(opts.bl, opts.runid)
    print "# Comment: %s" % comment
    print

    # Get shutter status and find images
    try:
        shutter = numpy.array(map(str2float, dbpy.read_syncdatalist(sensor_shutter, high_tag, tuple(tag_list))))
    except:
        print traceback.format_exc()
        error_status("NoShutterStatus")
        return -1

    # XXX
    valid_tags = tag_list[shutter==1] # [tag for tag, is_open in zip(tag_list, shutter) if is_open == 1]
    print "DEBUG:: shutter=", shutter
    print "DEBUG:: valid_tags=", valid_tags
    if 0:
        tag_offset = 3
        tag_list = tag_list[tag_offset:]
        valid_tags = tag_list[numpy.arange(1, len(tag_list)+1)%6==0]
        
    if valid_tags.size == 0:
        error_status("NoValidTags")
        return -1

    # Find images
    img_files = sorted(glob.glob(os.path.join(opts.rayonix_root, str(opts.runid), "data_*.img")))
    print "# DEBUG:: img_files=%d valid_tags=%d" % (len(img_files), len(valid_tags))
    if len(img_files)+1 != len(valid_tags): # last valid tag is not saved.
        print "# WARNING!! img_files and valid_tag number mismatch"

        img_numbers = map(lambda x: int(x[x.rindex("_")+1:-4]), img_files)
        dropped_frames = sorted(set(range(1, len(valid_tags))).difference(img_numbers))
        print "# Unsaved frame numbers =", tuple(dropped_frames)
        print "# DEBUG::", len(img_files)-len(dropped_frames)+1, len(valid_tags)
        if len(img_files)+len(dropped_frames)+1 == len(valid_tags):
            print "#  %d unsaved img files found, which explains number mismatch" % len(dropped_frames)
            valid_tags = numpy.delete(valid_tags, numpy.array(dropped_frames)-1)
            assert len(img_files)+1 == len(valid_tags)
        else:
            print "# Assuming last %d img files are generated after stopping run.." % (len(img_files)-len(valid_tags)+1)
            img_files = img_files[:len(valid_tags)-1]
            assert len(img_files)+1 == len(valid_tags)
    
    tag_timestamp = map(lambda x: datetime.datetime.fromtimestamp(dbpy.read_timestamp_fromtag(high_tag, x, sensor_shutter)).strftime('%Y-%m-%d %H:%M:%S.%f'), valid_tags)
    img_timestamp = map(lambda x: marccd.MarCCD(x).acquire_time.strftime('%Y-%m-%d %H:%M:%S.%f'), img_files)
    ofs = open("tag_file_time.dat", "w")
    ofs.write("run tag file tag.time file.time\n")
    for i in xrange(len(img_files)):
        ofs.write('%d %d %s "%s" "%s"\n'%(opts.runid, valid_tags[i], img_files[i], tag_timestamp[i], img_timestamp[i]))
    ofs.close()
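
The frame-number bookkeeping used when img_files and valid_tags disagree can be checked offline; a self-contained sketch with made-up file names (the real names come from the Rayonix data directory):

img_files = ["data_000001.img", "data_000002.img", "data_000003.img", "data_000005.img"]
valid_tags = list(range(500, 506))  # 6 tags; by convention the last one is never saved

img_numbers = [int(x[x.rindex("_") + 1:-4]) for x in img_files]
dropped_frames = sorted(set(range(1, len(valid_tags))).difference(img_numbers))
print(dropped_frames)  # [4] -- data_000004.img was never written

# This is the condition the script uses to accept the mismatch:
print(len(img_files) + len(dropped_frames) + 1 == len(valid_tags))  # True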