def laserOffReference(args):
    import psana
    
    # The option below points psana at the calib directory given by args.output.
    # For 'real' analysis during the beamtime just delete it, and the calib directory
    # for the experiment will be used by default.
    if args.output is not None:
        psana.setOption('psana.calib-dir', args.output)
    
    from xtcav.GenerateLasingOffReference import GenerateLasingOffReference
    GLOC = GenerateLasingOffReference()
    GLOC.experiment = args.experiment
    GLOC.runs = args.run
    GLOC.maxshots = args.maxshots
    #GLOC.mode = args.mode
    GLOC.nb = args.bunches
    GLOC.islandsplitmethod = 'scipyLabel'       # see confluence documentation for how to set this parameter
    GLOC.groupsize = 1                          # see confluence documentation for how to set this parameter
    GLOC.SetValidityRange(int(args.run))        # delete second run number argument to have the validity range be open-ended ("end")
    GLOC.Generate()
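
A minimal sketch of how this function could be invoked from the command line; the argparse wiring below is hypothetical and only supplies the attributes the function reads (experiment, run, maxshots, bunches, output).

if __name__ == '__main__':
    import argparse
    # Hypothetical CLI wrapper; the argument names mirror the attributes used above.
    parser = argparse.ArgumentParser(description='Generate an XTCAV lasing-off reference')
    parser.add_argument('--experiment', required=True, help='experiment name')
    parser.add_argument('--run', required=True, help='run number')
    parser.add_argument('--maxshots', type=int, default=400, help='maximum number of shots to process')
    parser.add_argument('--bunches', type=int, default=1, help='number of bunches')
    parser.add_argument('--output', default=None, help='optional local calib directory (psana.calib-dir)')
    laserOffReference(parser.parse_args())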
Example 2
            if args.noe <= len(times):
                numJobs = args.noe
            else:
                numJobs = len(times)
    elif facility == "PAL":
        # check if the user requested specific number of events
        if args.noe == -1:
            numJobs = numEvents
        else:
            if args.noe <= numEvents:
                numJobs = args.noe
            else:
                numJobs = numEvents
    return numJobs
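
# Note: each facility branch above simply clamps the requested number of events;
# the PAL branch, for instance, is equivalent to the one-liner below (shown only
# for illustration, not part of the original script):
#   numJobs = numEvents if args.noe == -1 else min(args.noe, numEvents)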

if args.localCalib: psana.setOption('psana.calib-dir','./calib')

if rank == 0:
    if facility == 'LCLS':
        # Set up psana
        ps = psanaWhisperer.psanaWhisperer(args.exp, args.run, args.det, args.clen, args.localCalib, access=args.access)
        ps.setupExperiment()
        numEvents = ps.eventTotal
        img = None
        for i in np.arange(numEvents):
            ps.getEvent(i)
            img = ps.getCheetahImg()
            if img is not None:
                print "Found an event with image: ", i
                break
    elif facility == 'PAL':
Example 3
from pylab import *
import psana
from psmon.plots import Image, XYPlot
from psmon import publish
import pandas
import time
import os

#import TimeTool
psana.setOption('psana.calib-dir', '/reg/d/psdm/sxr/sxrx24615/calib')
#myDataSource = psana.DataSource("shmem=psana.0:stop=no")
myDataSource = psana.DataSource('exp=sxrx24615:run=22')

#not necessarily online.
#ttOptions = TimeTool.AnalyzeOptions(get_key='TSS_OPAL',eventcode_nobeam = 162)	#162 event is bykick

#ttAnalyze = TimeTool.PyAnalyze(ttOptions)
#ds = psana.DataSource(self.datasource, module=ttAnalyze)

#tssOpalROI = [[xStart,xEnd],[yStart,yEnd]]
tssOpalROI = [[1, -1], [1, -1]]
#opal1ROI = [[1,-1],[1,-1]]
opal1ROI = loadtxt("myConfig.dat")
myConfigLastModTime = os.path.getmtime("myConfig.dat")
#opal1SecondROI = [[1,-1],[1,-1]]
tssAxis, opal1Axis = [1, 1]
correlationWindow = 31

myConfig = loadtxt("myConfig.dat")

tssOpalDetectorObject = psana.Detector("TSS_OPAL")
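
For context, a typical psana event loop over this data source would fetch camera images from the Detector object roughly as sketched below; this is a generic illustration of the psana Detector interface, not the continuation of the original script.

# Illustrative only: iterate events and pull the assembled OPAL image for each one.
for eventNumber, thisEvent in enumerate(myDataSource.events()):
    tssOpalImage = tssOpalDetectorObject.image(thisEvent)
    if tssOpalImage is None:
        continue  # detector did not read out on this event
    # ...analysis of tssOpalImage would go here...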
Example 4
import os
import numpy as np
import psana

from psdata import ImageData
from psdata import IqPlotData

from utilities import *

# =============================================
s_runs = [182, 183]
p_runs = [193, 194]
# ==============================================

# PSANA CONFIG FILE - this must be set before a datasource is created
basedir = os.path.split(os.path.abspath( __file__ ))[0]
config_fn = os.path.join(basedir, "minitti.cfg")
psana.setConfigFile(config_fn)
psana.setOption('psana.l3t-accept-only',0)
print "Loading psana config file:    %s" % config_fn

# Acquire the geometry and mask
geometry_filename = '/reg/neh/home2/tjlane/analysis/xppb0114/geometries/v2/q_geom.npy'
print "Loading geometry from:        %s" % geometry_filename
geometry = np.load(geometry_filename).reshape(32,185,388)

mask_filename = '/reg/neh/home2/tjlane/analysis/xppb0114/geometries/v2/mask_v2.npy'
print "Loading pixel mask from:      %s" % mask_filename
mask = np.load(mask_filename).reshape(32,185,388)


cspad_src  = psana.Source('DetInfo(XppGon.0:Cspad.0)')
evr_src    = psana.Source('DetInfo(NoDetector.0:Evr.0)')
Example 5
    def run(self):
        """ Process all images assigned to this thread """
        params, options = self.parser.parse_args(show_diff_phil=True)

        if params.input.experiment is None or \
           params.input.run_num is None or \
           params.input.address is None:
            raise Usage(self.usage)

        if params.format.file_format == "cbf":
            if params.format.cbf.detz_offset is None:
                raise Usage(self.usage)
        elif params.format.file_format == "pickle":
            if params.input.cfg is None:
                raise Usage(self.usage)
        else:
            raise Usage(self.usage)

        if not os.path.exists(params.output.output_dir):
            raise Sorry("Output path not found:" + params.output.output_dir)

        #Environment variable redirect for CBFLib temporary CBF_TMP_XYZ file output
        if params.format.file_format == "cbf":
            if params.output.tmp_output_dir is None:
                tmp_dir = os.path.join(params.output.output_dir, '.tmp')
            else:
                tmp_dir = os.path.join(params.output.tmp_output_dir, '.tmp')
            if not os.path.exists(tmp_dir):
                try:
                    os.makedirs(tmp_dir)
                except Exception as e:
                    if not os.path.exists(tmp_dir):
                        halraiser(e)
            os.environ['CBF_TMP_DIR'] = tmp_dir

        # Save the parameters
        self.params = params
        self.options = options

        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
        size = comm.Get_size()  # size: number of processes running in this job

        # set up psana
        if params.input.cfg is not None:
            psana.setConfigFile(params.input.cfg)

        if params.input.calib_dir is not None:
            psana.setOption('psana.calib-dir', params.input.calib_dir)

        dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment,
                                              params.input.run_num)
        if params.input.xtc_dir is not None:
            dataset_name = "exp=%s:run=%s:idx:dir=%s" % (
                params.input.experiment, params.input.run_num,
                params.input.xtc_dir)

        ds = psana.DataSource(dataset_name)

        if params.format.file_format == "cbf":
            src = psana.Source('DetInfo(%s)' % params.input.address)
            psana_det = psana.Detector(params.input.address, ds.env())

        # set this to sys.maxint to analyze all events
        if params.dispatch.max_events is None:
            max_events = sys.maxint
        else:
            max_events = params.dispatch.max_events

        for run in ds.runs():
            if params.format.file_format == "cbf":
                if params.format.cbf.mode == "cspad":
                    # load a header only cspad cbf from the slac metrology
                    base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                        run, params.input.address)
                    if base_dxtbx is None:
                        raise Sorry(
                            "Couldn't load calibration file for run %d" %
                            run.run())
                elif params.format.cbf.mode == "rayonix":
                    # load a header only rayonix cbf from the input parameters
                    base_dxtbx = rayonix_tbx.get_dxtbx_from_params(
                        params.format.cbf.rayonix)

            # list of all events
            times = run.times()
            if params.dispatch.selected_events:
                times = [
                    t for t in times
                    if cspad_tbx.evt_timestamp((t.seconds(), t.nanoseconds() /
                                                1e6)) in params.input.timestamp
                ]
            nevents = min(len(times), max_events)
            # chop the list into pieces, depending on rank. This assigns each process
            # events such that they get every Nth event, where N is the number of processes
            mytimes = [
                times[i] for i in xrange(nevents) if (i + rank) % size == 0
            ]

            for i in xrange(len(mytimes)):
                evt = run.event(mytimes[i])
                id = evt.get(psana.EventId)
                print "Event #", i, " has id:", id

                timestamp = cspad_tbx.evt_timestamp(
                    cspad_tbx.evt_time(evt))  # human readable format
                if timestamp is None:
                    print "No timestamp, skipping shot"
                    continue

                if evt.get("skip_event") or "skip_event" in [
                        key.key() for key in evt.keys()
                ]:
                    print "Skipping event", timestamp
                    continue

                t = timestamp
                s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[
                    17:19] + t[20:23]
                print "Processing shot", s

                if params.format.file_format == "pickle":
                    if evt.get("skip_event"):
                        print "Skipping event", id
                        continue
                    # the data needs to have already been processed and put into the event by psana
                    data = evt.get(params.format.pickle.out_key)
                    if data is None:
                        print "No data"
                        continue

                    # set output paths according to the templates
                    path = os.path.join(params.output.output_dir,
                                        "shot-" + s + ".pickle")

                    print "Saving", path
                    easy_pickle.dump(path, data)

                elif params.format.file_format == "cbf":
                    if params.format.cbf.mode == "cspad":
                        # get numpy array, 32x185x388
                        data = cspad_cbf_tbx.get_psana_corrected_data(
                            psana_det,
                            evt,
                            use_default=False,
                            dark=True,
                            common_mode=None,
                            apply_gain_mask=params.format.cbf.cspad.
                            gain_mask_value is not None,
                            gain_mask_value=params.format.cbf.cspad.
                            gain_mask_value,
                            per_pixel_gain=False)

                        distance = cspad_tbx.env_distance(
                            params.input.address, run.env(),
                            params.format.cbf.detz_offset)
                    elif params.format.cbf.mode == "rayonix":
                        data = rayonix_tbx.get_data_from_psana_event(
                            evt, params.input.address)
                        distance = params.format.cbf.detz_offset

                    if distance is None:
                        print "No distance, skipping shot"
                        continue

                    if self.params.format.cbf.override_energy is None:
                        wavelength = cspad_tbx.evt_wavelength(evt)
                        if wavelength is None:
                            print "No wavelength, skipping shot"
                            continue
                    else:
                        wavelength = 12398.4187 / self.params.format.cbf.override_energy

                    # stitch together the header, data and metadata into the final dxtbx format object
                    if params.format.cbf.mode == "cspad":
                        image = cspad_cbf_tbx.format_object_from_data(
                            base_dxtbx, data, distance, wavelength, timestamp,
                            params.input.address)
                    elif params.format.cbf.mode == "rayonix":
                        image = rayonix_tbx.format_object_from_data(
                            base_dxtbx, data, distance, wavelength, timestamp,
                            params.input.address)
                    path = os.path.join(params.output.output_dir,
                                        "shot-" + s + ".cbf")
                    print "Saving", path

                    # write the file
                    import pycbf
                    image._cbf_handle.write_widefile(path, pycbf.CBF,\
                      pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)

            run.end()
        ds.end()
Example 6
def average(argv=None):
    if argv is None:
        argv = sys.argv[1:]

    try:
        from mpi4py import MPI
    except ImportError:
        raise Sorry("MPI not found")

    command_line = (libtbx.option_parser.option_parser(usage="""
%s [-p] -c config -x experiment -a address -r run -d detz_offset [-o outputdir] [-A averagepath] [-S stddevpath] [-M maxpath] [-n numevents] [-s skipnevents] [-v] [-m] [-b bin_size] [-X override_beam_x] [-Y override_beam_y] [-D xtc_dir] [-f] [-g gain_mask_value] [--min] [--minpath minpath]

To write image pickles use -p, otherwise the program writes CSPAD CBFs.
Writing CBFs requires the geometry to be already deployed.

Examples:
cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

Use one process on the current node to process all the events from run 25 of
experiment cxi49812, using a detz_offset of 571.

mpirun -n 16 cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

As above, using 16 cores on the current node.

bsub -a mympi -n 100 -o average.out -q psanaq cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571 -o cxi49812

As above, using the psanaq and 100 cores, putting the log in average.out and
the output images in the folder cxi49812.
""" % libtbx.env.dispatcher_name).option(
        None,
        "--as_pickle",
        "-p",
        action="store_true",
        default=False,
        dest="as_pickle",
        help="Write results as image pickle files instead of cbf files"
    ).option(
        None,
        "--raw_data",
        "-R",
        action="store_true",
        default=False,
        dest="raw_data",
        help=
        "Disable psana corrections such as dark pedestal subtraction or common mode (cbf only)"
    ).option(
        None,
        "--background_pickle",
        "-B",
        default=None,
        dest="background_pickle",
        help=""
    ).option(
        None,
        "--config",
        "-c",
        type="string",
        default=None,
        dest="config",
        metavar="PATH",
        help="psana config file"
    ).option(
        None,
        "--experiment",
        "-x",
        type="string",
        default=None,
        dest="experiment",
        help="experiment name (eg cxi84914)"
    ).option(
        None,
        "--run",
        "-r",
        type="int",
        default=None,
        dest="run",
        help="run number"
    ).option(
        None,
        "--address",
        "-a",
        type="string",
        default="CxiDs2.0:Cspad.0",
        dest="address",
        help="detector address name (eg CxiDs2.0:Cspad.0)"
    ).option(
        None,
        "--detz_offset",
        "-d",
        type="float",
        default=None,
        dest="detz_offset",
        help=
        "offset (in mm) from sample interaction region to back of CSPAD detector rail (CXI), or detector distance (XPP)"
    ).option(
        None,
        "--outputdir",
        "-o",
        type="string",
        default=".",
        dest="outputdir",
        metavar="PATH",
        help="Optional path to output directory for output files"
    ).option(
        None,
        "--averagebase",
        "-A",
        type="string",
        default="{experiment!l}_avg-r{run:04d}",
        dest="averagepath",
        metavar="PATH",
        help=
        "Path to output average image without extension. String substitution allowed"
    ).option(
        None,
        "--stddevbase",
        "-S",
        type="string",
        default="{experiment!l}_stddev-r{run:04d}",
        dest="stddevpath",
        metavar="PATH",
        help=
        "Path to output standard deviation image without extension. String substitution allowed"
    ).option(
        None,
        "--maxbase",
        "-M",
        type="string",
        default="{experiment!l}_max-r{run:04d}",
        dest="maxpath",
        metavar="PATH",
        help=
        "Path to output maximum projection image without extension. String substitution allowed"
    ).option(
        None,
        "--numevents",
        "-n",
        type="int",
        default=None,
        dest="numevents",
        help="Maximum number of events to process. Default: all"
    ).option(
        None,
        "--skipevents",
        "-s",
        type="int",
        default=0,
        dest="skipevents",
        help="Number of events in the beginning of the run to skip. Default: 0"
    ).option(
        None,
        "--verbose",
        "-v",
        action="store_true",
        default=False,
        dest="verbose",
        help="Print more information about progress"
    ).option(
        None,
        "--pickle-optical-metrology",
        "-m",
        action="store_true",
        default=False,
        dest="pickle_optical_metrology",
        help=
        "If writing pickle files, use the optical metrology in the experiment's calib directory"
    ).option(
        None,
        "--bin_size",
        "-b",
        type="int",
        default=None,
        dest="bin_size",
        help="Rayonix detector bin size"
    ).option(
        None,
        "--override_beam_x",
        "-X",
        type="float",
        default=None,
        dest="override_beam_x",
        help="Rayonix detector beam center x coordinate"
    ).option(
        None,
        "--override_beam_y",
        "-Y",
        type="float",
        default=None,
        dest="override_beam_y",
        help="Rayonix detector beam center y coordinate"
    ).option(
        None,
        "--calib_dir",
        "-C",
        type="string",
        default=None,
        dest="calib_dir",
        metavar="PATH",
        help="calibration directory"
    ).option(
        None,
        "--pickle_calib_dir",
        "-P",
        type="string",
        default=None,
        dest="pickle_calib_dir",
        metavar="PATH",
        help=
        "pickle calibration directory specification. Replaces --calib_dir functionality."
    ).option(
        None,
        "--xtc_dir",
        "-D",
        type="string",
        default=None,
        dest="xtc_dir",
        metavar="PATH",
        help="xtc stream directory"
    ).option(
        None,
        "--use_ffb",
        "-f",
        action="store_true",
        default=False,
        dest="use_ffb",
        help=
        "Use the fast feedback filesystem at LCLS. Only for the active experiment!"
    ).option(
        None,
        "--gain_mask_value",
        "-g",
        type="float",
        default=None,
        dest="gain_mask_value",
        help=
        "Ratio between low and high gain pixels, if CSPAD in mixed-gain mode. Only used in CBF averaging mode."
    ).option(
        None,
        "--min",
        None,
        action="store_true",
        default=False,
        dest="do_minimum_projection",
        help="Output a minimum projection"
    ).option(
        None,
        "--minpath",
        None,
        type="string",
        default="{experiment!l}_min-r{run:04d}",
        dest="minpath",
        metavar="PATH",
        help=
        "Path to output minimum image without extension. String substitution allowed"
    )).process(args=argv)


    if len(command_line.args) > 0 or \
        command_line.options.as_pickle is None or \
        command_line.options.experiment is None or \
        command_line.options.run is None or \
        command_line.options.address is None or \
        command_line.options.detz_offset is None or \
        command_line.options.averagepath is None or \
        command_line.options.stddevpath is None or \
        command_line.options.maxpath is None or \
        command_line.options.pickle_optical_metrology is None:
        command_line.parser.show_help()
        return

    # set this to sys.maxint to analyze all events
    if command_line.options.numevents is None:
        maxevents = sys.maxint
    else:
        maxevents = command_line.options.numevents

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    if command_line.options.config is not None:
        psana.setConfigFile(command_line.options.config)
    dataset_name = "exp=%s:run=%d:idx" % (command_line.options.experiment,
                                          command_line.options.run)
    if command_line.options.xtc_dir is not None:
        if command_line.options.use_ffb:
            raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
        dataset_name += ":dir=%s" % command_line.options.xtc_dir
    elif command_line.options.use_ffb:
        # as ffb is only at SLAC, ok to hardcode /reg/d here
        dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc" % (
            command_line.options.experiment[0:3],
            command_line.options.experiment)
    if command_line.options.calib_dir is not None:
        psana.setOption('psana.calib-dir', command_line.options.calib_dir)
    ds = psana.DataSource(dataset_name)
    address = command_line.options.address
    src = psana.Source('DetInfo(%s)' % address)
    nevent = np.array([0.])

    if command_line.options.background_pickle is not None:
        background = easy_pickle.load(
            command_line.options.background_pickle)['DATA'].as_numpy_array()

    for run in ds.runs():
        runnumber = run.run()

        if not command_line.options.as_pickle:
            psana_det = psana.Detector(address, ds.env())

        # list of all events
        if command_line.options.skipevents > 0:
            print "Skipping first %d events" % command_line.options.skipevents
        elif "Rayonix" in command_line.options.address:
            print "Skipping first image in the Rayonix detector"  # Shuttering issue
            command_line.options.skipevents = 1

        times = run.times()[command_line.options.skipevents:]
        nevents = min(len(times), maxevents)
        # chop the list into pieces, depending on rank. This assigns each process
        # events such that they get every Nth event, where N is the number of processes
        mytimes = [times[i] for i in range(nevents) if (i + rank) % size == 0]
        for i in range(len(mytimes)):
            if i % 10 == 0:
                print 'Rank', rank, 'processing event', rank * len(
                    mytimes) + i, ', ', i, 'of', len(mytimes)
            evt = run.event(mytimes[i])
            #print "Event #",rank*mylength+i," has id:",evt.get(EventId)
            if 'Rayonix' in command_line.options.address or 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
                data = evt.get(psana.Camera.FrameV1, src)
                if data is None:
                    print "No data"
                    continue
                data = data.data16().astype(np.float64)
            elif command_line.options.as_pickle:
                data = evt.get(psana.ndarray_float64_3, src, 'image0')
            else:
                # get numpy array, 32x185x388
                from xfel.cftbx.detector.cspad_cbf_tbx import get_psana_corrected_data
                if command_line.options.raw_data:
                    data = get_psana_corrected_data(psana_det,
                                                    evt,
                                                    use_default=False,
                                                    dark=False,
                                                    common_mode=None,
                                                    apply_gain_mask=False,
                                                    per_pixel_gain=False)
                else:
                    if command_line.options.gain_mask_value is None:
                        data = get_psana_corrected_data(psana_det,
                                                        evt,
                                                        use_default=True)
                    else:
                        data = get_psana_corrected_data(
                            psana_det,
                            evt,
                            use_default=False,
                            dark=True,
                            common_mode=None,
                            apply_gain_mask=True,
                            gain_mask_value=command_line.options.
                            gain_mask_value,
                            per_pixel_gain=False)

            if data is None:
                print "No data"
                continue

            if command_line.options.background_pickle is not None:
                data -= background

            if 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
                distance = np.array([0.0])
                wavelength = np.array([1.0])
            else:
                d = cspad_tbx.env_distance(address, run.env(),
                                           command_line.options.detz_offset)
                if d is None:
                    print "No distance, using distance", command_line.options.detz_offset
                    assert command_line.options.detz_offset is not None
                    if 'distance' not in locals():
                        distance = np.array([command_line.options.detz_offset])
                    else:
                        distance += command_line.options.detz_offset
                else:
                    if 'distance' in locals():
                        distance += d
                    else:
                        distance = np.array([float(d)])

                w = cspad_tbx.evt_wavelength(evt)
                if w is None:
                    print "No wavelength"
                    if 'wavelength' not in locals():
                        wavelength = np.array([1.0])
                else:
                    if 'wavelength' in locals():
                        wavelength += w
                    else:
                        wavelength = np.array([w])

            t = cspad_tbx.evt_time(evt)
            if t is None:
                print "No timestamp, skipping shot"
                continue
            if 'timestamp' in locals():
                timestamp += t[0] + (t[1] / 1000)
            else:
                timestamp = np.array([t[0] + (t[1] / 1000)])

            if 'sum' in locals():
                sum += data
            else:
                sum = np.array(data, copy=True)
            if 'sumsq' in locals():
                sumsq += data * data
            else:
                sumsq = data * data
            if 'maximum' in locals():
                maximum = np.maximum(maximum, data)
            else:
                maximum = np.array(data, copy=True)

            if command_line.options.do_minimum_projection:
                if 'minimum' in locals():
                    minimum = np.minimum(minimum, data)
                else:
                    minimum = np.array(data, copy=True)

            nevent += 1

    #sum the images across mpi cores
    if size > 1:
        print "Synchronizing rank", rank
    totevent = np.zeros(nevent.shape)
    comm.Reduce(nevent, totevent)

    if rank == 0 and totevent[0] == 0:
        raise Sorry("No events found in the run")

    sumall = np.zeros(sum.shape).astype(sum.dtype)
    comm.Reduce(sum, sumall)

    sumsqall = np.zeros(sumsq.shape).astype(sumsq.dtype)
    comm.Reduce(sumsq, sumsqall)

    maxall = np.zeros(maximum.shape).astype(maximum.dtype)
    comm.Reduce(maximum, maxall, op=MPI.MAX)

    if command_line.options.do_minimum_projection:
        minall = np.zeros(maximum.shape).astype(minimum.dtype)
        comm.Reduce(minimum, minall, op=MPI.MIN)

    waveall = np.zeros(wavelength.shape).astype(wavelength.dtype)
    comm.Reduce(wavelength, waveall)

    distall = np.zeros(distance.shape).astype(distance.dtype)
    comm.Reduce(distance, distall)

    timeall = np.zeros(timestamp.shape).astype(timestamp.dtype)
    comm.Reduce(timestamp, timeall)

    if rank == 0:
        if size > 1:
            print "Synchronized"

        # Accumulating floating-point numbers introduces errors,
        # which may cause negative variances.  Since a two-pass
        # approach is unacceptable, the standard deviation is
        # clamped at zero.
        mean = sumall / float(totevent[0])
        variance = (sumsqall / float(totevent[0])) - (mean**2)
        variance[variance < 0] = 0
        stddev = np.sqrt(variance)

        wavelength = waveall[0] / totevent[0]
        distance = distall[0] / totevent[0]
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
        timestamp = timeall[0] / totevent[0]
        timestamp = (int(timestamp), timestamp % int(timestamp) * 1000)
        timestamp = cspad_tbx.evt_timestamp(timestamp)

        if command_line.options.as_pickle:
            extension = ".pickle"
        else:
            extension = ".cbf"

        dest_paths = [
            cspad_tbx.pathsubst(command_line.options.averagepath + extension,
                                evt, ds.env()),
            cspad_tbx.pathsubst(command_line.options.stddevpath + extension,
                                evt, ds.env()),
            cspad_tbx.pathsubst(command_line.options.maxpath + extension, evt,
                                ds.env())
        ]
        if command_line.options.do_minimum_projection:
            dest_paths.append(
                cspad_tbx.pathsubst(command_line.options.minpath + extension,
                                    evt, ds.env()))

        dest_paths = [
            os.path.join(command_line.options.outputdir, path)
            for path in dest_paths
        ]
        if 'Rayonix' in command_line.options.address:
            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)
            from xfel.cxi.cspad_ana import rayonix_tbx
            pixel_size = rayonix_tbx.get_rayonix_pixel_size(
                command_line.options.bin_size)
            beam_center = [
                command_line.options.override_beam_x,
                command_line.options.override_beam_y
            ]
            active_areas = flex.int([0, 0, mean.focus()[1], mean.focus()[0]])
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[
                1] + "|" + split_address[2] + "-" + split_address[3]
            for data, path in zip(all_data, dest_paths):
                print "Saving", path
                d = cspad_tbx.dpack(
                    active_areas=active_areas,
                    address=old_style_address,
                    beam_center_x=pixel_size * beam_center[0],
                    beam_center_y=pixel_size * beam_center[1],
                    data=flex.double(data),
                    distance=distance,
                    pixel_size=pixel_size,
                    saturated_value=rayonix_tbx.rayonix_saturated_value,
                    timestamp=timestamp,
                    wavelength=wavelength)
                easy_pickle.dump(path, d)
        elif 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
            all_data = [mean, stddev, maxall]
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[
                1] + "|" + split_address[2] + "-" + split_address[3]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)
            for data, path in zip(all_data, dest_paths):
                d = cspad_tbx.dpack(address=old_style_address,
                                    data=flex.double(data),
                                    distance=distance,
                                    pixel_size=0.1,
                                    timestamp=timestamp,
                                    wavelength=wavelength)
                print "Saving", path
                easy_pickle.dump(path, d)
        elif command_line.options.as_pickle:
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[
                1] + "|" + split_address[2] + "-" + split_address[3]

            xpp = 'xpp' in address.lower()
            if xpp:
                evt_time = cspad_tbx.evt_time(
                    evt)  # tuple of seconds, milliseconds
                timestamp = cspad_tbx.evt_timestamp(
                    evt_time)  # human readable format
                from iotbx.detectors.cspad_detector_formats import detector_format_version, reverse_timestamp
                from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
                version_lookup = detector_format_version(
                    old_style_address,
                    reverse_timestamp(timestamp)[0])
                assert version_lookup is not None
                active_areas = xpp_active_areas[version_lookup]['active_areas']
                beam_center = [1765 // 2, 1765 // 2]
            else:
                if command_line.options.pickle_calib_dir is not None:
                    metro_path = command_line.options.pickle_calib_dir
                elif command_line.options.pickle_optical_metrology:
                    from xfel.cftbx.detector.cspad_cbf_tbx import get_calib_file_path
                    metro_path = get_calib_file_path(run.env(), address, run)
                else:
                    metro_path = libtbx.env.find_in_repositories(
                        "xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0")
                sections = parse_calib.calib2sections(metro_path)
                beam_center, active_areas = cspad_tbx.cbcaa(
                    cspad_tbx.getConfig(address, ds.env()), sections)

            class fake_quad(object):
                def __init__(self, q, d):
                    self.q = q
                    self.d = d

                def quad(self):
                    return self.q

                def data(self):
                    return self.d

            if xpp:
                quads = [
                    fake_quad(i, mean[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                mean = cspad_tbx.image_xpp(old_style_address,
                                           None,
                                           ds.env(),
                                           active_areas,
                                           quads=quads)
                mean = flex.double(mean.astype(np.float64))

                quads = [
                    fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                stddev = cspad_tbx.image_xpp(old_style_address,
                                             None,
                                             ds.env(),
                                             active_areas,
                                             quads=quads)
                stddev = flex.double(stddev.astype(np.float64))

                quads = [
                    fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                maxall = cspad_tbx.image_xpp(old_style_address,
                                             None,
                                             ds.env(),
                                             active_areas,
                                             quads=quads)
                maxall = flex.double(maxall.astype(np.float64))

                if command_line.options.do_minimum_projection:
                    quads = [
                        fake_quad(i, minall[i * 8:(i + 1) * 8, :, :])
                        for i in range(4)
                    ]
                    minall = cspad_tbx.image_xpp(old_style_address,
                                                 None,
                                                 ds.env(),
                                                 active_areas,
                                                 quads=quads)
                    minall = flex.double(minall.astype(np.float64))
            else:
                quads = [
                    fake_quad(i, mean[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                mean = cspad_tbx.CsPadDetector(address,
                                               evt,
                                               ds.env(),
                                               sections,
                                               quads=quads)
                mean = flex.double(mean.astype(np.float64))

                quads = [
                    fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                stddev = cspad_tbx.CsPadDetector(address,
                                                 evt,
                                                 ds.env(),
                                                 sections,
                                                 quads=quads)
                stddev = flex.double(stddev.astype(np.float64))

                quads = [
                    fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                maxall = cspad_tbx.CsPadDetector(address,
                                                 evt,
                                                 ds.env(),
                                                 sections,
                                                 quads=quads)
                maxall = flex.double(maxall.astype(np.float64))

                if command_line.options.do_minimum_projection:
                    quads = [
                        fake_quad(i, minall[i * 8:(i + 1) * 8, :, :])
                        for i in range(4)
                    ]
                    minall = cspad_tbx.CsPadDetector(address,
                                                     evt,
                                                     ds.env(),
                                                     sections,
                                                     quads=quads)
                    minall = flex.double(minall.astype(np.float64))

            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)

            for data, path in zip(all_data, dest_paths):
                print "Saving", path

                d = cspad_tbx.dpack(active_areas=active_areas,
                                    address=old_style_address,
                                    beam_center_x=pixel_size * beam_center[0],
                                    beam_center_y=pixel_size * beam_center[1],
                                    data=data,
                                    distance=distance,
                                    pixel_size=pixel_size,
                                    saturated_value=saturated_value,
                                    timestamp=timestamp,
                                    wavelength=wavelength)

                easy_pickle.dump(path, d)
        else:
            # load a header only cspad cbf from the slac metrology
            from xfel.cftbx.detector import cspad_cbf_tbx
            import pycbf
            base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                run, address)
            if base_dxtbx is None:
                raise Sorry("Couldn't load calibration file for run %d" %
                            run.run())

            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)

            for data, path in zip(all_data, dest_paths):
                print "Saving", path
                cspad_img = cspad_cbf_tbx.format_object_from_data(
                    base_dxtbx,
                    data,
                    distance,
                    wavelength,
                    timestamp,
                    address,
                    round_to_int=False)
                cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
                  pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)
Example 7
    def setupExperiment(self):
        if self.parent.args.v >= 1: print "Doing setupExperiment"
        if self.hasExpRunInfo():
            self.getUsername()
            # Set up psocake directory in scratch
            if self.parent.args.outDir is None:
                self.parent.rootDir = '/reg/d/psdm/' + self.parent.experimentName[:3] + \
                                      '/' + self.parent.experimentName
                self.parent.elogDir = self.parent.rootDir + '/scratch/psocake'
                self.parent.psocakeDir = self.parent.rootDir + '/scratch/' + self.username + '/psocake'
            else:
                self.parent.rootDir = self.parent.args.outDir
                self.parent.elogDir = self.parent.rootDir + '/psocake'
                self.parent.psocakeDir = self.parent.rootDir + '/' + self.username + '/psocake'
            self.parent.psocakeRunDir = self.parent.psocakeDir + '/r' + str(
                self.parent.runNumber).zfill(4)

            if self.parent.args.v >= 1:
                print "psocakeDir: ", self.parent.psocakeDir

            # Update peak finder outdir and run number
            self.parent.pk.p3.param(
                self.parent.pk.hitParam_grp,
                self.parent.pk.hitParam_outDir_str).setValue(
                    self.parent.psocakeDir)
            self.parent.pk.p3.param(self.parent.pk.hitParam_grp,
                                    self.parent.pk.hitParam_runs_str).setValue(
                                        self.parent.runNumber)
            # Update powder outdir and run number
            self.parent.mk.p6.param(self.parent.mk.powder_grp,
                                    self.parent.mk.powder_outDir_str).setValue(
                                        self.parent.psocakeDir)
            self.parent.mk.p6.param(self.parent.mk.powder_grp,
                                    self.parent.mk.powder_runs_str).setValue(
                                        self.parent.runNumber)
            # Update hit finding outdir, run number
            self.parent.hf.p8.param(
                self.parent.hf.spiParam_grp,
                self.parent.hf.spiParam_outDir_str).setValue(
                    self.parent.psocakeDir)
            self.parent.hf.p8.param(self.parent.hf.spiParam_grp,
                                    self.parent.hf.spiParam_runs_str).setValue(
                                        self.parent.runNumber)
            # Update indexing outdir, run number
            self.parent.index.p9.param(self.parent.index.launch_grp,
                                       self.parent.index.outDir_str).setValue(
                                           self.parent.psocakeDir)
            self.parent.index.p9.param(self.parent.index.launch_grp,
                                       self.parent.index.runs_str).setValue(
                                           self.parent.runNumber)
            # Update quantifier filename
            fname = self.parent.psocakeRunDir + '/' + self.parent.experimentName + '_' + str(
                self.parent.runNumber).zfill(4) + '.cxi'
            if self.parent.args.mode == 'sfx':
                dsetname = '/entry_1/result_1/nPeaksAll'
            elif self.parent.args.mode == 'spi':
                dsetname = '/entry_1/result_1/nHitsAll'
            else:
                dsetname = '/entry_1/result_1/'
            self.parent.small.pSmall.param(
                self.parent.small.quantifier_grp,
                self.parent.small.quantifier_filename_str).setValue(fname)
            self.parent.small.pSmall.param(
                self.parent.small.quantifier_grp,
                self.parent.small.quantifier_dataset_str).setValue(dsetname)
            self.setupPsocake()

            # Update hidden CrystFEL files
            self.updateHiddenCrystfelFiles('lcls')

            if self.parent.args.localCalib:
                if self.parent.args.v >= 1: print "Using local calib directory"
                psana.setOption('psana.calib-dir', './calib')

            try:
                self.ds = psana.DataSource('exp=' +
                                           str(self.parent.experimentName) +
                                           ':run=' +
                                           str(self.parent.runNumber) + ':idx')
            except:
                print "############# No such datasource exists ###############"
            self.run = self.ds.runs().next()
            self.times = self.run.times()
            self.eventTotal = len(self.times)
            self.parent.stack.spinBox.setMaximum(self.eventTotal -
                                                 self.parent.stack.stackSize)
            self.p.param(self.exp_grp, self.exp_evt_str).setLimits(
                (0, self.eventTotal - 1))
            self.p.param(self.exp_grp, self.exp_evt_str,
                         self.exp_numEvents_str).setValue(self.eventTotal)
            self.env = self.ds.env()

            if self.parent.detInfoList is None:
                self.parent.evt = self.run.event(self.times[-1])
                myAreaDetectors = []
                self.parent.detnames = psana.DetNames()
                for k in self.parent.detnames:
                    try:
                        if Detector.PyDetector.dettype(
                                str(k[0]), self.env
                        ) == Detector.AreaDetector.AreaDetector:
                            myAreaDetectors.append(k)
                    except ValueError:
                        continue
                self.parent.detInfoList = list(set(myAreaDetectors))
                print "#######################################"
                print "# Available area detectors: "
                for k in self.parent.detInfoList:
                    print "#", k
                print "#######################################"

            # Launch e-log crawler
            if self.logger and self.crawlerRunning == False:
                if self.parent.args.v >= 1: print "Launching crawler"
                self.launchCrawler()
                self.crawlerRunning = True

        if self.hasExpRunDetInfo():
            self.parent.det = psana.Detector(str(self.parent.detInfo),
                                             self.env)
            self.parent.det.do_reshape_2d_to_3d(flag=True)
            self.parent.detAlias = self.getDetectorAlias(
                str(self.parent.detInfo))
            self.parent.epics = self.ds.env().epicsStore()
            self.setClen()

            # detector distance
            self.updateDetectorDistance('lcls')
            # pixel size
            self.updatePixelSize('lcls')
            # photon energy
            self.updatePhotonEnergy('lcls')

            # Some detectors do not read out at 120 Hz, so loop over events to guarantee a valid detector image.
            if self.parent.evt is None:
                self.parent.evt = self.run.event(self.times[0])
            self.detGuaranteed = self.parent.det.calib(self.parent.evt)
            if self.detGuaranteed is None:  # image isn't present for this event
                print "No image in this event. Searching for an event..."
                for i in np.arange(len(self.times)):
                    evt = self.run.event(self.times[i])
                    self.detGuaranteed = self.parent.det.calib(evt)
                    if self.detGuaranteed is not None:
                        print "Found an event with image: ", i
                        break

            # Setup pixel indices
            if self.detGuaranteed is not None:
                self.parent.pixelInd = np.reshape(
                    np.arange(self.detGuaranteed.size) + 1,
                    self.detGuaranteed.shape)
                self.parent.pixelIndAssem = self.parent.img.getAssembledImage(
                    'lcls', self.parent.pixelInd)
                self.parent.pixelIndAssem -= 1  # First pixel is 0
                # Get detector shape
                self.detGuaranteedData = self.parent.det.image(
                    self.parent.evt, self.detGuaranteed)

            # Write a temporary geom file
            self.parent.geom.deployCrystfelGeometry('lcls')
            self.parent.geom.writeCrystfelGeom('lcls')

            self.parent.img.setupRadialBackground()
            self.parent.img.updatePolarizationFactor()

        if self.parent.args.v >= 1: print "Done setupExperiment"
Example 8
    def __init__(self, state):
        self.timestamps = None
        self.library = 'psana'
        config_file = None
        if('LCLS/PsanaConf' in state):
            config_file = os.path.abspath(state['LCLS/PsanaConf'])
        elif('LCLS' in state and 'PsanaConf' in state['LCLS']):
            config_file = os.path.abspath(state['LCLS']['PsanaConf'])
        if(config_file is not None):
            if(not os.path.isfile(config_file)):
                raise RuntimeError("Could not find [LCLS][PsanaConf]: %s" %
                                   (config_file))
            logging.info("Info: Found configuration file %s.", config_file)
            psana.setConfigFile(config_file)

        if 'LCLS/CalibDir' in state:
            calibdir = state['LCLS/CalibDir']
            logging.info("Setting calib-dir to %s" % calibdir)
            psana.setOption('psana.calib-dir', calibdir)
        elif('LCLS' in state and 'CalibDir' in state['LCLS']):
            calibdir = state['LCLS']['CalibDir']
            logging.info("Setting calib-dir to %s" % calibdir)
            psana.setOption('psana.calib-dir', calibdir)

        if('LCLS/DataSource' in state):
            dsrc = state['LCLS/DataSource']
        elif('LCLS' in state and 'DataSource' in state['LCLS']):
            dsrc = state['LCLS']['DataSource']
        else:
            raise ValueError("You need to set the '[LCLS][DataSource]'"
                             " in the configuration")
        
        cmdline_args = parse_cmdline_args()
        self.N = cmdline_args.lcls_number_of_frames          
        if cmdline_args.lcls_run_number is not None:
            dsrc += ":run=%i" % cmdline_args.lcls_run_number

        # Cache times of events that shall be extracted from XTC (does not work for stream)
        self.event_slice = slice(0,None,1)
        if 'times' in state or 'fiducials' in state:
            if not ('times' in state and 'fiducials' in state):
                raise ValueError("Times or fiducials missing in state."
                                 " Extraction of selected events expects both event identifiers")                
            if dsrc[:len('exp=')] != 'exp=':
                raise ValueError("Extraction of events with given times and fiducials"
                                 " only works when reading from XTC with index files")
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            self.times = state['times']
            self.fiducials = state['fiducials']
            self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()                        
        elif 'indexing' in state:
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            if 'index_offset' in state:
                self.i = state['index_offset'] / ipc.mpi.nr_workers()
            else:
                self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()
            self.timestamps = self.run.times()
            if self.N is not None:
                self.timestamps = self.timestamps[:self.N]
            self.timestamps = self.timestamps[ipc.mpi.slave_rank()::ipc.mpi.nr_workers()]
        else:
            self.times = None
            self.fiducials = None
            self.i = 0
            if not dsrc.startswith('shmem='):
                self.event_slice = slice(ipc.mpi.slave_rank(), None, ipc.mpi.nr_workers())
            self.data_source = psana.DataSource(dsrc)
            self.run = None
            
        # Define how to translate between LCLS types and Hummingbird ones
        self._n2c = {}
        self._n2c[psana.Bld.BldDataFEEGasDetEnergy] = 'pulseEnergies'
        self._n2c[psana.Bld.BldDataFEEGasDetEnergyV1] = 'pulseEnergies'
        self._n2c[psana.Lusi.IpmFexV1] = 'pulseEnergies'
        self._n2c[psana.Camera.FrameV1] = 'camera'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.Bld.BldDataEBeamV1] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV2] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV3] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV4] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV5] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV6] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV7] = 'photonEnergies'
        except AttributeError:
            pass
        # CXI (CsPad)
        self._n2c[psana.CsPad.DataV2] = 'photonPixelDetectors'
        self._n2c[psana.CsPad2x2.ElementV1] = 'photonPixelDetectors'
        # CXI (OffAxis Cam)
        #self._n2c[psana.Camera.FrameV1] = 'photonPixelDetectors'
        # AMO (pnCCD)
        self._n2c[psana.PNCCD.FullFrameV1] = 'photonPixelDetectors'
        self._n2c[psana.PNCCD.FramesV1] = 'photonPixelDetectors'
        # --
        self._n2c[psana.Acqiris.DataDescV1] = 'ionTOFs'
        self._n2c[psana.EventId] = 'eventID'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.EvrData.DataV3] = 'eventCodes'
            self._n2c[psana.EvrData.DataV4] = 'eventCodes'
        except AttributeError:
            pass

        # Calculate the inverse mapping
        self._c2n = {}
        for k, v in self._n2c.iteritems():
            self._c2n[v] = self._c2n.get(v, [])
            self._c2n[v].append(k)

        # Define how to translate between LCLS sources and Hummingbird ones
        self._s2c = {}
        # CXI (OnAxis Cam)
        self._s2c['DetInfo(CxiEndstation.0:Opal4000.1)'] = 'Sc2Questar'
        # CXI (OffAxis Cam)
        self._s2c['DetInfo(CxiEndstation.0:Opal11000.0)'] = 'Sc2Offaxis'
        # CXI (CsPad)
        self._s2c['DetInfo(CxiDs1.0:Cspad.0)'] = 'CsPad Ds1'
        self._s2c['DetInfo(CxiDsd.0:Cspad.0)'] = 'CsPad Dsd'
        self._s2c['DetInfo(CxiDs2.0:Cspad.0)'] = 'CsPad Ds2'
        self._s2c['DetInfo(CxiDg3.0:Cspad2x2.0)'] = 'CsPad Dg3'
        self._s2c['DetInfo(CxiDg2.0:Cspad2x2.0)'] = 'CsPad Dg2'
        # AMO (pnCCD)
        self._s2c['DetInfo(Camp.0:pnCCD.1)'] = 'pnccdBack'
        self._s2c['DetInfo(Camp.0:pnCCD.0)'] = 'pnccdFront'
        # ToF detector
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.1)'] = 'Acqiris 1'
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.2)'] = 'Acqiris 2'
        # AMO (Acqiris)
        self._s2c['DetInfo(AmoETOF.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(AmoETOF.0:Acqiris.1)'] = 'Acqiris 1'
        self._s2c['DetInfo(AmoITOF.0:Acqiris.0)'] = 'Acqiris 2'
        self._s2c['DetInfo(AmoITOF.0:Acqiris.1)'] = 'Acqiris 3'

        # MCP Camera
        self._s2c['DetInfo(AmoEndstation.0:Opal1000.1)'] = 'OPAL1'
        # CXI (Acqiris)
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.1)'] = 'Acqiris 1'
Example 9
    def event_generator(
        self,
        node_rank: int,
        node_pool_size: int,
    ) -> Generator[Dict[str, Any], None, None]:
        """
        Retrieves psana events.

        This method overrides the corresponding method of the base class: please also
        refer to the documentation of that class for more information.

        When OM runs in shared memory mode (the usual way to retrieve real-time data at
        the LCLS facility), each processing node retrieves data from a shared memory
        server operated by the facility. The memory server must be running on the same
        machine as the processing node.

        When instead OM uses the psana framework to read offline data, this Data Event
        Handler distributes the data events as evenly as possible across all the
        processing nodes. Each node ideally retrieves the same number of events from
        psana. Only the last node might retrieve fewer events, depending on how evenly
        the total number can be split.

        Each retrieved psana event contains a single detector frame, along with all the
        data whose timestamp matches the timestamp of the frame. This also applies to
        data that is updated at a slower rate than the frame itself: for this kind of
        data, the last value reported before the frame is collected is associated with
        the frame.

        This generator function yields a dictionary storing the data for the current
        event.

        Arguments:

            node_rank: The rank, in the OM pool, of the processing node calling the
                function.

            node_pool_size: The total number of nodes in the OM pool, including all the
                processing nodes and the collecting node.
        """
        # TODO: Check types of Generator
        # Detects if data is being read from an online or offline source.
        if "shmem" in self._source:
            offline: bool = False
        else:
            offline = True
        if offline and not self._source[-4:] == ":idx":
            self._source += ":idx"

        # If the psana calibration directory is provided in the configuration file, it
        # is added as an option to psana before the DataSource is set.
        psana_calib_dir: str = self._monitor_params.get_param(
            group="data_retrieval_layer",
            parameter="psana_calibration_directory",
            parameter_type=str,
        )
        if psana_calib_dir is not None:
            psana.setOption("psana.calib-dir", psana_calib_dir)
        else:
            print("OM Warning: Calibration directory not provided or not found.")

        psana_source = psana.DataSource(self._source)

        data_event: Dict[str, Dict[str, Any]] = {}
        data_event["data_extraction_funcs"] = self._required_data_extraction_funcs
        data_event["additional_info"] = {}
        data_event["additional_info"].update(self._event_info_to_append)

        # Calls all the required psana detector interface initialization functions and
        # stores the returned objects in a dictionary.
        data_event["additional_info"]["psana_detector_interface"] = {}
        f_name: str
        func: Callable[[parameters.MonitorParams], Any]
        for f_name, func in self._required_psana_detector_init_funcs.items():
            data_event["additional_info"]["psana_detector_interface"][
                f_name.split("_init")[0]
            ] = func(self._monitor_params)

        # Initializes the psana event source and starts retrieving events.
        if offline:
            psana_events: Any = _psana_offline_event_generator(
                psana_source=psana_source,
                node_rank=node_rank,
                mpi_pool_size=node_pool_size,
            )
        else:
            psana_events = psana_source.events()

        psana_event: Any
        for psana_event in psana_events:
            data_event["data"] = psana_event

            # Recovers the timestamp from the psana event (as seconds from the Epoch)
            # and stores it in the event dictionary to be retrieved later.
            timestamp_epoch_format: Any = psana_event.get(psana.EventId).time()
            data_event["additional_info"]["timestamp"] = numpy.float64(
                str(timestamp_epoch_format[0]) + "." + str(timestamp_epoch_format[1])
            )

            yield data_event
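
The helper _psana_offline_event_generator used above is not shown in this listing. The sketch below is one possible shape for such a splitter, assuming an indexed (':idx') source, at least one processing node, and that rank 0 is the collecting node; the function name and the exact slicing are illustrative, not the actual OM implementation.

def _psana_offline_event_generator_sketch(psana_source, node_rank, mpi_pool_size):
    # Processing nodes are assumed to have ranks 1 .. mpi_pool_size - 1.
    num_workers = mpi_pool_size - 1
    for run in psana_source.runs():
        times = run.times()                          # requires the ':idx' source suffix
        chunk_size = -(-len(times) // num_workers)   # ceiling division
        my_times = times[(node_rank - 1) * chunk_size:node_rank * chunk_size]
        for event_time in my_times:
            yield run.event(event_time)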
Example 10
def process_run_mpi(ds_string, calib_dir, out_filename):
    # Set the calibration directory before creating the DataSource so psana picks it up.
    psana.setOption('psana.calib-dir', calib_dir)
    ds = psana.DataSource(ds_string)
    proc = XTCavProcessor()
    proc.set_data_source(ds)

    fit_delays = []
    fit_fwhms = []
    fit_errors = []
    moment_delays = []
    moment_fwhms = []
    agreement = []

    n_good = 0
    for idx, evt in enumerate(ds.events()):
        if idx % size != rank:
            continue
        ok = proc.set_event(evt)
        if not ok:
            continue
        ok = proc.process(agr_thresh=0., verbose=False, force_split=True)
        if not ok:
            continue
        fit_delays.append(proc.results['fit_delay'])
        fit_fwhms.append(proc.results['fit_fwhms'])
        fit_errors.append(proc.results['fit_errors'])
        moment_delays.append(proc.results['moment_delay'])
        moment_fwhms.append(proc.results['moment_fwhms'])
        agreement.append(proc.results['retr_agreement'])

        n_good += 1
        if n_good % 100 == 0:
            print('Processed {0} events with {1} successes in rank {2}'.format(
                idx, n_good, rank))

    fit_delays = np.array(fit_delays)
    fit_fwhms = np.array(fit_fwhms)
    fit_errors = np.array(fit_errors)
    moment_delays = np.array(moment_delays)
    moment_fwhms = np.array(moment_fwhms)
    agreement = np.array(agreement)

    fit_delays = comm.gather(fit_delays, root=0)
    fit_fwhms = comm.gather(fit_fwhms, root=0)
    fit_errors = comm.gather(fit_errors, root=0)
    moment_delays = comm.gather(moment_delays, root=0)
    moment_fwhms = comm.gather(moment_fwhms, root=0)
    agreement = comm.gather(agreement, root=0)

    if rank == 0:
        fit_delays = np.concatenate(fit_delays)
        fit_fwhms = np.concatenate(fit_fwhms)
        fit_errors = np.concatenate(fit_errors)
        moment_delays = np.concatenate(moment_delays)
        moment_fwhms = np.concatenate(moment_fwhms)
        agreement = np.concatenate(agreement)

        d = dict(fit_delays=fit_delays,
                 fit_fwhms=fit_fwhms,
                 fit_errors=fit_errors,
                 moment_delays=moment_delays,
                 moment_fwhms=moment_fwhms,
                 agreement=agreement)
        with open(out_filename, 'wb') as file:
            pickle.dump(d, file)
    MPI.Finalize()
Example 11
def allow_corrupt_epics():
    psana.setOption('psana.allow-corrupt-epics', True)
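
A hypothetical usage sketch: as with the other psana options in these examples, the option is presumably set before the DataSource is created so that it takes effect. The experiment/run string below is only a placeholder.

import psana

allow_corrupt_epics()                          # set the option first
ds = psana.DataSource('exp=xpptut15:run=300')  # placeholder experiment/run string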
Example 12
# these two lines for example purposes only, to allow user to write
# calibration information to local directory called "calib"
# should be deleted for real analysis.
import psana
psana.setOption('psana.calib-dir', 'calib')

from xtcav.GenerateDarkBackground import *
GDB = GenerateDarkBackground()
GDB.experiment = 'xpptut15'
GDB.runs = '300'
GDB.maxshots = 1000
GDB.SetValidityRange(
    300, 302
)  # delete second run number argument to have the validity range be open-ended ("end")
GDB.Generate()
Example 13
    # cal_file_path = ''.join([jt_dir, cal_file])
    print('Calibration file: {}'.format(cal_file_path))
    with open(cal_file_path) as f:
        cal_results = json.load(f)
else:
    logger.warning('You must run a calibration before starting jet tracking')
    sys.exit()

if sim:
    # Run from offline data
    exp_dir = ''.join(['/cds/data/psdm/', hutch, '/', exp, '/xtc/'])
    dsname = ''.join(['exp=', exp, ':run=', run, ':smd:', 'dir=', exp_dir])
else:
    # Run on shared memory
    dsname = 'shmem=psana.0:stop=no'
    psana.setOption('psana.calib-dir', calib_dir)

ds = psana.DataSource(dsname)
detector = psana.Detector(det_map['name'])
ipm = (psana.Detector(ipm_name), ipm_det)
if jet_cam_name is not None:
    jet_cam = psana.Detector(jet_cam_name)
else:
    jet_cam = None
evr = psana.Detector(evr_name)
r_mask = get_r_masks(det_map['shape'])

if rank == 0:
    master = MpiMaster(rank, api_port, det_map, pv_map, sim=sim)
    master.start_run()
else:
Example 14
    def start(self, verbose=False):

        if self.role == 'worker':

            req = None
            
            psana.setOption('psana.calib-dir', self.psana_calib_dir)

            self.psana_source = psana.DataSource(self.source)

            if self.offline is False:
                psana_events = self.psana_source.events()
            else:
                def psana_events_generator():
                    for r in self.psana_source.runs():
                        times = r.times()
                        mylength = int(math.ceil(len(times) / float(self.mpi_size-1)))
                        mytimes = times[(self.mpi_rank-1) * mylength: self.mpi_rank * mylength]
                        for mt in mytimes:
                            yield r.event(mt)
                psana_events = psana_events_generator()

            event = {'monitor_params': self.monitor_params}

            det = psana.Detector(self.detector_name)
            det_dist = psana.Detector(self.detector_dist_epics_pv)

            # Loop over events and process
            for evt in psana_events:

                if evt is None:
                    continue

                # Reject events above the rejection threshold
                event_id = str(evt.get(psana.EventId))
                timestring = event_id.split('time=')[1].split(',')[0]
                timestamp = time.strptime(timestring[:-6], '%Y-%m-%d %H:%M:%S.%f')
                timestamp = datetime.datetime.fromtimestamp(time.mktime(timestamp))
                timenow = datetime.datetime.now()

                if (timenow - timestamp).total_seconds() > self.event_rejection_threshold:
                    continue

                self.event_timestamp = timestamp

                # Check if a shutdown message is coming from the server
                if mpi4py.MPI.COMM_WORLD.Iprobe(source=0, tag=self.DIETAG):
                    self.shutdown('Shutting down RANK: {0}.'.format(self.mpi_rank))

                event['evt'] = evt
                event['det'] = det
                event['det_dist'] = det_dist

                self.extract_data(event, self)

                if self.raw_data is None:
                    continue

                result = self.map()

                # send the mapped event data to the master process
                if req:
                    req.Wait()  # be sure we're not still sending something
                req = mpi4py.MPI.COMM_WORLD.isend(result, dest=0, tag=0)

            # When all events have been processed, send the master a
            # dictionary with an 'end' flag and die
            end_dict = {'end': True}
            if req:
                req.Wait()  # be sure we're not still sending something
            mpi4py.MPI.COMM_WORLD.isend((end_dict, self.mpi_rank), dest=0, tag=0)
            mpi4py.MPI.Finalize()
            sys.exit(0)

        # The following is executed on the master
        elif self.role == 'master':

            if verbose:
                print ('Starting master.')

            # Loops continuously waiting for processed data from workers
            while True:

                try:

                    buffer_data = mpi4py.MPI.COMM_WORLD.recv(
                        source=mpi4py.MPI.ANY_SOURCE,
                        tag=0)
                    if 'end' in buffer_data[0].keys():
                        print ('Finalizing {0}'.format(buffer_data[1]))
                        self.num_nomore += 1
                        if self.num_nomore == self.mpi_size - 1:
                            print('All workers have run out of events.')
                            print('Shutting down.')
                            self.end_processing()
                            mpi4py.MPI.Finalize()
                            sys.exit(0)
                        continue

                    self.reduce(buffer_data)
                    self.num_reduced_events += 1

                except KeyboardInterrupt as e:
                    print ('Received keyboard interrupt...')
                    print (str(e))
                    print ('shutting down MPI.')
                    self.shutdown()
                    print ('---> execution finished.')
                    sys.exit(0)

        return
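
The age check above reconstructs the event time by parsing the printable EventId string. A more direct route, following the (seconds, nanoseconds) pair used in the earlier OM example, is sketched below; the function name and threshold handling are illustrative only.

import datetime
import psana

def event_is_stale(evt, max_age_seconds):
    # EventId.time() returns (seconds since the Epoch, nanoseconds).
    seconds, nanoseconds = evt.get(psana.EventId).time()
    event_time = datetime.datetime.fromtimestamp(seconds + nanoseconds * 1e-9)
    return (datetime.datetime.now() - event_time).total_seconds() > max_age_seconds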
Example 15
assert os.path.isdir(args.outdir)

from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
assert size>=2, 'Require at least two mpi ranks'
numslaves = size-1

if rank == 0:
    psana_version = subprocess.check_output(["ls","-lart","/reg/g/psdm/sw/releases/ana-current"]).split('-> ')[-1].split('\n')[0]
    svn_version = subprocess.check_output("svnversion")
    print("psana,svn versions: ", psana_version, svn_version)
    print("Running litPixels: ", args.exprun)

if args.localCalib: psana.setOption('psana.calib-dir','./calib')

def getAveragePhotonEnergy():
    """
    Get average photon energy of this experiment in keV
    """
    times = run.times()
    eventTotal = len(times)
    np.random.seed(2016)
    randomEvents = np.random.permutation(eventTotal)
    numSample = 100
    if numSample > eventTotal:
        numSample = eventTotal
    photonEnergySample = np.zeros(numSample)
    ebeamDet = psana.Detector('EBeam')
    epics = ds.env().epicsStore()
Example 16
#needs to be run on daq-sxr-mon06
#switch between reading from an XTC file and from shared memory by commenting/uncommenting the DataSource lines below.

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import psana
import time
from scipy.odr import *
print "Needs to be run on daq-sxr-mon06"

#ds = psana.DataSource("exp=sxrlq2715:run=42")				#using file comment out as necessary.

psana.setOption('psana.calib-dir',
                '/reg//d/psdm/sxr/sxrlq2715/calib')  #using shared memory
ds = psana.DataSource("shmem=psana.0:stop=no")  #using shared memory

myEnumeratedEvents = enumerate(ds.events())

#myBackGroundImage = 0

#imagingDetectorObject = psana.Detector("EXS_OPAL")
imagingDetectorObject = psana.Detector("pnccd")
#waveFormDetectorObject = psana.Detector("Acq01")

myAverageImage = 0
Intensity_ROI1 = np.zeros(100)
Intensity_ROI2 = np.zeros(100)

#im = plt.imshow(np.zeros([5,5]), animated=True,clim=(-500,1400))
Example 17
    def __init__(self, state):
        self.timestamps = None
        self.library = 'psana'
        config_file = None
        if('LCLS/PsanaConf' in state):
            config_file = os.path.abspath(state['LCLS/PsanaConf'])
        elif('LCLS' in state and 'PsanaConf' in state['LCLS']):
            config_file = os.path.abspath(state['LCLS']['PsanaConf'])
        if(config_file is not None):
            if(not os.path.isfile(config_file)):
                raise RuntimeError("Could not find [LCLS][PsanaConf]: %s" %
                                   (config_file))
            logging.info("Info: Found configuration file %s.", config_file)
            psana.setConfigFile(config_file)

        if 'LCLS/CalibDir' in state:
            calibdir = state['LCLS/CalibDir']
            logging.info("Setting calib-dir to %s" % calibdir)
            psana.setOption('psana.calib-dir', calibdir)
        elif('LCLS' in state and 'CalibDir' in state['LCLS']):
            calibdir = state['LCLS']['CalibDir']
            logging.info("Setting calib-dir to %s" % calibdir)
            psana.setOption('psana.calib-dir', calibdir)

        if('LCLS/DataSource' in state):
            dsrc = state['LCLS/DataSource']
        elif('LCLS' in state and 'DataSource' in state['LCLS']):
            dsrc = state['LCLS']['DataSource']
        else:
            raise ValueError("You need to set the '[LCLS][DataSource]'"
                             " in the configuration")
        
        cmdline_args = _argparser.parse_args()
        self.N = cmdline_args.lcls_number_of_frames          
        if cmdline_args.lcls_run_number is not None:
            dsrc += ":run=%i" % cmdline_args.lcls_run_number

        # Cache times of events that shall be extracted from XTC (does not work for stream)
        self.event_slice = slice(0,None,1)
        if 'times' in state or 'fiducials' in state:
            if not ('times' in state and 'fiducials' in state):
                raise ValueError("Times or fiducials missing in state."
                                 " Extraction of selected events expects both event identifiers")                
            if dsrc[:len('exp=')] != 'exp=':
                raise ValueError("Extraction of events with given times and fiducials"
                                 " only works when reading from XTC with index files")
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            self.times = state['times']
            self.fiducials = state['fiducials']
            self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()                        
        elif 'indexing' in state:
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            if 'index_offset' in state:
                self.i = state['index_offset'] / ipc.mpi.nr_event_readers()
            else:
                self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()
            self.timestamps = self.run.times()
            if self.N is not None:
                self.timestamps = self.timestamps[:self.N]
            self.timestamps = self.timestamps[ipc.mpi.event_reader_rank()::ipc.mpi.nr_event_readers()]
        else:
            self.times = None
            self.fiducials = None
            self.i = 0
            if not dsrc.startswith('shmem='):
                self.event_slice = slice(ipc.mpi.event_reader_rank(), None, ipc.mpi.nr_event_readers())
            self.data_source = psana.DataSource(dsrc)
            self.run = None
            
        # Define how to translate between LCLS types and Hummingbird ones
        self._n2c = {}
        self._n2c[psana.Bld.BldDataFEEGasDetEnergy] = 'pulseEnergies'
        self._n2c[psana.Bld.BldDataFEEGasDetEnergyV1] = 'pulseEnergies'
        self._n2c[psana.Lusi.IpmFexV1] = 'pulseEnergies'
        self._n2c[psana.Camera.FrameV1] = 'camera'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.Bld.BldDataEBeamV1] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV2] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV3] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV4] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV5] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV6] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV7] = 'photonEnergies'
        except AttributeError:
            pass
        # CXI (CsPad)
        self._n2c[psana.CsPad.DataV2] = 'photonPixelDetectors'
        self._n2c[psana.CsPad2x2.ElementV1] = 'photonPixelDetectors'
        # CXI (OffAxis Cam)
        #self._n2c[psana.Camera.FrameV1] = 'photonPixelDetectors'
        # AMO (pnCCD)
        self._n2c[psana.PNCCD.FullFrameV1] = 'photonPixelDetectors'
        self._n2c[psana.PNCCD.FramesV1] = 'photonPixelDetectors'
        # --
        self._n2c[psana.Acqiris.DataDescV1] = 'ionTOFs'
        self._n2c[psana.EventId] = 'eventID'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.EvrData.DataV3] = 'eventCodes'
            self._n2c[psana.EvrData.DataV4] = 'eventCodes'
        except AttributeError:
            pass

        # Calculate the inverse mapping
        self._c2n = {}
        for k, v in self._n2c.iteritems():
            self._c2n[v] = self._c2n.get(v, [])
            self._c2n[v].append(k)

        # Define how to translate between LCLS sources and Hummingbird ones
        self._s2c = {}
        # CXI (OnAxis Cam)
        self._s2c['DetInfo(CxiEndstation.0:Opal4000.1)'] = 'Sc2Questar'
        # CXI (OffAxis Cam)
        self._s2c['DetInfo(CxiEndstation.0:Opal11000.0)'] = 'Sc2Offaxis'
        # CXI (CsPad)
        self._s2c['DetInfo(CxiDs1.0:Cspad.0)'] = 'CsPad Ds1'
        self._s2c['DetInfo(CxiDsd.0:Cspad.0)'] = 'CsPad Dsd'
        self._s2c['DetInfo(CxiDs2.0:Cspad.0)'] = 'CsPad Ds2'
        self._s2c['DetInfo(CxiDg3.0:Cspad2x2.0)'] = 'CsPad Dg3'
        self._s2c['DetInfo(CxiDg2.0:Cspad2x2.0)'] = 'CsPad Dg2'
        # AMO (pnCCD)
        self._s2c['DetInfo(Camp.0:pnCCD.1)'] = 'pnccdBack'
        self._s2c['DetInfo(Camp.0:pnCCD.0)'] = 'pnccdFront'
        # ToF detector
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.1)'] = 'Acqiris 1'
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.2)'] = 'Acqiris 2'
        # AMO (Acqiris)
        self._s2c['DetInfo(AmoETOF.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(AmoETOF.0:Acqiris.1)'] = 'Acqiris 1'
        self._s2c['DetInfo(AmoITOF.0:Acqiris.0)'] = 'Acqiris 2'
        self._s2c['DetInfo(AmoITOF.0:Acqiris.1)'] = 'Acqiris 3'

        # MCP Camera
        self._s2c['DetInfo(AmoEndstation.0:Opal1000.1)'] = 'OPAL1'
        # CXI (Acqiris)
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.1)'] = 'Acqiris 1'

        self.init_detectors(state)
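
Based only on the keys read in this constructor, a hypothetical state dictionary might look like the sketch below; the experiment string and paths are placeholders, and the commented keys are optional.

state = {
    'LCLS/DataSource': 'exp=xpptut15:run=300',          # required (placeholder string)
    'LCLS/CalibDir': '/reg/d/psdm/xpp/xpptut15/calib',  # optional calibration directory
    # 'LCLS/PsanaConf': 'psana.cfg',                    # optional psana configuration file
    # To extract selected events from indexed XTC, both of these must be given together:
    # 'times': [...], 'fiducials': [...],
}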
Example 18
                    # attempt to pass as an array of ints e.g. '1, 2, 3'
                    try :
                        l = monitor_params[sect][op].split(',')
                        monitor_params[sect][op] = np.array(l, dtype=np.int)
                        continue
                    except :
                        pass

    return monitor_params


if __name__ == '__main__':
    args = parse_cmdline_args()
    
    import psana
    
    # The calib line below will write the calib directory in the current directory
    # For 'real' analysis during the beamtime just delete it and the calib directory
    # for the experiment will be used by default.
    if args.output is not None :
        psana.setOption('psana.calib-dir',args.output)

    #from xtcav.GenerateDarkBackground import *
    from GenerateDarkBackground import GenerateDarkBackground
    GDB=GenerateDarkBackground();
    GDB.experiment=args.experiment
    GDB.runs=args.run
    GDB.maxshots=args.maxshots
    GDB.SetValidityRange(args.run) # delete second run number argument to have the validity range be open-ended ("end")
    GDB.Generate();
Example 19
    def setupExperiment(self):
        if self.parent.args.v >= 1: print "Doing setupExperiment"
        if self.hasExpRunInfo():
            self.getUsername()
            # Set up psocake directory in scratch
            if self.parent.args.outDir is None:
                self.parent.rootDir = '/reg/d/psdm/' + self.parent.experimentName[:3] + '/' + self.parent.experimentName
                self.parent.elogDir = self.parent.rootDir + '/scratch/psocake'
                self.parent.psocakeDir = self.parent.rootDir + '/scratch/' + self.username + '/psocake'
            else:
                self.parent.rootDir = self.parent.args.outDir
                self.parent.elogDir = self.parent.rootDir + '/psocake'
                self.parent.psocakeDir = self.parent.rootDir + '/' + self.username + '/psocake'
            self.parent.psocakeRunDir = self.parent.psocakeDir + '/r' + str(self.parent.runNumber).zfill(4)

            if self.parent.args.v >= 1: print "psocakeDir: ", self.parent.psocakeDir

            # Update peak finder outdir and run number
            self.parent.pk.p3.param(self.parent.pk.hitParam_grp, self.parent.pk.hitParam_outDir_str).setValue(self.parent.psocakeDir)
            self.parent.pk.p3.param(self.parent.pk.hitParam_grp, self.parent.pk.hitParam_runs_str).setValue(self.parent.runNumber)
            # Update powder outdir and run number
            self.parent.mk.p6.param(self.parent.mk.powder_grp, self.parent.mk.powder_outDir_str).setValue(self.parent.psocakeDir)
            self.parent.mk.p6.param(self.parent.mk.powder_grp, self.parent.mk.powder_runs_str).setValue(self.parent.runNumber)
            # Update hit finding outdir, run number
            self.parent.hf.p8.param(self.parent.hf.spiParam_grp, self.parent.hf.spiParam_outDir_str).setValue(self.parent.psocakeDir)
            self.parent.hf.p8.param(self.parent.hf.spiParam_grp, self.parent.hf.spiParam_runs_str).setValue(self.parent.runNumber)
            # Update indexing outdir, run number
            self.parent.index.p9.param(self.parent.index.launch_grp, self.parent.index.outDir_str).setValue(self.parent.psocakeDir)
            self.parent.index.p9.param(self.parent.index.launch_grp, self.parent.index.runs_str).setValue(self.parent.runNumber)
            # Update quantifier filename
            fname = self.parent.psocakeRunDir + '/' + self.parent.experimentName + '_' + str(self.parent.runNumber).zfill(4) + '.cxi'
            if self.parent.args.mode == 'sfx':
                dsetname = '/entry_1/result_1/nPeaksAll'
            elif self.parent.args.mode == 'spi':
                dsetname = '/entry_1/result_1/nHitsAll'
            else:
                dsetname = '/entry_1/result_1/'
            self.parent.small.pSmall.param(self.parent.small.quantifier_grp, self.parent.small.quantifier_filename_str).setValue(fname)
            self.parent.small.pSmall.param(self.parent.small.quantifier_grp,  self.parent.small.quantifier_dataset_str).setValue(dsetname)
            self.setupPsocake()
    
            # Update hidden CrystFEL files
            self.updateHiddenCrystfelFiles('lcls')
    
            if self.parent.args.localCalib:
                if self.parent.args.v >= 1: print "Using local calib directory"
                psana.setOption('psana.calib-dir', './calib')
    
            try:
                self.ds = psana.DataSource('exp=' + str(self.parent.experimentName) + ':run=' + str(
                    self.parent.runNumber) + ':idx')
            except:
                print "############# No such datasource exists ###############"
            self.run = self.ds.runs().next()
            self.times = self.run.times()
            self.eventTotal = len(self.times)
            self.parent.stack.spinBox.setMaximum(self.eventTotal - self.parent.stack.stackSize)
            self.p.param(self.exp_grp, self.exp_evt_str).setLimits((0, self.eventTotal - 1))
            self.p.param(self.exp_grp, self.exp_evt_str, self.exp_numEvents_str).setValue(self.eventTotal)
            self.env = self.ds.env()
    
            if self.parent.detInfoList is None:
                self.parent.evt = self.run.event(self.times[-1])
                myAreaDetectors = []
                self.parent.detnames = psana.DetNames()
                for k in self.parent.detnames:
                    try:
                        if Detector.PyDetector.dettype(str(k[0]), self.env) == Detector.AreaDetector.AreaDetector:
                            myAreaDetectors.append(k)
                    except ValueError:
                        continue
                self.parent.detInfoList = list(set(myAreaDetectors))
                print "#######################################"
                print "# Available area detectors: "
                for k in self.parent.detInfoList:
                    print "#", k
                print "#######################################"
    
            # Launch e-log crawler
            if self.logger and self.crawlerRunning == False:
                if self.parent.args.v >= 1: print "Launching crawler"
                self.launchCrawler()
                self.crawlerRunning = True
    
        if self.hasExpRunDetInfo():
            self.parent.det = psana.Detector(str(self.parent.detInfo), self.env)
            self.parent.det.do_reshape_2d_to_3d(flag=True)
            self.parent.detAlias = self.getDetectorAlias(str(self.parent.detInfo))
            self.parent.epics = self.ds.env().epicsStore()
            self.setClen()

            # detector distance
            self.updateDetectorDistance('lcls')
            # pixel size
            self.updatePixelSize('lcls')
            # photon energy
            self.updatePhotonEnergy('lcls')

            # Some detectors do not read out at 120 Hz. So need to loop over events to guarantee a valid detector image.
            if self.parent.evt is None:
                self.parent.evt = self.run.event(self.times[0])
            self.detGuaranteed = self.parent.det.calib(self.parent.evt)
            if self.detGuaranteed is None:  # image isn't present for this event
                print "No image in this event. Searching for an event..."
                for i in np.arange(len(self.times)):
                    evt = self.run.event(self.times[i])
                    self.detGuaranteed = self.parent.det.calib(evt)
                    if self.detGuaranteed is not None:
                        print "Found an event with image: ", i
                        break

            # Setup pixel indices
            if self.detGuaranteed is not None:
                self.parent.pixelInd = np.reshape(np.arange(self.detGuaranteed.size) + 1, self.detGuaranteed.shape)
                self.parent.pixelIndAssem = self.parent.img.getAssembledImage('lcls', self.parent.pixelInd)
                self.parent.pixelIndAssem -= 1  # First pixel is 0
                # Get detector shape
                self.detGuaranteedData = self.parent.det.image(self.parent.evt, self.detGuaranteed)

            # Write a temporary geom file
            self.parent.geom.deployCrystfelGeometry('lcls')
            self.parent.geom.writeCrystfelGeom('lcls')

            self.parent.img.setupRadialBackground()
            self.parent.img.updatePolarizationFactor()

        if self.parent.args.v >= 1: print "Done setupExperiment"