Example #1
    def register_cfg_file(self, path):
        """
        Registers a psana configuration file at `path`.
        """
        if not os.path.exists(path):
            raise IOError('Could not find configuration file: %s' % path)
        psana.setConfigFile(path)
        self._cfg_file = path
        return
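A minimal standalone sketch of the pattern this method wraps (call psana.setConfigFile before creating any DataSource); the config path, experiment name and run number below are placeholders, not part of the original source:

# Sketch: register a config file, then create a DataSource (paths/expt/run illustrative only).
import os
import psana

def make_data_source(cfg_path, expt, run):
    if not os.path.exists(cfg_path):
        raise IOError('Could not find configuration file: %s' % cfg_path)
    psana.setConfigFile(cfg_path)  # must precede DataSource creation
    return psana.DataSource('exp=%s:run=%d:idx' % (expt, run))

ds = make_data_source('psana.cfg', 'cxic0415', 25)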
Example #2
def data_source(run, expt='cxic0415', times=None):
    """
    Replacement data source (event generator)
    """

    cfg_path = '/reg/neh/home2/tjlane/analysis/ssc-com/ssc/psana.cfg' # hardwired for now :(
    psana.setConfigFile(cfg_path)

    ds = psana.DataSource('exp=CXI/%s:run=%d:idx' % (expt, run))
    
    for r in ds.runs():
        if times is None:
            times = r.times()
        for t in times:
            yield Event(r.event(t))
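A short sketch of how this generator could be consumed, assuming the environment of the example above (psana, the hardwired config file and the indexed XTC data are available); the run number is illustrative only:

# Hypothetical consumer of data_source() -- run number is illustrative only.
for i, evt in enumerate(data_source(25, expt='cxic0415')):
    print('got event %d' % i)  # replace with real per-event analysis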
Example #3
    def run(self):
        """ Process all images assigned to this thread """
        params, options = self.parser.parse_args(show_diff_phil=True)

        if params.input.experiment is None or \
           params.input.run_num is None or \
           params.input.address is None:
            raise Usage(self.usage)

        if params.format.file_format == "cbf":
            if params.format.cbf.detz_offset is None:
                raise Usage(self.usage)
        elif params.format.file_format == "pickle":
            if params.format.pickle.cfg is None:
                raise Usage(self.usage)
        else:
            raise Usage(self.usage)

        if not os.path.exists(params.output.output_dir):
            raise Sorry("Output path not found:" + params.output.output_dir)

        # Save the parameters
        self.params = params
        self.options = options

        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
        size = comm.Get_size()  # size: number of processes running in this job

        # set up psana
        if params.format.file_format == "pickle":
            psana.setConfigFile(params.format.pickle.cfg)

        dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment,
                                              params.input.run_num)
        ds = psana.DataSource(dataset_name)

        if params.format.file_format == "cbf":
            src = psana.Source('DetInfo(%s)' % params.input.address)
            psana_det = psana.Detector(params.input.address, ds.env())

        # set this to sys.maxint to analyze all events
        if params.dispatch.max_events is None:
            max_events = sys.maxint
        else:
            max_events = params.dispatch.max_events

        for run in ds.runs():
            if params.format.file_format == "cbf":
                # load a header only cspad cbf from the slac metrology
                base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                    run, params.input.address)
                if base_dxtbx is None:
                    raise Sorry("Couldn't load calibration file for run %d" %
                                run.run())

            # list of all events
            times = run.times()
            nevents = min(len(times), max_events)
            # chop the list into pieces, depending on rank.  This assigns each process
            # events such that they get every Nth event, where N is the number of processes
            mytimes = [
                times[i] for i in xrange(nevents) if (i + rank) % size == 0
            ]

            for i in xrange(len(mytimes)):
                evt = run.event(mytimes[i])
                id = evt.get(psana.EventId)
                print "Event #", i, " has id:", id

                timestamp = cspad_tbx.evt_timestamp(
                    cspad_tbx.evt_time(evt))  # human readable format
                if timestamp is None:
                    print "No timestamp, skipping shot"
                    continue
                t = timestamp
                s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + \
                    t[14:16] + t[17:19] + t[20:23]
                print "Processing shot", s

                if params.format.file_format == "pickle":
                    if evt.get("skip_event"):
                        print "Skipping event", id
                        continue
                    # the data needs to have already been processed and put into the event by psana
                    data = evt.get(params.format.pickle.out_key)
                    if data is None:
                        print "No data"
                        continue

                    # set output paths according to the templates
                    path = os.path.join(params.output.output_dir,
                                        "shot-" + s + ".pickle")

                    print "Saving", path
                    easy_pickle.dump(path, data)

                elif params.format.file_format == "cbf":
                    # get numpy array, 32x185x388
                    data = cspad_cbf_tbx.get_psana_corrected_data(
                        psana_det,
                        evt,
                        use_default=False,
                        dark=True,
                        common_mode=None,
                        apply_gain_mask=params.format.cbf.gain_mask_value is not None,
                        gain_mask_value=params.format.cbf.gain_mask_value,
                        per_pixel_gain=False)

                    distance = cspad_tbx.env_distance(
                        params.input.address, run.env(),
                        params.format.cbf.detz_offset)
                    if distance is None:
                        print "No distance, skipping shot"
                        continue

                    if self.params.format.cbf.override_energy is None:
                        wavelength = cspad_tbx.evt_wavelength(evt)
                        if wavelength is None:
                            print "No wavelength, skipping shot"
                            continue
                    else:
                        wavelength = 12398.4187 / self.params.format.cbf.override_energy

                    # stitch together the header, data and metadata into the final dxtbx format object
                    cspad_img = cspad_cbf_tbx.format_object_from_data(
                        base_dxtbx, data, distance, wavelength, timestamp,
                        params.input.address)
                    path = os.path.join(params.output.output_dir,
                                        "shot-" + s + ".cbf")
                    print "Saving", path

                    # write the file
                    import pycbf
                    cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
                      pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)

            run.end()
        ds.end()
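The `(i + rank) % size == 0` filter above is a simple round-robin split of the event list across MPI ranks; a standalone sketch of just that striding, with a plain list standing in for run.times():

# Round-robin work assignment across MPI ranks (the list stands in for run.times()).
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

times = list(range(100))
mytimes = [times[i] for i in range(len(times)) if (i + rank) % size == 0]
print('rank %d handles %d of %d events' % (rank, len(mytimes), len(times)))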
Example #4
def average(argv=None):
    if argv is None:
        argv = sys.argv[1:]

    try:
        from mpi4py import MPI
    except ImportError:
        raise Sorry("MPI not found")

    command_line = (libtbx.option_parser.option_parser(usage="""
%s [-p] -c config -x experiment -a address -r run -d detz_offset [-o outputdir] [-A averagepath] [-S stddevpath] [-M maxpath] [-n numevents] [-s skipnevents] [-v] [-m] [-b bin_size] [-X override_beam_x] [-Y override_beam_y] [-D xtc_dir] [-f] [-g gain_mask_value] [--min] [--minpath minpath]

To write image pickles use -p, otherwise the program writes CSPAD CBFs.
Writing CBFs requires the geometry to be already deployed.

Examples:
cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

Use one process on the current node to process all the events from run 25 of
experiment cxi49812, using a detz_offset of 571.

mpirun -n 16 cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

As above, using 16 cores on the current node.

bsub -a mympi -n 100 -o average.out -q psanaq cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571 -o cxi49812

As above, using the psanaq and 100 cores, putting the log in average.out and
the output images in the folder cxi49812.
""" % libtbx.env.dispatcher_name).option(
        None,
        "--as_pickle",
        "-p",
        action="store_true",
        default=False,
        dest="as_pickle",
        help="Write results as image pickle files instead of cbf files"
    ).option(
        None,
        "--raw_data",
        "-R",
        action="store_true",
        default=False,
        dest="raw_data",
        help=
        "Disable psana corrections such as dark pedestal subtraction or common mode (cbf only)"
    ).option(
        None,
        "--background_pickle",
        "-B",
        default=None,
        dest="background_pickle",
        help=""
    ).option(
        None,
        "--config",
        "-c",
        type="string",
        default=None,
        dest="config",
        metavar="PATH",
        help="psana config file"
    ).option(
        None,
        "--experiment",
        "-x",
        type="string",
        default=None,
        dest="experiment",
        help="experiment name (eg cxi84914)"
    ).option(
        None,
        "--run",
        "-r",
        type="int",
        default=None,
        dest="run",
        help="run number"
    ).option(
        None,
        "--address",
        "-a",
        type="string",
        default="CxiDs2.0:Cspad.0",
        dest="address",
        help="detector address name (eg CxiDs2.0:Cspad.0)"
    ).option(
        None,
        "--detz_offset",
        "-d",
        type="float",
        default=None,
        dest="detz_offset",
        help=
        "offset (in mm) from sample interaction region to back of CSPAD detector rail (CXI), or detector distance (XPP)"
    ).option(
        None,
        "--outputdir",
        "-o",
        type="string",
        default=".",
        dest="outputdir",
        metavar="PATH",
        help="Optional path to output directory for output files"
    ).option(
        None,
        "--averagebase",
        "-A",
        type="string",
        default="{experiment!l}_avg-r{run:04d}",
        dest="averagepath",
        metavar="PATH",
        help=
        "Path to output average image without extension. String substitution allowed"
    ).option(
        None,
        "--stddevbase",
        "-S",
        type="string",
        default="{experiment!l}_stddev-r{run:04d}",
        dest="stddevpath",
        metavar="PATH",
        help=
        "Path to output standard deviation image without extension. String substitution allowed"
    ).option(
        None,
        "--maxbase",
        "-M",
        type="string",
        default="{experiment!l}_max-r{run:04d}",
        dest="maxpath",
        metavar="PATH",
        help=
        "Path to output maximum projection image without extension. String substitution allowed"
    ).option(
        None,
        "--numevents",
        "-n",
        type="int",
        default=None,
        dest="numevents",
        help="Maximum number of events to process. Default: all"
    ).option(
        None,
        "--skipevents",
        "-s",
        type="int",
        default=0,
        dest="skipevents",
        help="Number of events in the beginning of the run to skip. Default: 0"
    ).option(
        None,
        "--verbose",
        "-v",
        action="store_true",
        default=False,
        dest="verbose",
        help="Print more information about progress"
    ).option(
        None,
        "--pickle-optical-metrology",
        "-m",
        action="store_true",
        default=False,
        dest="pickle_optical_metrology",
        help=
        "If writing pickle files, use the optical metrology in the experiment's calib directory"
    ).option(
        None,
        "--bin_size",
        "-b",
        type="int",
        default=None,
        dest="bin_size",
        help="Rayonix detector bin size"
    ).option(
        None,
        "--override_beam_x",
        "-X",
        type="float",
        default=None,
        dest="override_beam_x",
        help="Rayonix detector beam center x coordinate"
    ).option(
        None,
        "--override_beam_y",
        "-Y",
        type="float",
        default=None,
        dest="override_beam_y",
        help="Rayonix detector beam center y coordinate"
    ).option(
        None,
        "--calib_dir",
        "-C",
        type="string",
        default=None,
        dest="calib_dir",
        metavar="PATH",
        help="calibration directory"
    ).option(
        None,
        "--pickle_calib_dir",
        "-P",
        type="string",
        default=None,
        dest="pickle_calib_dir",
        metavar="PATH",
        help=
        "pickle calibration directory specification. Replaces --calib_dir functionality."
    ).option(
        None,
        "--xtc_dir",
        "-D",
        type="string",
        default=None,
        dest="xtc_dir",
        metavar="PATH",
        help="xtc stream directory"
    ).option(
        None,
        "--use_ffb",
        "-f",
        action="store_true",
        default=False,
        dest="use_ffb",
        help=
        "Use the fast feedback filesystem at LCLS. Only for the active experiment!"
    ).option(
        None,
        "--gain_mask_value",
        "-g",
        type="float",
        default=None,
        dest="gain_mask_value",
        help=
        "Ratio between low and high gain pixels, if CSPAD in mixed-gain mode. Only used in CBF averaging mode."
    ).option(
        None,
        "--min",
        None,
        action="store_true",
        default=False,
        dest="do_minimum_projection",
        help="Output a minimum projection"
    ).option(
        None,
        "--minpath",
        None,
        type="string",
        default="{experiment!l}_min-r{run:04d}",
        dest="minpath",
        metavar="PATH",
        help=
        "Path to output minimum image without extension. String substitution allowed"
    )).process(args=argv)


    if len(command_line.args) > 0 or \
        command_line.options.as_pickle is None or \
        command_line.options.experiment is None or \
        command_line.options.run is None or \
        command_line.options.address is None or \
        command_line.options.detz_offset is None or \
        command_line.options.averagepath is None or \
        command_line.options.stddevpath is None or \
        command_line.options.maxpath is None or \
        command_line.options.pickle_optical_metrology is None:
        command_line.parser.show_help()
        return

    # set this to sys.maxsize to analyze all events
    if command_line.options.numevents is None:
        maxevents = sys.maxsize
    else:
        maxevents = command_line.options.numevents

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    if command_line.options.config is not None:
        psana.setConfigFile(command_line.options.config)
    dataset_name = "exp=%s:run=%d:smd" % (command_line.options.experiment,
                                          command_line.options.run)
    if command_line.options.xtc_dir is not None:
        if command_line.options.use_ffb:
            raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
        dataset_name += ":dir=%s" % command_line.options.xtc_dir
    elif command_line.options.use_ffb:
        # as ffb is only at SLAC, ok to hardcode /reg/d here
        dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc" % (
            command_line.options.experiment[0:3],
            command_line.options.experiment)
    if command_line.options.calib_dir is not None:
        psana.setOption('psana.calib-dir', command_line.options.calib_dir)
    ds = psana.DataSource(dataset_name)
    address = command_line.options.address
    src = psana.Source('DetInfo(%s)' % address)
    nevent = np.array([0.])

    if command_line.options.background_pickle is not None:
        background = easy_pickle.load(
            command_line.options.background_pickle)['DATA'].as_numpy_array()

    for run in ds.runs():
        runnumber = run.run()

        if not command_line.options.as_pickle:
            psana_det = psana.Detector(address, ds.env())

        # list of all events
        if command_line.options.skipevents > 0:
            print("Skipping first %d events" % command_line.options.skipevents)
        elif "Rayonix" in command_line.options.address:
            print("Skipping first image in the Rayonix detector"
                  )  # Shuttering issue
            command_line.options.skipevents = 1

        for i, evt in enumerate(run.events()):
            if i % size != rank: continue
            if i < command_line.options.skipevents: continue
            if i >= maxevents: break
            if i % 10 == 0: print('Rank', rank, 'processing event', i)
            #print "Event #",rank*mylength+i," has id:",evt.get(EventId)
            if 'Rayonix' in command_line.options.address or 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
                data = evt.get(psana.Camera.FrameV1, src)
                if data is None:
                    print("No data")
                    continue
                data = data.data16().astype(np.float64)
            elif command_line.options.as_pickle:
                data = evt.get(psana.ndarray_float64_3, src, 'image0')
            else:
                # get numpy array, 32x185x388
                from xfel.cftbx.detector.cspad_cbf_tbx import get_psana_corrected_data
                if command_line.options.raw_data:
                    data = get_psana_corrected_data(psana_det,
                                                    evt,
                                                    use_default=False,
                                                    dark=False,
                                                    common_mode=None,
                                                    apply_gain_mask=False,
                                                    per_pixel_gain=False)
                else:
                    if command_line.options.gain_mask_value is None:
                        data = get_psana_corrected_data(psana_det,
                                                        evt,
                                                        use_default=True)
                    else:
                        data = get_psana_corrected_data(
                            psana_det,
                            evt,
                            use_default=False,
                            dark=True,
                            common_mode=None,
                            apply_gain_mask=True,
                            gain_mask_value=command_line.options.gain_mask_value,
                            per_pixel_gain=False)

            if data is None:
                print("No data")
                continue

            if command_line.options.background_pickle is not None:
                data -= background

            if 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
                distance = np.array([0.0])
                wavelength = np.array([1.0])
            else:
                d = cspad_tbx.env_distance(address, run.env(),
                                           command_line.options.detz_offset)
                if d is None:
                    print("No distance, using distance",
                          command_line.options.detz_offset)
                    assert command_line.options.detz_offset is not None
                    if 'distance' not in locals():
                        distance = np.array([command_line.options.detz_offset])
                    else:
                        distance += command_line.options.detz_offset
                else:
                    if 'distance' in locals():
                        distance += d
                    else:
                        distance = np.array([float(d)])

                w = cspad_tbx.evt_wavelength(evt)
                if w is None:
                    print("No wavelength")
                    if 'wavelength' not in locals():
                        wavelength = np.array([1.0])
                else:
                    if 'wavelength' in locals():
                        wavelength += w
                    else:
                        wavelength = np.array([w])

            t = cspad_tbx.evt_time(evt)
            if t is None:
                print("No timestamp, skipping shot")
                continue
            if 'timestamp' in locals():
                timestamp += t[0] + (t[1] / 1000)
            else:
                timestamp = np.array([t[0] + (t[1] / 1000)])

            if 'sum' in locals():
                sum += data
            else:
                sum = np.array(data, copy=True)
            if 'sumsq' in locals():
                sumsq += data * data
            else:
                sumsq = data * data
            if 'maximum' in locals():
                maximum = np.maximum(maximum, data)
            else:
                maximum = np.array(data, copy=True)

            if command_line.options.do_minimum_projection:
                if 'minimum' in locals():
                    minimum = np.minimum(minimum, data)
                else:
                    minimum = np.array(data, copy=True)

            nevent += 1

    #sum the images across mpi cores
    if size > 1:
        print("Synchronizing rank", rank)
    totevent = np.zeros(nevent.shape)
    comm.Reduce(nevent, totevent)

    if rank == 0 and totevent[0] == 0:
        raise Sorry("No events found in the run")

    sumall = np.zeros(sum.shape).astype(sum.dtype)
    comm.Reduce(sum, sumall)

    sumsqall = np.zeros(sumsq.shape).astype(sumsq.dtype)
    comm.Reduce(sumsq, sumsqall)

    maxall = np.zeros(maximum.shape).astype(maximum.dtype)
    comm.Reduce(maximum, maxall, op=MPI.MAX)

    if command_line.options.do_minimum_projection:
        minall = np.zeros(minimum.shape).astype(minimum.dtype)
        comm.Reduce(minimum, minall, op=MPI.MIN)

    waveall = np.zeros(wavelength.shape).astype(wavelength.dtype)
    comm.Reduce(wavelength, waveall)

    distall = np.zeros(distance.shape).astype(distance.dtype)
    comm.Reduce(distance, distall)

    timeall = np.zeros(timestamp.shape).astype(timestamp.dtype)
    comm.Reduce(timestamp, timeall)

    if rank == 0:
        if size > 1:
            print("Synchronized")

        # Accumulating floating-point numbers introduces errors,
        # which may cause negative variances.  Since a two-pass
        # approach is unacceptable, the standard deviation is
        # clamped at zero.
        mean = sumall / float(totevent[0])
        variance = (sumsqall / float(totevent[0])) - (mean**2)
        variance[variance < 0] = 0
        stddev = np.sqrt(variance)

        wavelength = waveall[0] / totevent[0]
        distance = distall[0] / totevent[0]
        pixel_size = cspad_tbx.pixel_size
        saturated_value = cspad_tbx.cspad_saturated_value
        timestamp = timeall[0] / totevent[0]
        timestamp = (int(timestamp), timestamp % int(timestamp) * 1000)
        timestamp = cspad_tbx.evt_timestamp(timestamp)

        if command_line.options.as_pickle:
            extension = ".pickle"
        else:
            extension = ".cbf"

        dest_paths = [
            cspad_tbx.pathsubst(command_line.options.averagepath + extension,
                                evt, ds.env()),
            cspad_tbx.pathsubst(command_line.options.stddevpath + extension,
                                evt, ds.env()),
            cspad_tbx.pathsubst(command_line.options.maxpath + extension, evt,
                                ds.env())
        ]
        if command_line.options.do_minimum_projection:
            dest_paths.append(
                cspad_tbx.pathsubst(command_line.options.minpath + extension,
                                    evt, ds.env()))

        dest_paths = [
            os.path.join(command_line.options.outputdir, path)
            for path in dest_paths
        ]
        if 'Rayonix' in command_line.options.address:
            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)
            from xfel.cxi.cspad_ana import rayonix_tbx
            pixel_size = rayonix_tbx.get_rayonix_pixel_size(
                command_line.options.bin_size)
            beam_center = [
                command_line.options.override_beam_x,
                command_line.options.override_beam_y
            ]
            active_areas = flex.int([0, 0, mean.shape[1], mean.shape[0]])
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[1] + \
                "|" + split_address[2] + "-" + split_address[3]
            for data, path in zip(all_data, dest_paths):
                print("Saving", path)
                d = cspad_tbx.dpack(
                    active_areas=active_areas,
                    address=old_style_address,
                    beam_center_x=pixel_size * beam_center[0],
                    beam_center_y=pixel_size * beam_center[1],
                    data=flex.double(data),
                    distance=distance,
                    pixel_size=pixel_size,
                    saturated_value=rayonix_tbx.rayonix_saturated_value,
                    timestamp=timestamp,
                    wavelength=wavelength)
                easy_pickle.dump(path, d)
        elif 'FeeHxSpectrometer' in command_line.options.address or 'XrayTransportDiagnostic' in command_line.options.address:
            all_data = [mean, stddev, maxall]
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[1] + \
                "|" + split_address[2] + "-" + split_address[3]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)
            for data, path in zip(all_data, dest_paths):
                d = cspad_tbx.dpack(address=old_style_address,
                                    data=flex.double(data),
                                    distance=distance,
                                    pixel_size=0.1,
                                    timestamp=timestamp,
                                    wavelength=wavelength)
                print("Saving", path)
                easy_pickle.dump(path, d)
        elif command_line.options.as_pickle:
            split_address = cspad_tbx.address_split(address)
            old_style_address = split_address[0] + "-" + split_address[1] + \
                "|" + split_address[2] + "-" + split_address[3]

            xpp = 'xpp' in address.lower()
            if xpp:
                evt_time = cspad_tbx.evt_time(
                    evt)  # tuple of seconds, milliseconds
                timestamp = cspad_tbx.evt_timestamp(
                    evt_time)  # human readable format
                from iotbx.detectors.cspad_detector_formats import detector_format_version, reverse_timestamp
                from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
                version_lookup = detector_format_version(
                    old_style_address,
                    reverse_timestamp(timestamp)[0])
                assert version_lookup is not None
                active_areas = xpp_active_areas[version_lookup]['active_areas']
                beam_center = [1765 // 2, 1765 // 2]
            else:
                if command_line.options.pickle_calib_dir is not None:
                    metro_path = command_line.options.pickle_calib_dir
                elif command_line.options.pickle_optical_metrology:
                    from xfel.cftbx.detector.cspad_cbf_tbx import get_calib_file_path
                    metro_path = get_calib_file_path(run.env(), address, run)
                else:
                    metro_path = libtbx.env.find_in_repositories(
                        "xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0")
                sections = parse_calib.calib2sections(metro_path)
                beam_center, active_areas = cspad_tbx.cbcaa(
                    cspad_tbx.getConfig(address, ds.env()), sections)

            class fake_quad(object):
                def __init__(self, q, d):
                    self.q = q
                    self.d = d

                def quad(self):
                    return self.q

                def data(self):
                    return self.d

            if xpp:
                quads = [
                    fake_quad(i, mean[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                mean = cspad_tbx.image_xpp(old_style_address,
                                           None,
                                           ds.env(),
                                           active_areas,
                                           quads=quads)
                mean = flex.double(mean.astype(np.float64))

                quads = [
                    fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                stddev = cspad_tbx.image_xpp(old_style_address,
                                             None,
                                             ds.env(),
                                             active_areas,
                                             quads=quads)
                stddev = flex.double(stddev.astype(np.float64))

                quads = [
                    fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                maxall = cspad_tbx.image_xpp(old_style_address,
                                             None,
                                             ds.env(),
                                             active_areas,
                                             quads=quads)
                maxall = flex.double(maxall.astype(np.float64))

                if command_line.options.do_minimum_projection:
                    quads = [
                        fake_quad(i, minall[i * 8:(i + 1) * 8, :, :])
                        for i in range(4)
                    ]
                    minall = cspad_tbx.image_xpp(old_style_address,
                                                 None,
                                                 ds.env(),
                                                 active_areas,
                                                 quads=quads)
                    minall = flex.double(minall.astype(np.float64))
            else:
                quads = [
                    fake_quad(i, mean[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                mean = cspad_tbx.CsPadDetector(address,
                                               evt,
                                               ds.env(),
                                               sections,
                                               quads=quads)
                mean = flex.double(mean.astype(np.float64))

                quads = [
                    fake_quad(i, stddev[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                stddev = cspad_tbx.CsPadDetector(address,
                                                 evt,
                                                 ds.env(),
                                                 sections,
                                                 quads=quads)
                stddev = flex.double(stddev.astype(np.float64))

                quads = [
                    fake_quad(i, maxall[i * 8:(i + 1) * 8, :, :])
                    for i in range(4)
                ]
                maxall = cspad_tbx.CsPadDetector(address,
                                                 evt,
                                                 ds.env(),
                                                 sections,
                                                 quads=quads)
                maxall = flex.double(maxall.astype(np.float64))

                if command_line.options.do_minimum_projection:
                    quads = [
                        fake_quad(i, minall[i * 8:(i + 1) * 8, :, :])
                        for i in range(4)
                    ]
                    minall = cspad_tbx.CsPadDetector(address,
                                                     evt,
                                                     ds.env(),
                                                     sections,
                                                     quads=quads)
                    minall = flex.double(minall.astype(np.float64))

            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)

            for data, path in zip(all_data, dest_paths):
                print("Saving", path)

                d = cspad_tbx.dpack(active_areas=active_areas,
                                    address=old_style_address,
                                    beam_center_x=pixel_size * beam_center[0],
                                    beam_center_y=pixel_size * beam_center[1],
                                    data=data,
                                    distance=distance,
                                    pixel_size=pixel_size,
                                    saturated_value=saturated_value,
                                    timestamp=timestamp,
                                    wavelength=wavelength)

                easy_pickle.dump(path, d)
        else:
            # load a header only cspad cbf from the slac metrology
            from xfel.cftbx.detector import cspad_cbf_tbx
            import pycbf
            base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                run, address)
            if base_dxtbx is None:
                raise Sorry("Couldn't load calibration file for run %d" %
                            run.run())

            all_data = [mean, stddev, maxall]
            if command_line.options.do_minimum_projection:
                all_data.append(minall)

            for data, path in zip(all_data, dest_paths):
                print("Saving", path)
                cspad_img = cspad_cbf_tbx.format_object_from_data(
                    base_dxtbx,
                    data,
                    distance,
                    wavelength,
                    timestamp,
                    address,
                    round_to_int=False)
                cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
                  pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)
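The per-rank accumulators above (sum, sumsq, maximum, nevent) are combined with comm.Reduce and turned into a mean and a clamped standard deviation on rank 0; a compact standalone sketch of that reduction, with small synthetic frames standing in for the 32x185x388 detector data:

# Accumulate-then-Reduce averaging (synthetic frames stand in for detector data).
import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()

frames = np.random.rand(10, 8, 8)            # this rank's share of events
nevent = np.array([float(len(frames))])
total = frames.sum(axis=0)
sumsq = (frames * frames).sum(axis=0)

totevent = np.zeros_like(nevent); comm.Reduce(nevent, totevent)
sumall = np.zeros_like(total);    comm.Reduce(total, sumall)
sumsqall = np.zeros_like(sumsq);  comm.Reduce(sumsq, sumsqall)

if rank == 0:
    mean = sumall / totevent[0]
    variance = sumsqall / totevent[0] - mean ** 2
    variance[variance < 0] = 0               # clamp accumulation error, as above
    stddev = np.sqrt(variance)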
Example #5
    def __init__(self, state):
        self.timestamps = None
        self.library = 'psana'
        config_file = None
        if('LCLS/PsanaConf' in state):
            config_file = os.path.abspath(state['LCLS/PsanaConf'])
        elif('LCLS' in state and 'PsanaConf' in state['LCLS']):
            config_file = os.path.abspath(state['LCLS']['PsanaConf'])
        if(config_file is not None):
            if(not os.path.isfile(config_file)):
                raise RuntimeError("Could not find [LCLS][PsanaConf]: %s" %
                                   (config_file))
            logging.info("Info: Found configuration file %s.", config_file)
            psana.setConfigFile(config_file)

        if 'LCLS/CalibDir' in state:
            calibdir = state['LCLS/CalibDir']
            logging.info("Setting calib-dir to %s" % calibdir)
            psana.setOption('psana.calib-dir', calibdir)
        elif('LCLS' in state and 'CalibDir' in state['LCLS']):
            calibdir = state['LCLS']['CalibDir']
            logging.info("Setting calib-dir to %s" % calibdir)
            psana.setOption('psana.calib-dir', calibdir)

        if('LCLS/DataSource' in state):
            dsrc = state['LCLS/DataSource']
        elif('LCLS' in state and 'DataSource' in state['LCLS']):
            dsrc = state['LCLS']['DataSource']
        else:
            raise ValueError("You need to set the '[LCLS][DataSource]'"
                             " in the configuration")
        
        cmdline_args = parse_cmdline_args()
        self.N = cmdline_args.lcls_number_of_frames          
        if cmdline_args.lcls_run_number is not None:
            dsrc += ":run=%i" % cmdline_args.lcls_run_number

        # Cache times of events that shall be extracted from XTC (does not work for stream)
        self.event_slice = slice(0,None,1)
        if 'times' in state or 'fiducials' in state:
            if not ('times' in state and 'fiducials' in state):
                raise ValueError("Times or fiducials missing in state."
                                 " Extraction of selected events expects both event identifiers")                
            if dsrc[:len('exp=')] != 'exp=':
                raise ValueError("Extraction of events with given times and fiducials"
                                 " only works when reading from XTC with index files")
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            self.times = state['times']
            self.fiducials = state['fiducials']
            self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()                        
        elif 'indexing' in state:
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            if 'index_offset' in state:
                self.i = state['index_offset'] / ipc.mpi.nr_workers()
            else:
                self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()
            self.timestamps = self.run.times()
            if self.N is not None:
                self.timestamps = self.timestamps[:self.N]
            self.timestamps = self.timestamps[ipc.mpi.slave_rank()::ipc.mpi.nr_workers()]
        else:
            self.times = None
            self.fiducials = None
            self.i = 0
            if not dsrc.startswith('shmem='):
                self.event_slice = slice(ipc.mpi.slave_rank(), None, ipc.mpi.nr_workers())
            self.data_source = psana.DataSource(dsrc)
            self.run = None
            
        # Define how to translate between LCLS types and Hummingbird ones
        self._n2c = {}
        self._n2c[psana.Bld.BldDataFEEGasDetEnergy] = 'pulseEnergies'
        self._n2c[psana.Bld.BldDataFEEGasDetEnergyV1] = 'pulseEnergies'
        self._n2c[psana.Lusi.IpmFexV1] = 'pulseEnergies'
        self._n2c[psana.Camera.FrameV1] = 'camera'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.Bld.BldDataEBeamV1] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV2] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV3] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV4] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV5] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV6] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV7] = 'photonEnergies'
        except AttributeError:
            pass
        # CXI (CsPad)
        self._n2c[psana.CsPad.DataV2] = 'photonPixelDetectors'
        self._n2c[psana.CsPad2x2.ElementV1] = 'photonPixelDetectors'
        # CXI (OffAxis Cam)
        #self._n2c[psana.Camera.FrameV1] = 'photonPixelDetectors'
        # AMO (pnCCD)
        self._n2c[psana.PNCCD.FullFrameV1] = 'photonPixelDetectors'
        self._n2c[psana.PNCCD.FramesV1] = 'photonPixelDetectors'
        # --
        self._n2c[psana.Acqiris.DataDescV1] = 'ionTOFs'
        self._n2c[psana.EventId] = 'eventID'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.EvrData.DataV3] = 'eventCodes'
            self._n2c[psana.EvrData.DataV4] = 'eventCodes'
        except AttributeError:
            pass

        # Calculate the inverse mapping
        self._c2n = {}
        for k, v in self._n2c.iteritems():
            self._c2n[v] = self._c2n.get(v, [])
            self._c2n[v].append(k)

        # Define how to translate between LCLS sources and Hummingbird ones
        self._s2c = {}
        # CXI (OnAxis Cam)
        self._s2c['DetInfo(CxiEndstation.0:Opal4000.1)'] = 'Sc2Questar'
        # CXI (OffAxis Cam)
        self._s2c['DetInfo(CxiEndstation.0.Opal11000.0)'] = 'Sc2Offaxis'
        # CXI (CsPad)
        self._s2c['DetInfo(CxiDs1.0:Cspad.0)'] = 'CsPad Ds1'
        self._s2c['DetInfo(CxiDsd.0:Cspad.0)'] = 'CsPad Dsd'
        self._s2c['DetInfo(CxiDs2.0:Cspad.0)'] = 'CsPad Ds2'
        self._s2c['DetInfo(CxiDg3.0:Cspad2x2.0)'] = 'CsPad Dg3'
        self._s2c['DetInfo(CxiDg2.0:Cspad2x2.0)'] = 'CsPad Dg2'
        # AMO (pnCCD)
        self._s2c['DetInfo(Camp.0:pnCCD.1)'] = 'pnccdBack'
        self._s2c['DetInfo(Camp.0:pnCCD.0)'] = 'pnccdFront'
        # ToF detector
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.1)'] = 'Acqiris 1'
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.2)'] = 'Acqiris 2'
        # AMO (Acqiris)
        self._s2c['DetInfo(AmoETOF.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(AmoETOF.0:Acqiris.1)'] = 'Acqiris 1'
        self._s2c['DetInfo(AmoITOF.0:Acqiris.0)'] = 'Acqiris 2'
        self._s2c['DetInfo(AmoITOF.0:Acqiris.1)'] = 'Acqiris 3'

        # MCP Camera
        self._s2c['DetInfo(AmoEndstation.0:Opal1000.1)'] = 'OPAL1'
        # CXI (Acqiris)
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.1)'] = 'Acqiris 1'
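The loop that builds self._c2n simply inverts the type-to-category map, so each Hummingbird category lists every psana type that feeds it; a tiny standalone illustration with string keys standing in for the psana classes:

# Invert a type -> category map into category -> [types] (illustrative keys only).
n2c = {'BldDataEBeamV7': 'photonEnergies',
       'CsPad.DataV2': 'photonPixelDetectors',
       'PNCCD.FullFrameV1': 'photonPixelDetectors'}
c2n = {}
for k, v in n2c.items():
    c2n.setdefault(v, []).append(k)
# c2n now maps 'photonPixelDetectors' to both detector types, etc.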
Example #6
print '\t\t MINITTI BEAMTIME SERVER -- PEW PEW PEW'
print ''
print '*' * 80
print ''

# ZMQ SETUP 
context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.setsockopt(zmq.SNDHWM, 10)
socket.bind("tcp://*:%d" % args.port)
print "Broadcasting via ZMQ on port: %d" % args.port

# PSANA CONFIG FILE - this must be set before a datasource is created
basedir = os.path.split(os.path.abspath( __file__ ))[0]
config_fn = os.path.join(basedir, "minitti.cfg")
psana.setConfigFile(config_fn)
psana.setOption('psana.l3t-accept-only',0)
print "Loading psana config file:    %s" % config_fn

# Acquire the geometry and mask
geometry_filename = '/reg/neh/home2/tjlane/analysis/xppb0114/geometries/v2/q_geom.npy'
print "Loading geometry from:        %s" % geometry_filename
geometry = np.load(geometry_filename).reshape(32,185,388)

mask_filename = '/reg/neh/home2/tjlane/analysis/xppb0114/geometries/v2/mask_v2.npy'
print "Loading pixel mask from:      %s" % mask_filename
mask = np.load(mask_filename).reshape(32,185,388)

# get a vacuum background
f = h5py.File('/reg/neh/home2/tjlane/analysis/xppb0114/averages/r262_laser_avg.h5')
vacuum = (np.array(f['/laser_on']) + np.array(f['/laser_off'])) / 2.0
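A minimal standalone sketch of the ZMQ publisher setup used above, assuming pyzmq is installed; the port and the published payload are placeholders:

# Minimal PUB socket with a bounded send queue (port and payload illustrative only).
import zmq

context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.setsockopt(zmq.SNDHWM, 10)   # keep at most 10 unsent messages queued
socket.bind('tcp://*:5556')
socket.send_pyobj({'shot': 0, 'intensity': 0.0})   # subscribers recv_pyobj() this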
Example #7
    def __init__(self, state):
        self.timestamps = None
        self.library = 'psana'
        config_file = None
        if ('LCLS/PsanaConf' in state):
            config_file = os.path.abspath(state['LCLS/PsanaConf'])
        elif ('LCLS' in state and 'PsanaConf' in state['LCLS']):
            config_file = os.path.abspath(state['LCLS']['PsanaConf'])
        if (config_file is not None):
            if (not os.path.isfile(config_file)):
                raise RuntimeError("Could not find [LCLS][PsanaConf]: %s" %
                                   (config_file))
            logging.info("Info: Found configuration file %s.", config_file)
            psana.setConfigFile(config_file)

        if 'LCLS/CalibDir' in state:
            calibdir = state['LCLS/CalibDir']
            logging.info("Setting calib-dir to %s" % calibdir)
            psana.setOption('psana.calib-dir', calibdir)
        elif ('LCLS' in state and 'CalibDir' in state['LCLS']):
            calibdir = state['LCLS']['CalibDir']
            logging.info("Setting calib-dir to %s" % calibdir)
            psana.setOption('psana.calib-dir', calibdir)

        if ('LCLS/DataSource' in state):
            dsrc = state['LCLS/DataSource']
        elif ('LCLS' in state and 'DataSource' in state['LCLS']):
            dsrc = state['LCLS']['DataSource']
        else:
            raise ValueError("You need to set the '[LCLS][DataSource]'"
                             " in the configuration")

        cmdline_args = parse_cmdline_args()
        self.N = cmdline_args.lcls_number_of_frames
        if cmdline_args.lcls_run_number is not None:
            dsrc += ":run=%i" % cmdline_args.lcls_run_number

        # Cache times of events that shall be extracted from XTC (does not work for stream)
        self.event_slice = slice(0, None, 1)
        if 'times' in state or 'fiducials' in state:
            if not ('times' in state and 'fiducials' in state):
                raise ValueError(
                    "Times or fiducials missing in state."
                    " Extraction of selected events expects both event identifiers"
                )
            if dsrc[:len('exp=')] != 'exp=':
                raise ValueError(
                    "Extraction of events with given times and fiducials"
                    " only works when reading from XTC with index files")
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            self.times = state['times']
            self.fiducials = state['fiducials']
            self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()
        elif 'indexing' in state:
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            if 'index_offset' in state:
                self.i = state['index_offset'] / ipc.mpi.nr_workers()
            else:
                self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()
            self.timestamps = self.run.times()
            if self.N is not None:
                self.timestamps = self.timestamps[:self.N]
            self.timestamps = self.timestamps[ipc.mpi.slave_rank()::ipc.mpi.nr_workers()]
        else:
            self.times = None
            self.fiducials = None
            self.i = 0
            if not dsrc.startswith('shmem='):
                self.event_slice = slice(ipc.mpi.slave_rank(), None,
                                         ipc.mpi.nr_workers())
            self.data_source = psana.DataSource(dsrc)
            self.run = None

        # Define how to translate between LCLS types and Hummingbird ones
        self._n2c = {}
        self._n2c[psana.Bld.BldDataFEEGasDetEnergy] = 'pulseEnergies'
        self._n2c[psana.Bld.BldDataFEEGasDetEnergyV1] = 'pulseEnergies'
        self._n2c[psana.Lusi.IpmFexV1] = 'pulseEnergies'
        self._n2c[psana.Camera.FrameV1] = 'camera'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.Bld.BldDataEBeamV1] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV2] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV3] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV4] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV5] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV6] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV7] = 'photonEnergies'
        except AttributeError:
            pass
        # CXI (CsPad)
        self._n2c[psana.CsPad.DataV2] = 'photonPixelDetectors'
        self._n2c[psana.CsPad2x2.ElementV1] = 'photonPixelDetectors'
        # CXI (OffAxis Cam)
        #self._n2c[psana.Camera.FrameV1] = 'photonPixelDetectors'
        # AMO (pnCCD)
        self._n2c[psana.PNCCD.FullFrameV1] = 'photonPixelDetectors'
        self._n2c[psana.PNCCD.FramesV1] = 'photonPixelDetectors'
        # --
        self._n2c[psana.Acqiris.DataDescV1] = 'ionTOFs'
        self._n2c[psana.EventId] = 'eventID'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.EvrData.DataV3] = 'eventCodes'
            self._n2c[psana.EvrData.DataV4] = 'eventCodes'
        except AttributeError:
            pass

        # Calculate the inverse mapping
        self._c2n = {}
        for k, v in self._n2c.iteritems():
            self._c2n[v] = self._c2n.get(v, [])
            self._c2n[v].append(k)

        # Define how to translate between LCLS sources and Hummingbird ones
        self._s2c = {}
        # CXI (OnAxis Cam)
        self._s2c['DetInfo(CxiEndstation.0:Opal4000.1)'] = 'Sc2Questar'
        # CXI (OffAxis Cam)
        self._s2c['DetInfo(CxiEndstation.0.Opal11000.0)'] = 'Sc2Offaxis'
        # CXI (CsPad)
        self._s2c['DetInfo(CxiDs1.0:Cspad.0)'] = 'CsPad Ds1'
        self._s2c['DetInfo(CxiDsd.0:Cspad.0)'] = 'CsPad Dsd'
        self._s2c['DetInfo(CxiDs2.0:Cspad.0)'] = 'CsPad Ds2'
        self._s2c['DetInfo(CxiDg3.0:Cspad2x2.0)'] = 'CsPad Dg3'
        self._s2c['DetInfo(CxiDg2.0:Cspad2x2.0)'] = 'CsPad Dg2'
        # AMO (pnCCD)
        self._s2c['DetInfo(Camp.0:pnCCD.1)'] = 'pnccdBack'
        self._s2c['DetInfo(Camp.0:pnCCD.0)'] = 'pnccdFront'
        # ToF detector
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.1)'] = 'Acqiris 1'
        self._s2c['DetInfo(AmoEndstation.0:Acqiris.2)'] = 'Acqiris 2'
        # AMO (Acqiris)
        self._s2c['DetInfo(AmoETOF.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(AmoETOF.0:Acqiris.1)'] = 'Acqiris 1'
        self._s2c['DetInfo(AmoITOF.0:Acqiris.0)'] = 'Acqiris 2'
        self._s2c['DetInfo(AmoITOF.0:Acqiris.1)'] = 'Acqiris 3'

        # MCP Camera
        self._s2c['DetInfo(AmoEndstation.0:Opal1000.1)'] = 'OPAL1'
        # CXI (Acqiris)
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.1)'] = 'Acqiris 1'
Example #8
    def run(self):
        """ Process all images assigned to this thread """
        params, options = self.parser.parse_args(show_diff_phil=True)

        if params.input.experiment is None or \
           params.input.run_num is None or \
           params.input.address is None:
            raise Usage(self.usage)

        if params.format.file_format == "cbf":
            if params.format.cbf.detz_offset is None:
                raise Usage(self.usage)
        elif params.format.file_format == "pickle":
            if params.input.cfg is None:
                raise Usage(self.usage)
        else:
            raise Usage(self.usage)

        if not os.path.exists(params.output.output_dir):
            raise Sorry("Output path not found:" + params.output.output_dir)

        #Environment variable redirect for CBFLib temporary CBF_TMP_XYZ file output
        if params.format.file_format == "cbf":
            if params.output.tmp_output_dir is None:
                tmp_dir = os.path.join(params.output.output_dir, '.tmp')
            else:
                tmp_dir = os.path.join(params.output.tmp_output_dir, '.tmp')
            if not os.path.exists(tmp_dir):
                with show_mail_on_error():
                    try:
                        os.makedirs(tmp_dir)
                        # Can fail if running multiprocessed - that's OK if the folder was created
                    except OSError as e:  # In Python 2, a FileExistsError is just an OSError
                        if e.errno != errno.EEXIST:  # If this OSError is not a FileExistsError
                            raise
            os.environ['CBF_TMP_DIR'] = tmp_dir

        # Save the parameters
        self.params = params
        self.options = options

        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
        size = comm.Get_size()  # size: number of processes running in this job

        # set up psana
        if params.input.cfg is not None:
            psana.setConfigFile(params.input.cfg)

        if params.input.calib_dir is not None:
            psana.setOption('psana.calib-dir', params.input.calib_dir)

        dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment,
                                              params.input.run_num)
        if params.input.xtc_dir is not None:
            dataset_name = "exp=%s:run=%s:idx:dir=%s" % (
                params.input.experiment, params.input.run_num,
                params.input.xtc_dir)

        ds = psana.DataSource(dataset_name)

        if params.format.file_format == "cbf":
            src = psana.Source('DetInfo(%s)' % params.input.address)
            psana_det = psana.Detector(params.input.address, ds.env())

        # set this to sys.maxsize to analyze all events
        if params.dispatch.max_events is None:
            max_events = sys.maxsize
        else:
            max_events = params.dispatch.max_events

        for run in ds.runs():
            if params.format.file_format == "cbf":
                if params.format.cbf.mode == "cspad":
                    # load a header only cspad cbf from the slac metrology
                    base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                        run, params.input.address)
                    if base_dxtbx is None:
                        raise Sorry(
                            "Couldn't load calibration file for run %d" %
                            run.run())
                elif params.format.cbf.mode == "rayonix":
                    # load a header only rayonix cbf from the input parameters
                    detector_size = rayonix_tbx.get_rayonix_detector_dimensions(
                        ds.env())
                    base_dxtbx = rayonix_tbx.get_dxtbx_from_params(
                        params.format.cbf.rayonix, detector_size)

            # list of all events
            times = run.times()
            if params.dispatch.selected_events:
                times = [
                    t for t in times
                    if cspad_tbx.evt_timestamp((t.seconds(), t.nanoseconds() /
                                                1e6)) in params.input.timestamp
                ]
            nevents = min(len(times), max_events)
            # chop the list into pieces, depending on rank.  This assigns each process
            # events such that they get every Nth event, where N is the number of processes
            mytimes = [
                times[i] for i in range(nevents) if (i + rank) % size == 0
            ]

            for i in range(len(mytimes)):
                evt = run.event(mytimes[i])
                id = evt.get(psana.EventId)
                print("Event #", i, " has id:", id)

                timestamp = cspad_tbx.evt_timestamp(
                    cspad_tbx.evt_time(evt))  # human readable format
                if timestamp is None:
                    print("No timestamp, skipping shot")
                    continue

                if evt.get("skip_event") or "skip_event" in [
                        key.key() for key in evt.keys()
                ]:
                    print("Skipping event", timestamp)
                    continue

                t = timestamp
                s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + \
                    t[14:16] + t[17:19] + t[20:23]
                print("Processing shot", s)

                if params.format.file_format == "pickle":
                    if evt.get("skip_event"):
                        print("Skipping event", id)
                        continue
                    # the data needs to have already been processed and put into the event by psana
                    data = evt.get(params.format.pickle.out_key)
                    if data is None:
                        print("No data")
                        continue

                    # set output paths according to the templates
                    path = os.path.join(params.output.output_dir,
                                        "shot-" + s + ".pickle")

                    print("Saving", path)
                    easy_pickle.dump(path, data)

                elif params.format.file_format == "cbf":
                    if params.format.cbf.mode == "cspad":
                        # get numpy array, 32x185x388
                        data = cspad_cbf_tbx.get_psana_corrected_data(
                            psana_det,
                            evt,
                            use_default=False,
                            dark=True,
                            common_mode=None,
                            apply_gain_mask=params.format.cbf.cspad.
                            gain_mask_value is not None,
                            gain_mask_value=params.format.cbf.cspad.
                            gain_mask_value,
                            per_pixel_gain=False)

                        distance = cspad_tbx.env_distance(
                            params.input.address, run.env(),
                            params.format.cbf.detz_offset)
                    elif params.format.cbf.mode == "rayonix":
                        data = rayonix_tbx.get_data_from_psana_event(
                            evt, params.input.address)
                        distance = params.format.cbf.detz_offset

                    if distance is None:
                        print("No distance, skipping shot")
                        continue

                    if self.params.format.cbf.override_energy is None:
                        wavelength = cspad_tbx.evt_wavelength(evt)
                        if wavelength is None:
                            print("No wavelength, skipping shot")
                            continue
                    else:
                        wavelength = 12398.4187 / self.params.format.cbf.override_energy

                    # stitch together the header, data and metadata into the final dxtbx format object
                    if params.format.cbf.mode == "cspad":
                        image = cspad_cbf_tbx.format_object_from_data(
                            base_dxtbx,
                            data,
                            distance,
                            wavelength,
                            timestamp,
                            params.input.address,
                            round_to_int=False)
                    elif params.format.cbf.mode == "rayonix":
                        image = rayonix_tbx.format_object_from_data(
                            base_dxtbx, data, distance, wavelength, timestamp,
                            params.input.address)
                    path = os.path.join(params.output.output_dir,
                                        "shot-" + s + ".cbf")
                    print("Saving", path)

                    # write the file
                    import pycbf
                    image._cbf_handle.write_widefile(path.encode(), pycbf.CBF,\
                      pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)

            run.end()
        ds.end()
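The (i + rank) % size == 0 selection above stripes the run's event times across MPI ranks in round-robin fashion, so each process handles every Nth event. A minimal self-contained sketch of that split (plain Python, no psana or MPI; the event list and rank count below are made up for illustration):

def split_events(times, rank, size):
    # keep every Nth event, offset by this process's rank
    return [times[i] for i in range(len(times)) if (i + rank) % size == 0]

if __name__ == "__main__":
    times = list(range(10))   # stand-in for run.times()
    size = 3                  # pretend three MPI processes
    for rank in range(size):
        print("rank", rank, "->", split_events(times, rank, size))
    # rank 0 -> [0, 3, 6, 9], rank 1 -> [2, 5, 8], rank 2 -> [1, 4, 7]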
Ejemplo n.º 9
0
class InMemScript(DialsProcessScript):
    """ Script to process XFEL data at LCLS """
    def __init__(self):
        """ Set up the option parser. Arguments come from the command line or a phil file """
        self.usage = """
%s input.experiment=experimentname input.run_num=N input.address=address
 format.file_format=cbf format.cbf.detz_offset=N
%s input.experiment=experimentname input.run_num=N input.address=address
 format.file_format=pickle input.cfg=filename
    """ % (libtbx.env.dispatcher_name, libtbx.env.dispatcher_name)
        self.parser = OptionParser(usage=self.usage, phil=phil_scope)

        self.debug_file_path = None
        self.debug_str = None
        self.mpi_log_file_path = None

        self.reference_detector = None

    def debug_start(self, ts):
        self.debug_str = "%s,%s" % (socket.gethostname(), ts)
        self.debug_str += ",%s,%s,%s\n"
        self.debug_write("start")

    def debug_write(self, string, state=None):
        ts = cspad_tbx.evt_timestamp()  # Now
        debug_file_handle = open(self.debug_file_path, 'a')
        if string == "":
            debug_file_handle.write("\n")
        else:
            if state is None:
                state = "    "
            debug_file_handle.write(self.debug_str % (ts, state, string))
        debug_file_handle.close()

    def mpi_log_write(self, string):
        mpi_log_file_handle = open(self.mpi_log_file_path, 'a')
        mpi_log_file_handle.write(string)
        mpi_log_file_handle.close()

    def psana_mask_to_dials_mask(self, psana_mask):
        if psana_mask.dtype == np.bool:
            psana_mask = flex.bool(psana_mask)
        else:
            psana_mask = flex.bool(psana_mask == 1)
        assert psana_mask.focus() == (32, 185, 388)
        dials_mask = []
        for i in xrange(32):
            dials_mask.append(psana_mask[i:i + 1, :, :194])
            dials_mask[-1].reshape(flex.grid(185, 194))
            dials_mask.append(psana_mask[i:i + 1, :, 194:])
            dials_mask[-1].reshape(flex.grid(185, 194))
        return dials_mask

    def run(self):
        """ Process all images assigned to this thread """

        params, options = self.parser.parse_args(show_diff_phil=True)

        # Check inputs
        if params.input.experiment is None or \
           params.input.run_num is None or \
           params.input.address is None:
            raise Usage(self.usage)

        if params.format.file_format == "cbf":
            if params.format.cbf.detz_offset is None:
                raise Usage(self.usage)
        elif params.format.file_format == "pickle":
            if params.input.cfg is None:
                raise Usage(self.usage)
        else:
            raise Usage(self.usage)

        if not os.path.exists(params.output.output_dir):
            raise Sorry("Output path not found:" + params.output.output_dir)

        self.params = params
        self.load_reference_geometry()

        # The convention is to put %s in the phil parameter to add a time stamp to
        # each output datafile. Save the initial templates here.
        self.strong_filename_template = params.output.strong_filename
        self.indexed_filename_template = params.output.indexed_filename
        self.refined_experiments_filename_template = params.output.refined_experiments_filename
        self.integrated_filename_template = params.output.integrated_filename
        self.reindexedstrong_filename_template = params.output.reindexedstrong_filename

        # Don't allow the strong reflections to be written unless there are enough to
        # process
        params.output.strong_filename = None

        # Save the parameters
        self.params_cache = copy.deepcopy(params)
        self.options = options

        if params.mp.method == "mpi":
            from mpi4py import MPI
            comm = MPI.COMM_WORLD
            rank = comm.Get_rank(
            )  # each process in MPI has a unique id, 0-indexed
            size = comm.Get_size(
            )  # size: number of processes running in this job
        elif params.mp.method == "sge" and \
            'SGE_TASK_ID'    in os.environ and \
            'SGE_TASK_FIRST' in os.environ and \
            'SGE_TASK_LAST'  in os.environ:
            if 'SGE_STEP_SIZE' in os.environ:
                assert int(os.environ['SGE_STEP_SIZE']) == 1
            if os.environ['SGE_TASK_ID'] == 'undefined' or os.environ[
                    'SGE_TASK_FIRST'] == 'undefined' or os.environ[
                        'SGE_TASK_LAST'] == 'undefined':
                rank = 0
                size = 1
            else:
                rank = int(os.environ['SGE_TASK_ID']) - int(
                    os.environ['SGE_TASK_FIRST'])
                size = int(os.environ['SGE_TASK_LAST']) - int(
                    os.environ['SGE_TASK_FIRST']) + 1
        else:
            rank = 0
            size = 1

        # Configure the logging
        if params.output.logging_dir is None:
            info_path = ''
            debug_path = ''
        else:
            log_path = os.path.join(params.output.logging_dir,
                                    "log_rank%04d.out" % rank)
            error_path = os.path.join(params.output.logging_dir,
                                      "error_rank%04d.out" % rank)
            print "Redirecting stdout to %s" % log_path
            print "Redirecting stderr to %s" % error_path
            sys.stdout = open(log_path, 'a', buffering=0)
            sys.stderr = open(error_path, 'a', buffering=0)
            print "Should be redirected now"

            info_path = os.path.join(params.output.logging_dir,
                                     "info_rank%04d.out" % rank)
            debug_path = os.path.join(params.output.logging_dir,
                                      "debug_rank%04d.out" % rank)

        from dials.util import log
        log.config(params.verbosity, info=info_path, debug=debug_path)

        debug_dir = os.path.join(params.output.output_dir, "debug")
        if not os.path.exists(debug_dir):
            try:
                os.makedirs(debug_dir)
            except OSError, e:
                pass  # due to multiprocessing, makedirs can sometimes fail
        assert os.path.exists(debug_dir)

        if params.debug.skip_processed_events or params.debug.skip_unprocessed_events or params.debug.skip_bad_events:
            print "Reading debug files..."
            self.known_events = {}
            for filename in os.listdir(debug_dir):
                # format: hostname,timestamp_event,timestamp_now,status,detail
                for line in open(os.path.join(debug_dir, filename)):
                    vals = line.strip().split(',')
                    if len(vals) != 5:
                        continue
                    _, ts, _, status, detail = vals
                    if status in ["done", "stop", "fail"]:
                        self.known_events[ts] = status
                    else:
                        self.known_events[ts] = "unknown"

        self.debug_file_path = os.path.join(debug_dir, "debug_%d.txt" % rank)
        write_newline = os.path.exists(self.debug_file_path)
        if write_newline:  # needed if there was a crash
            self.debug_write("")

        if params.mp.method != 'mpi' or params.mp.mpi.method == 'client_server':
            if rank == 0:
                self.mpi_log_file_path = os.path.join(debug_dir, "mpilog.out")
                write_newline = os.path.exists(self.mpi_log_file_path)
                if write_newline:  # needed if there was a crash
                    self.mpi_log_write("\n")

        # set up psana
        if params.input.cfg is not None:
            psana.setConfigFile(params.input.cfg)
        dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment,
                                              params.input.run_num)
        if params.input.xtc_dir is not None:
            if params.input.use_ffb:
                raise Sorry(
                    "Cannot specify the xtc_dir and use SLAC's ffb system")
            dataset_name += ":dir=%s" % params.input.xtc_dir
        elif params.input.use_ffb:
            # as ffb is only at SLAC, ok to hardcode /reg/d here
            dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc" % (
                params.input.experiment[0:3], params.input.experiment)
        if params.input.stream is not None:
            dataset_name += ":stream=%d" % params.input.stream
        ds = psana.DataSource(dataset_name)

        if params.format.file_format == "cbf":
            self.psana_det = psana.Detector(params.input.address, ds.env())

        # set this to sys.maxint to analyze all events
        if params.dispatch.max_events is None:
            max_events = sys.maxint
        else:
            max_events = params.dispatch.max_events

        for run in ds.runs():
            if params.format.file_format == "cbf":
                # load a header only cspad cbf from the slac metrology
                try:
                    self.base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(
                        run, params.input.address)
                except Exception, e:
                    raise Sorry(
                        "Couldn't load calibration file for run %d, %s" %
                        (run.run(), str(e)))
                if self.base_dxtbx is None:
                    raise Sorry("Couldn't load calibration file for run %d" %
                                run.run())

                if params.format.file_format == 'cbf':
                    if params.format.cbf.common_mode.algorithm == "custom":
                        self.common_mode = params.format.cbf.common_mode.custom_parameterization
                        assert self.common_mode is not None
                    else:
                        self.common_mode = params.format.cbf.common_mode.algorithm  # could be None or default

                if params.format.cbf.invalid_pixel_mask is not None:
                    self.dials_mask = easy_pickle.load(
                        params.format.cbf.invalid_pixel_mask)
                    assert len(self.dials_mask) == 64
                    if self.params.format.cbf.mask_nonbonded_pixels:
                        psana_mask = self.psana_det.mask(run,
                                                         calib=False,
                                                         status=False,
                                                         edges=False,
                                                         central=False,
                                                         unbond=True,
                                                         unbondnbrs=True)
                        dials_mask = self.psana_mask_to_dials_mask(psana_mask)
                        self.dials_mask = [
                            self.dials_mask[i] & dials_mask[i]
                            for i in xrange(len(dials_mask))
                        ]
                else:
                    psana_mask = self.psana_det.mask(run,
                                                     calib=True,
                                                     status=True,
                                                     edges=True,
                                                     central=True,
                                                     unbond=True,
                                                     unbondnbrs=True)
                    self.dials_mask = self.psana_mask_to_dials_mask(psana_mask)

            if self.params.spotfinder.lookup.mask is not None:
                self.spotfinder_mask = easy_pickle.load(
                    self.params.spotfinder.lookup.mask)
            else:
                self.spotfinder_mask = None
            if self.params.integration.lookup.mask is not None:
                self.integration_mask = easy_pickle.load(
                    self.params.integration.lookup.mask)
            else:
                self.integration_mask = None

            # list of all events
            times = run.times()
            nevents = min(len(times), max_events)
            times = times[:nevents]
            if params.dispatch.process_percent is not None:
                import fractions
                percent = params.dispatch.process_percent / 100.
                f = fractions.Fraction(percent).limit_denominator(100)
                times = [
                    times[i] for i in xrange(len(times))
                    if i % f.denominator < f.numerator
                ]
                print "Dividing %d of %d events (%4.1f%%) between all processes" % (
                    len(times), nevents, 100 * len(times) / nevents)
                nevents = len(times)
            else:
                print "Dividing %d events between all processes" % nevents
            if params.mp.method == "mpi" and params.mp.mpi.method == 'client_server' and size > 2:
                print "Using MPI client server"
                # use a client/server approach to be sure every process is busy as much as possible
                # only do this if there are more than 2 processes, as one process will be a server
                try:
                    if rank == 0:
                        # server process
                        self.mpi_log_write("MPI START\n")
                        for t in times:
                            # a client process will indicate it's ready by sending its rank
                            self.mpi_log_write(
                                "Getting next available process\n")
                            rankreq = comm.recv(source=MPI.ANY_SOURCE)
                            ts = cspad_tbx.evt_timestamp(
                                (t.seconds(), t.nanoseconds() / 1e6))
                            self.mpi_log_write(
                                "Process %s is ready, sending ts %s\n" %
                                (rankreq, ts))
                            comm.send(t, dest=rankreq)
                        # send a stop command to each process
                        self.mpi_log_write("MPI DONE, sending stops\n")
                        for rankreq in range(size - 1):
                            self.mpi_log_write(
                                "Getting next available process\n")
                            rankreq = comm.recv(source=MPI.ANY_SOURCE)
                            self.mpi_log_write("Sending stop to %d\n" %
                                               rankreq)
                            comm.send('endrun', dest=rankreq)
                    else:
                        # client process
                        while True:
                            # inform the server this process is ready for an event
                            comm.send(rank, dest=0)
                            evttime = comm.recv(source=0)
                            if evttime == 'endrun': break
                            self.process_event(run, evttime)
                except Exception, e:
                    print "Error caught in main loop"
                    print str(e)
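The client/server branch above keeps every worker busy: rank 0 hands out one event timestamp at a time to whichever rank reports itself ready, then sends 'endrun' to shut the workers down. A stripped-down sketch of the same mpi4py pattern, with a placeholder work list and a placeholder process_item function standing in for self.process_event (not part of the original script):

# run with e.g.: mpirun -n 4 python client_server_sketch.py
from mpi4py import MPI

def process_item(item):
    # placeholder for the real per-event processing
    print("processed", item)

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()

if rank == 0:
    work = list(range(20))  # stand-in for the list of event timestamps
    for item in work:
        ready_rank = comm.recv(source=MPI.ANY_SOURCE)  # a worker announces it is ready
        comm.send(item, dest=ready_rank)               # hand it one work item
    for _ in range(size - 1):                          # then tell every worker to stop
        ready_rank = comm.recv(source=MPI.ANY_SOURCE)
        comm.send('endrun', dest=ready_rank)
else:
    while True:
        comm.send(rank, dest=0)        # announce readiness
        item = comm.recv(source=0)
        if item == 'endrun':
            break
        process_item(item)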
Ejemplo n.º 10
0
def average(argv=None):
  if argv == None:
    argv = sys.argv[1:]

  try:
    from mpi4py import MPI
  except ImportError:
    raise Sorry("MPI not found")

  command_line = (libtbx.option_parser.option_parser(
    usage="""
%s [-p] -c config -x experiment -a address -r run -d detz_offset [-o outputdir] [-A averagepath] [-S stddevpath] [-M maxpath] [-n numevents] [-s skipnevents] [-v] [-m] [-b bin_size] [-X override_beam_x] [-Y override_beam_y] [-D xtc_dir] [-f]

To write image pickles use -p, otherwise the program writes CSPAD CBFs.
Writing CBFs requires the geometry to be already deployed.

Examples:
cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

Use one process on the current node to process all the events from run 25 of
experiment cxi49812, using a detz_offset of 571.

mpirun -n 16 cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571

As above, using 16 cores on the current node.

bsub -a mympi -n 100 -o average.out -q psanaq cxi.mpi_average -c cxi49812/average.cfg -x cxi49812 -a CxiDs1.0:Cspad.0 -r 25 -d 571 -o cxi49812

As above, using the psanaq and 100 cores, putting the log in average.out and
the output images in the folder cxi49812.
""" % libtbx.env.dispatcher_name)
                .option(None, "--as_pickle", "-p",
                        action="store_true",
                        default=False,
                        dest="as_pickle",
                        help="Write results as image pickle files instead of cbf files")
                .option(None, "--config", "-c",
                        type="string",
                        default=None,
                        dest="config",
                        metavar="PATH",
                        help="psana config file")
                .option(None, "--experiment", "-x",
                        type="string",
                        default=None,
                        dest="experiment",
                        help="experiment name (eg cxi84914)")
                .option(None, "--run", "-r",
                        type="int",
                        default=None,
                        dest="run",
                        help="run number")
                .option(None, "--address", "-a",
                        type="string",
                        default="CxiDs2.0:Cspad.0",
                        dest="address",
                        help="detector address name (eg CxiDs2.0:Cspad.0)")
                .option(None, "--detz_offset", "-d",
                        type="float",
                        default=None,
                        dest="detz_offset",
                        help="offset (in mm) from sample interaction region to back of CSPAD detector rail (CXI), or detector distance (XPP)")
                .option(None, "--outputdir", "-o",
                        type="string",
                        default=".",
                        dest="outputdir",
                        metavar="PATH",
                        help="Optional path to output directory for output files")
                .option(None, "--averagebase", "-A",
                        type="string",
                        default="{experiment!l}_avg-r{run:04d}",
                        dest="averagepath",
                        metavar="PATH",
                        help="Path to output average image without extension. String substitution allowed")
                .option(None, "--stddevbase", "-S",
                        type="string",
                        default="{experiment!l}_stddev-r{run:04d}",
                        dest="stddevpath",
                        metavar="PATH",
                        help="Path to output standard deviation image without extension. String substitution allowed")
                .option(None, "--maxbase", "-M",
                        type="string",
                        default="{experiment!l}_max-r{run:04d}",
                        dest="maxpath",
                        metavar="PATH",
                        help="Path to output maximum projection image without extension. String substitution allowed")
                .option(None, "--numevents", "-n",
                        type="int",
                        default=None,
                        dest="numevents",
                        help="Maximum number of events to process. Default: all")
                .option(None, "--skipevents", "-s",
                        type="int",
                        default=0,
                        dest="skipevents",
                        help="Number of events in the beginning of the run to skip. Default: 0")
                .option(None, "--verbose", "-v",
                        action="store_true",
                        default=False,
                        dest="verbose",
                        help="Print more information about progress")
                .option(None, "--pickle-optical-metrology", "-m",
                        action="store_true",
                        default=False,
                        dest="pickle_optical_metrology",
                        help="If writing pickle files, use the optical metrology in the experiment's calib directory")
                .option(None, "--bin_size", "-b",
                        type="int",
                        default=None,
                        dest="bin_size",
                        help="Rayonix detector bin size")
                .option(None, "--override_beam_x", "-X",
                        type="float",
                        default=None,
                        dest="override_beam_x",
                        help="Rayonix detector beam center x coordinate")
                .option(None, "--override_beam_y", "-Y",
                        type="float",
                        default=None,
                        dest="override_beam_y",
                        help="Rayonix detector beam center y coordinate")
                .option(None, "--calib_dir", "-C",
                        type="string",
                        default=None,
                        dest="calib_dir",
                        metavar="PATH",
                        help="calibration directory")
                .option(None, "--xtc_dir", "-D",
                        type="string",
                        default=None,
                        dest="xtc_dir",
                        metavar="PATH",
                        help="xtc stream directory")
                .option(None, "--use_ffb", "-f",
                        action="store_true",
                        default=False,
                        dest="use_ffb",
                        help="Use the fast feedback filesystem at LCLS. Only for the active experiment!")
                ).process(args=argv)


  if len(command_line.args) > 0 or \
      command_line.options.as_pickle is None or \
      command_line.options.experiment is None or \
      command_line.options.run is None or \
      command_line.options.address is None or \
      command_line.options.detz_offset is None or \
      command_line.options.averagepath is None or \
      command_line.options.stddevpath is None or \
      command_line.options.maxpath is None or \
      command_line.options.pickle_optical_metrology is None:
    command_line.parser.show_help()
    return

  # set this to sys.maxint to analyze all events
  if command_line.options.numevents is None:
    maxevents = sys.maxint
  else:
    maxevents = command_line.options.numevents

  comm = MPI.COMM_WORLD
  rank = comm.Get_rank()
  size = comm.Get_size()

  if command_line.options.config is not None:
    psana.setConfigFile(command_line.options.config)
  dataset_name = "exp=%s:run=%d:idx"%(command_line.options.experiment, command_line.options.run)
  if command_line.options.xtc_dir is not None:
    if command_line.options.use_ffb:
      raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
    dataset_name += ":dir=%s"%command_line.options.xtc_dir
  elif command_line.options.use_ffb:
    # as ffb is only at SLAC, ok to hardcode /reg/d here
    dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc"%(command_line.options.experiment[0:3],command_line.options.experiment)
  ds = psana.DataSource(dataset_name)
  address = command_line.options.address
  src = psana.Source('DetInfo(%s)'%address)
  if not command_line.options.as_pickle:
    psana_det = psana.Detector(address, ds.env())

  nevent = np.array([0.])

  for run in ds.runs():
    runnumber = run.run()
    # list of all events
    if command_line.options.skipevents > 0:
      print "Skipping first %d events"%command_line.options.skipevents

    times = run.times()[command_line.options.skipevents:]
    nevents = min(len(times),maxevents)
    # chop the list into pieces, depending on rank.  This assigns each process
    # events such that they get every Nth event where N is the number of processes
    mytimes = [times[i] for i in xrange(nevents) if (i+rank)%size == 0]
    for i in xrange(len(mytimes)):
      if i%10==0: print 'Rank',rank,'processing event',rank*len(mytimes)+i,', ',i,'of',len(mytimes)
      evt = run.event(mytimes[i])
      #print "Event #",rank*mylength+i," has id:",evt.get(EventId)
      if 'Rayonix' in command_line.options.address:
        data = evt.get(Camera.FrameV1,src)
        if data is None:
          print "No data"
          continue
        data=data.data16().astype(np.float64)
      elif command_line.options.as_pickle:
        data = evt.get(psana.ndarray_float64_3, src, 'image0')
      else:
        # get numpy array, 32x185x388
        data = psana_det.calib(evt) # applies psana's complex run-dependent calibrations
      if data is None:
        print "No data"
        continue

      d = cspad_tbx.env_distance(address, run.env(), command_line.options.detz_offset)
      if d is None:
        print "No distance, skipping shot"
        continue
      if 'distance' in locals():
        distance += d
      else:
        distance = np.array([float(d)])

      w = cspad_tbx.evt_wavelength(evt)
      if w is None:
        print "No wavelength, skipping shot"
        continue
      if 'wavelength' in locals():
        wavelength += w
      else:
        wavelength = np.array([w])

      t = cspad_tbx.evt_time(evt)
      if t is None:
        print "No timestamp, skipping shot"
        continue
      if 'timestamp' in locals():
        timestamp += t[0] + (t[1]/1000.)
      else:
        timestamp = np.array([t[0] + (t[1]/1000.)])

      if 'sum' in locals():
        sum+=data
      else:
        sum=np.array(data, copy=True)
      if 'sumsq' in locals():
        sumsq+=data*data
      else:
        sumsq=data*data
      if 'maximum' in locals():
        maximum=np.maximum(maximum,data)
      else:
        maximum=np.array(data, copy=True)

      nevent += 1

  #sum the images across mpi cores
  if size > 1:
    print "Synchronizing rank", rank
  totevent = np.zeros(nevent.shape)
  comm.Reduce(nevent,totevent)

  if rank == 0 and totevent[0] == 0:
    raise Sorry("No events found in the run")

  sumall = np.zeros(sum.shape).astype(sum.dtype)
  comm.Reduce(sum,sumall)

  sumsqall = np.zeros(sumsq.shape).astype(sumsq.dtype)
  comm.Reduce(sumsq,sumsqall)

  maxall = np.zeros(maximum.shape).astype(maximum.dtype)
  comm.Reduce(maximum,maxall, op=MPI.MAX)

  waveall = np.zeros(wavelength.shape).astype(wavelength.dtype)
  comm.Reduce(wavelength,waveall)

  distall = np.zeros(distance.shape).astype(distance.dtype)
  comm.Reduce(distance,distall)

  timeall = np.zeros(timestamp.shape).astype(timestamp.dtype)
  comm.Reduce(timestamp,timeall)

  if rank==0:
    if size > 1:
      print "Synchronized"

    # Accumulating floating-point numbers introduces errors,
    # which may cause negative variances.  Since a two-pass
    # approach is unacceptable, the standard deviation is
    # clamped at zero.
    mean = sumall / float(totevent[0])
    variance = (sumsqall / float(totevent[0])) - (mean**2)
    variance[variance < 0] = 0
    stddev = np.sqrt(variance)

    wavelength = waveall[0] / totevent[0]
    distance = distall[0] / totevent[0]
    pixel_size = cspad_tbx.pixel_size
    saturated_value = cspad_tbx.cspad_saturated_value
    timestamp = timeall[0] / totevent[0]
    timestamp = (int(timestamp), timestamp % int(timestamp) * 1000)
    timestamp = cspad_tbx.evt_timestamp(timestamp)


    if command_line.options.as_pickle:
      extension = ".pickle"
    else:
      extension = ".cbf"

    dest_paths = [cspad_tbx.pathsubst(command_line.options.averagepath + extension, evt, ds.env()),
                  cspad_tbx.pathsubst(command_line.options.stddevpath  + extension, evt, ds.env()),
                  cspad_tbx.pathsubst(command_line.options.maxpath     + extension, evt, ds.env())]
    dest_paths = [os.path.join(command_line.options.outputdir, path) for path in dest_paths]
    if 'Rayonix' in command_line.options.address:
      from xfel.cxi.cspad_ana import rayonix_tbx
      pixel_size = rayonix_tbx.get_rayonix_pixel_size(command_line.options.bin_size)
      beam_center = [command_line.options.override_beam_x,command_line.options.override_beam_y]
      detector_dimensions = rayonix_tbx.get_rayonix_detector_dimensions(command_line.options.bin_size)
      active_areas = flex.int([0,0,detector_dimensions[0],detector_dimensions[1]])
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[1] + "|" + split_address[2] + "-" + split_address[3]
      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path
        d = cspad_tbx.dpack(
            active_areas=active_areas,
            address=old_style_address,
            beam_center_x=pixel_size * beam_center[0],
            beam_center_y=pixel_size * beam_center[1],
            data=flex.double(data),
            distance=distance,
            pixel_size=pixel_size,
            saturated_value=rayonix_tbx.rayonix_saturated_value,
            timestamp=timestamp,
            wavelength=wavelength)
        easy_pickle.dump(path, d)
    elif command_line.options.as_pickle:
      split_address = cspad_tbx.address_split(address)
      old_style_address = split_address[0] + "-" + split_address[1] + "|" + split_address[2] + "-" + split_address[3]

      xpp = 'xpp' in address.lower()
      if xpp:
        evt_time = cspad_tbx.evt_time(evt) # tuple of seconds, milliseconds
        timestamp = cspad_tbx.evt_timestamp(evt_time) # human readable format
        from xfel.detector_formats import detector_format_version, reverse_timestamp
        from xfel.cxi.cspad_ana.cspad_tbx import xpp_active_areas
        version_lookup = detector_format_version(old_style_address, reverse_timestamp(timestamp)[0])
        assert version_lookup is not None
        active_areas = xpp_active_areas[version_lookup]['active_areas']
        beam_center = [1765 // 2, 1765 // 2]
      else:
        if command_line.options.calib_dir is not None:
          metro_path = command_line.options.calib_dir
        elif command_line.options.pickle_optical_metrology:
          from xfel.cftbx.detector.cspad_cbf_tbx import get_calib_file_path
          metro_path = get_calib_file_path(run.env(), address, run)
        else:
          metro_path = libtbx.env.find_in_repositories("xfel/metrology/CSPad/run4/CxiDs1.0_Cspad.0")
        sections = parse_calib.calib2sections(metro_path)
        beam_center, active_areas = cspad_tbx.cbcaa(
          cspad_tbx.getConfig(address, ds.env()), sections)

      class fake_quad(object):
        def __init__(self, q, d):
          self.q = q
          self.d = d

        def quad(self):
          return self.q

        def data(self):
          return self.d

      if xpp:
        quads = [fake_quad(i, mean[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        mean = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        mean = flex.double(mean.astype(np.float64))

        quads = [fake_quad(i, stddev[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        stddev = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        stddev = flex.double(stddev.astype(np.float64))

        quads = [fake_quad(i, maxall[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        maxall = cspad_tbx.image_xpp(old_style_address, None, ds.env(), active_areas, quads = quads)
        maxall = flex.double(maxall.astype(np.float64))
      else:
        quads = [fake_quad(i, mean[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        mean = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        mean = flex.double(mean.astype(np.float64))

        quads = [fake_quad(i, stddev[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        stddev = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        stddev = flex.double(stddev.astype(np.float64))

        quads = [fake_quad(i, maxall[i*8:(i+1)*8,:,:]) for i in xrange(4)]
        maxall = cspad_tbx.CsPadDetector(
          address, evt, ds.env(), sections, quads=quads)
        maxall = flex.double(maxall.astype(np.float64))

      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path

        d = cspad_tbx.dpack(
          active_areas=active_areas,
          address=old_style_address,
          beam_center_x=pixel_size * beam_center[0],
          beam_center_y=pixel_size * beam_center[1],
          data=data,
          distance=distance,
          pixel_size=pixel_size,
          saturated_value=saturated_value,
          timestamp=timestamp,
          wavelength=wavelength)

        easy_pickle.dump(path, d)
    else:
      # load a header only cspad cbf from the slac metrology
      from xfel.cftbx.detector import cspad_cbf_tbx
      import pycbf
      base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, address)
      if base_dxtbx is None:
        raise Sorry("Couldn't load calibration file for run %d"%run.run())

      for data, path in zip([mean, stddev, maxall], dest_paths):
        print "Saving", path

        cspad_img = cspad_cbf_tbx.format_object_from_data(base_dxtbx, data, distance, wavelength, timestamp, address)
        cspad_img._cbf_handle.write_widefile(path, pycbf.CBF,\
          pycbf.MIME_HEADERS|pycbf.MSG_DIGEST|pycbf.PAD_4K, 0)
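The averaging above accumulates per-event sums, sums of squares and maxima, reduces them across MPI ranks, and only then forms the mean and a standard deviation whose variance is clamped at zero (single-pass statistics can go slightly negative through round-off, as the comment notes). A small numpy-only sketch of that final step, with random arrays standing in for the reduced detector images:

import numpy as np

nevents = 100
frames = np.random.rand(nevents, 4, 4)        # stand-in for the per-event images
sumall = frames.sum(axis=0)                   # as if from comm.Reduce(sum, sumall)
sumsqall = (frames * frames).sum(axis=0)      # as if from comm.Reduce(sumsq, sumsqall)

mean = sumall / float(nevents)
variance = sumsqall / float(nevents) - mean**2
variance[variance < 0] = 0                    # clamp round-off-induced negatives
stddev = np.sqrt(variance)

print(mean.shape, stddev.min() >= 0)          # (4, 4) True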
Ejemplo n.º 11
0
    def run(self):
        """ Process all images assigned to this thread """
        params, options = self.parser.parse_args(show_diff_phil=True)

        if params.input.experiment is None or params.input.run_num is None or params.input.address is None:
            raise Usage(self.usage)

        if params.format.file_format == "cbf":
            if params.format.cbf.detz_offset is None:
                raise Usage(self.usage)
        elif params.format.file_format == "pickle":
            if params.format.pickle.cfg is None:
                raise Usage(self.usage)
        else:
            raise Usage(self.usage)

        if not os.path.exists(params.output.output_dir):
            raise Sorry("Output path not found:" + params.output.output_dir)

        # Save the parameters
        self.params = params
        self.options = options

        from mpi4py import MPI

        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()  # each process in MPI has a unique id, 0-indexed
        size = comm.Get_size()  # size: number of processes running in this job

        # set up psana
        if params.format.file_format == "pickle":
            psana.setConfigFile(params.format.pickle.cfg)

        dataset_name = "exp=%s:run=%s:idx" % (params.input.experiment, params.input.run_num)
        ds = psana.DataSource(dataset_name)

        if params.format.file_format == "cbf":
            src = psana.Source("DetInfo(%s)" % params.input.address)
            psana_det = psana.Detector(params.input.address, ds.env())

        # set this to sys.maxint to analyze all events
        if params.dispatch.max_events is None:
            max_events = sys.maxint
        else:
            max_events = params.dispatch.max_events

        for run in ds.runs():
            if params.format.file_format == "cbf":
                # load a header only cspad cbf from the slac metrology
                base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, params.input.address)
                if base_dxtbx is None:
                    raise Sorry("Couldn't load calibration file for run %d" % run.run())

                if params.format.cbf.gain_mask_value is not None:
                    gain_mask = psana_det.gain_mask(gain=params.format.cbf.gain_mask_value)

            # list of all events
            times = run.times()
            nevents = min(len(times), max_events)
            # chop the list into pieces, depending on rank.  This assigns each process
            # events such that they get every Nth event where N is the number of processes
            mytimes = [times[i] for i in xrange(nevents) if (i + rank) % size == 0]

            for i in xrange(len(mytimes)):
                evt = run.event(mytimes[i])
                id = evt.get(psana.EventId)
                print "Event #", i, " has id:", id

                timestamp = cspad_tbx.evt_timestamp(cspad_tbx.evt_time(evt))  # human readable format
                if timestamp is None:
                    print "No timestamp, skipping shot"
                    continue
                t = timestamp
                s = t[0:4] + t[5:7] + t[8:10] + t[11:13] + t[14:16] + t[17:19] + t[20:23]
                print "Processing shot", s

                if params.format.file_format == "pickle":
                    if evt.get("skip_event"):
                        print "Skipping event", id
                        continue
                    # the data needs to have already been processed and put into the event by psana
                    data = evt.get(params.format.pickle.out_key)
                    if data is None:
                        print "No data"
                        continue

                    # set output paths according to the templates
                    path = os.path.join(params.output.output_dir, "shot-" + s + ".pickle")

                    print "Saving", path
                    easy_pickle.dump(path, data)

                elif params.format.file_format == "cbf":
                    # get numpy array, 32x185x388
                    data = psana_det.calib(evt)  # applies psana's complex run-dependent calibrations

                    if params.format.cbf.gain_mask_value is not None:
                        # apply gain mask
                        data *= gain_mask

                    distance = cspad_tbx.env_distance(params.input.address, run.env(), params.format.cbf.detz_offset)
                    if distance is None:
                        print "No distance, skipping shot"
                        continue

                    if self.params.format.cbf.override_energy is None:
                        wavelength = cspad_tbx.evt_wavelength(evt)
                        if wavelength is None:
                            print "No wavelength, skipping shot"
                            continue
                    else:
                        wavelength = 12398.4187 / self.params.format.cbf.override_energy

                    # stitch together the header, data and metadata into the final dxtbx format object
                    cspad_img = cspad_cbf_tbx.format_object_from_data(
                        base_dxtbx, data, distance, wavelength, timestamp, params.input.address
                    )
                    path = os.path.join(params.output.output_dir, "shot-" + s + ".cbf")
                    print "Saving", path

                    # write the file
                    import pycbf

                    cspad_img._cbf_handle.write_widefile(
                        path, pycbf.CBF, pycbf.MIME_HEADERS | pycbf.MSG_DIGEST | pycbf.PAD_4K, 0
                    )

            run.end()
        ds.end()
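When format.cbf.override_energy is given, the scripts above convert the photon energy in eV to a wavelength in Angstrom by dividing the constant 12398.4187 (h*c in eV * Angstrom) by the energy. A one-line sketch of that conversion; the 9.5 keV value is just an example:

HC_EV_ANGSTROM = 12398.4187  # h*c in eV * Angstrom, the constant used above

def energy_ev_to_wavelength(energy_ev):
    # photon energy in eV -> wavelength in Angstrom
    return HC_EV_ANGSTROM / energy_ev

print(energy_ev_to_wavelength(9500.0))  # ~1.305 Angstrom for a 9.5 keV beam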
Ejemplo n.º 12
0
    def __init__(self, state):
        self.timestamps = None
        config_file = None
        if('LCLS/PsanaConf' in state):
            config_file = os.path.abspath(state['LCLS/PsanaConf'])
        elif('LCLS' in state and 'PsanaConf' in state['LCLS']):
            config_file = os.path.abspath(state['LCLS']['PsanaConf'])
        if(config_file is not None):
            if(not os.path.isfile(config_file)):
                raise RuntimeError("Could not find [LCLS][PsanaConf]: %s" %
                                   (config_file))
            logging.info("Info: Found configuration file %s.", config_file)
            psana.setConfigFile(config_file)

        if('LCLS/DataSource' in state):
            dsrc = state['LCLS/DataSource']
        elif('LCLS' in state and 'DataSource' in state['LCLS']):
            dsrc = state['LCLS']['DataSource']
        else:
            raise ValueError("You need to set the '[LCLS][DataSource]'"
                             " in the configuration")

        # Cache times of events that shall be extracted from XTC (does not work for stream)
        if 'times' in state or 'fiducials' in state:
            if not ('times' in state and 'fiducials' in state):
                raise ValueError("Times or fiducials missing in state."
                                 " Extraction of selected events expects both event identifiers")                
            if dsrc[:len('exp=')] != 'exp=':
                raise ValueError("Extraction of events with given times and fiducials"
                                 " only works when reading from XTC with index files")
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            self.times = state['times']
            self.fiducials = state['fiducials']
            self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()                        
        elif 'do_full_run' in state:
            if dsrc[-len(':idx'):] != ':idx':
                dsrc += ':idx'
            self.i = 0
            self.data_source = psana.DataSource(dsrc)
            self.run = self.data_source.runs().next()
            self.timestamps = self.run.times()[ipc.mpi.slave_rank()::ipc.mpi.nr_workers()]
        else:
            self.times = None
            self.fiducials = None
            self.i = None
            self.data_source = psana.DataSource(dsrc)
            self.run = None

        # Define how to translate between LCLS types and Hummingbird ones
        self._n2c = {}
        self._n2c[psana.Bld.BldDataFEEGasDetEnergy] = 'pulseEnergies'
        self._n2c[psana.Bld.BldDataFEEGasDetEnergyV1] = 'pulseEnergies'
        self._n2c[psana.Lusi.IpmFexV1] = 'pulseEnergies'
        self._n2c[psana.Camera.FrameV1] = 'camera'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.Bld.BldDataEBeamV1] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV2] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV3] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV4] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV5] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV6] = 'photonEnergies'
            self._n2c[psana.Bld.BldDataEBeamV7] = 'photonEnergies'
        except AttributeError:
            pass
        self._n2c[psana.CsPad.DataV2] = 'photonPixelDetectors'
        self._n2c[psana.CsPad2x2.ElementV1] = 'photonPixelDetectors'
        # AMO
        self._n2c[psana.PNCCD.FullFrameV1] = 'photonPixelDetectors'
        self._n2c[psana.PNCCD.FramesV1] = 'photonPixelDetectors'
        # --
        self._n2c[psana.Acqiris.DataDescV1] = 'ionTOFs'
        self._n2c[psana.EventId] = 'eventID'
        # Guard against old(er) psana versions
        try:
            self._n2c[psana.EvrData.DataV3] = 'eventCodes'
            self._n2c[psana.EvrData.DataV4] = 'eventCodes'
        except AttributeError:
            pass

        # Calculate the inverse mapping
        self._c2n = {}
        for k, v in self._n2c.iteritems():
            self._c2n[v] = self._c2n.get(v, [])
            self._c2n[v].append(k)

        # Define how to translate between LCLS sources and Hummingbird ones
        self._s2c = {}
        self._s2c['DetInfo(CxiEndstation.0:Opal4000.1)'] = 'Sc2Questar'
        self._s2c['DetInfo(CxiDs1.0:Cspad.0)'] = 'CsPad Ds1'
        self._s2c['DetInfo(CxiDsd.0:Cspad.0)'] = 'CsPad Dsd'
        self._s2c['DetInfo(CxiDs2.0:Cspad.0)'] = 'CsPad Ds2'
        self._s2c['DetInfo(CxiDg3.0:Cspad2x2.0)'] = 'CsPad Dg3'
        self._s2c['DetInfo(CxiDg2.0:Cspad2x2.0)'] = 'CsPad Dg2'
        # AMO
        self._s2c['DetInfo(Camp.0:pnCCD.1)'] = 'pnccdBack'
        self._s2c['DetInfo(Camp.0:pnCCD.0)'] = 'pnccdFront'
        # --
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.0)'] = 'Acqiris 0'
        self._s2c['DetInfo(CxiEndstation.0:Acqiris.1)'] = 'Acqiris 1'
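The translator above maps many psana data types onto a single Hummingbird category (_n2c) and then inverts that dictionary into a category-to-type-list lookup (_c2n). A tiny standalone sketch of the inversion in plain Python, with strings standing in for the psana type objects:

# invert a many-to-one mapping into a one-to-many lookup, as done for _n2c -> _c2n
n2c = {
    'BldDataFEEGasDetEnergy': 'pulseEnergies',  # stand-ins for psana type objects
    'IpmFexV1': 'pulseEnergies',
    'CsPad.DataV2': 'photonPixelDetectors',
    'EventId': 'eventID',
}

c2n = {}
for key, category in n2c.items():
    c2n.setdefault(category, []).append(key)

print(c2n['pulseEnergies'])  # ['BldDataFEEGasDetEnergy', 'IpmFexV1']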
Ejemplo n.º 13
0
  def run(self):
    """ Process all images assigned to this thread """

    params, options = self.parser.parse_args(
      show_diff_phil=True)

    # Configure the logging
    from dials.util import log
    log.config(params.verbosity)

    # Check inputs
    if params.input.experiment is None or \
       params.input.run_num is None or \
       params.input.address is None:
      raise Usage(self.usage)

    if params.format.file_format == "cbf":
      if params.format.cbf.detz_offset is None:
        raise Usage(self.usage)
    elif params.format.file_format == "pickle":
      if params.format.pickle.cfg is None:
        raise Usage(self.usage)
    else:
      raise Usage(self.usage)

    if not os.path.exists(params.output.output_dir):
      raise Sorry("Output path not found:" + params.output.output_dir)

    # The convention is to put %s in the phil parameter to add a time stamp to
    # each output datafile. Save the initial templates here.
    self.strong_filename_template              = params.output.strong_filename
    self.indexed_filename_template             = params.output.indexed_filename
    self.refined_experiments_filename_template = params.output.refined_experiments_filename
    self.integrated_filename_template          = params.output.integrated_filename

    # Don't allow the strong reflections to be written unless there are enough to
    # process
    params.output.strong_filename = None

    # Save the parameters
    self.params_cache = copy.deepcopy(params)
    self.options = options

    if params.mp.method == "mpi":
      from mpi4py import MPI
      comm = MPI.COMM_WORLD
      rank = comm.Get_rank() # each process in MPI has a unique id, 0-indexed
      size = comm.Get_size() # size: number of processes running in this job
    elif params.mp.method == "sge" and \
        'SGE_TASK_ID'    in os.environ and \
        'SGE_TASK_FIRST' in os.environ and \
        'SGE_TASK_LAST'  in os.environ:
      if 'SGE_STEP_SIZE' in os.environ:
        assert int(os.environ['SGE_STEP_SIZE']) == 1
      if os.environ['SGE_TASK_ID'] == 'undefined' or os.environ['SGE_TASK_FIRST'] == 'undefined' or os.environ['SGE_TASK_LAST'] == 'undefined':
        rank = 0
        size = 1
      else:
        rank = int(os.environ['SGE_TASK_ID']) - int(os.environ['SGE_TASK_FIRST'])
        size = int(os.environ['SGE_TASK_LAST']) - int(os.environ['SGE_TASK_FIRST']) + 1
    else:
      rank = 0
      size = 1

    if params.output.logging_dir is not None:
      log_path = os.path.join(params.output.logging_dir, "log_rank%04d.out"%rank)
      error_path = os.path.join(params.output.logging_dir, "error_rank%04d.out"%rank)
      print "Redirecting stdout to %s"%log_path
      print "Redirecting stderr to %s"%error_path
      assert os.path.exists(log_path)
      sys.stdout = open(log_path,'a', buffering=0)
      sys.stderr = open(error_path,'a',buffering=0)
      print "Should be redirected now"

    debug_dir = os.path.join(params.output.output_dir, "debug")
    if not os.path.exists(debug_dir):
      os.makedirs(debug_dir)

    if params.debug.skip_processed_events or params.debug.skip_unprocessed_events or params.debug.skip_bad_events:
      print "Reading debug files..."
      self.known_events = {}
      for filename in os.listdir(debug_dir):
        # format: hostname,timestamp,status
        for line in open(os.path.join(debug_dir, filename)):
          vals = line.strip().split(',')
          if len(vals) == 2:
            self.known_events[vals[1]] = "unknown"
          elif len(vals) == 3:
            self.known_events[vals[1]] = vals[2]

    debug_file_path = os.path.join(debug_dir, "debug_%d.txt"%rank)
    write_newline = os.path.exists(debug_file_path)
    self.debug_file_handle = open(debug_file_path, 'a', 0) # 0 for unbuffered
    if write_newline: # needed if there was a crash
      self.debug_file_handle.write("\n")

    # set up psana
    if params.format.file_format=="pickle":
      psana.setConfigFile(params.format.pickle.cfg)
    dataset_name = "exp=%s:run=%s:idx"%(params.input.experiment,params.input.run_num)
    if params.input.xtc_dir is not None:
      if params.input.use_ffb:
        raise Sorry("Cannot specify the xtc_dir and use SLAC's ffb system")
      dataset_name += ":dir=%s"%params.input.xtc_dir
    elif params.input.use_ffb:
      # as ffb is only at SLAC, ok to hardcode /reg/d here
      dataset_name += ":dir=/reg/d/ffb/%s/%s/xtc"%(params.input.experiment[0:3],params.input.experiment)
    ds = psana.DataSource(dataset_name)

    if params.format.file_format == "cbf":
      self.psana_det = psana.Detector(params.input.address, ds.env())

    # set this to sys.maxint to analyze all events
    if params.dispatch.max_events is None:
      max_events = sys.maxint
    else:
      max_events = params.dispatch.max_events

    for run in ds.runs():
      if params.format.file_format == "cbf":
        # load a header only cspad cbf from the slac metrology
        self.base_dxtbx = cspad_cbf_tbx.env_dxtbx_from_slac_metrology(run, params.input.address)
        if self.base_dxtbx is None:
          raise Sorry("Couldn't load calibration file for run %d"%run.run())

        if params.format.cbf.gain_mask_value is not None:
          self.gain_mask = self.psana_det.gain_mask(gain=params.format.cbf.gain_mask_value)

      # list of all events
      times = run.times()
      nevents = min(len(times),max_events)
      if params.mp.method == "mpi" and size > 2:
        # use a client/server approach to be sure every process is busy as much as possible
        # only do this if there are more than 2 processes, as one process will be a server
        if rank == 0:
          # server process
          for t in times[:nevents]:
            # a client process will indicate it's ready by sending its rank
            rankreq = comm.recv(source=MPI.ANY_SOURCE)
            comm.send(t,dest=rankreq)
          # send a stop command to each process
          for rankreq in range(size-1):
            rankreq = comm.recv(source=MPI.ANY_SOURCE)
            comm.send('endrun',dest=rankreq)
        else:
          # client process
          while True:
            # inform the server this process is ready for an event
            comm.send(rank,dest=0)
            evttime = comm.recv(source=0)
            if evttime == 'endrun': break
            self.process_event(run, evttime)
      else:
        # chop the list into pieces, depending on rank.  This assigns each process
        # events such that they get every Nth event where N is the number of processes
        mytimes = [times[i] for i in xrange(nevents) if (i+rank)%size == 0]

        for i in xrange(len(mytimes)):
          self.process_event(run, mytimes[i])

      run.end()
    ds.end()
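When run under SGE rather than MPI, the scripts above derive a rank and a size from the array-job environment variables SGE_TASK_ID, SGE_TASK_FIRST and SGE_TASK_LAST. A small sketch of that derivation with fabricated environment values (for illustration only):

import os

# fabricated SGE array-job environment, for illustration only
os.environ.update({'SGE_TASK_ID': '5', 'SGE_TASK_FIRST': '1', 'SGE_TASK_LAST': '16'})

if os.environ.get('SGE_TASK_ID', 'undefined') == 'undefined':
    rank, size = 0, 1  # not an array job: run as a single process
else:
    rank = int(os.environ['SGE_TASK_ID']) - int(os.environ['SGE_TASK_FIRST'])
    size = int(os.environ['SGE_TASK_LAST']) - int(os.environ['SGE_TASK_FIRST']) + 1

print(rank, size)  # 4 16 for the values above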
Ejemplo n.º 14
0
#!/usr/bin/env python

"""
Core classes. Based on work from TJ Lane
"""

import sys
import psana
import mpi4py
import mpi4py.MPI
import math
import time
import datetime

cfg_path = '/reg/neh/home2/tjlane/analysis/ssc-com/ssc/psana.cfg' # hardwired for now :(
psana.setConfigFile(cfg_path)



def dirname_from_source_runs(source):
    """
    Returns a directory name based on the source string
    """
    start = source.find('run=') + 4
    stop = source.find(':idx')
    if stop == -1:
        stop = len(source)
    runs = source[start:stop]
    nums = runs.split(',')
    if len(nums) == 0:
        nums = runs
    dirname = 'run_' + "_".join(nums)
    return dirname
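A couple of illustrative calls to dirname_from_source_runs, assuming the function returns the dirname it builds (the source strings below are made up):

print(dirname_from_source_runs('exp=cxic0415:run=25:idx'))    # run_25
print(dirname_from_source_runs('exp=cxic0415:run=25,26,27'))  # run_25_26_27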
Ejemplo n.º 15
0
import os

import numpy as np
import psana

from psdata import ImageData
from psdata import IqPlotData

from utilities import *

# =============================================
s_runs = [182, 183]
p_runs = [193, 194]
# ==============================================

# PSANA CONFIG FILE - this must be set before a datasource is created
basedir = os.path.split(os.path.abspath( __file__ ))[0]
config_fn = os.path.join(basedir, "minitti.cfg")
psana.setConfigFile(config_fn)
psana.setOption('psana.l3t-accept-only',0)
print "Loading psana config file:    %s" % config_fn

# Aquire the geometry and mask
geometry_filename = '/reg/neh/home2/tjlane/analysis/xppb0114/geometries/v2/q_geom.npy'
print "Loading geometry from:        %s" % geometry_filename
geometry = np.load(geometry_filename).reshape(32,185,388)

mask_filename = '/reg/neh/home2/tjlane/analysis/xppb0114/geometries/v2/mask_v2.npy'
print "Loading pixel mask from:      %s" % mask_filename
mask = np.load(mask_filename).reshape(32,185,388)


cspad_src  = psana.Source('DetInfo(XppGon.0:Cspad.0)')
evr_src    = psana.Source('DetInfo(NoDetector.0:Evr.0)')
Ejemplo n.º 16
0
import time

from matplotlib import pyplot as plt
import psana

from pypad import cspad
from pypad.read import enforce_raw_img_shape as eris

plt.ion()

psana.setConfigFile('/reg/data/ana14/cxi/cxif7214/res/cfg/cxif7214.cfg')

cspad_ds1_src = psana.Source('DetInfo(CxiDs1.0:Cspad.0)')
evr_src = psana.Source('DetInfo(NoDetector.0:Evr.0)')
aqr_src = psana.Source('DetInfo(CxiEndstation.0:Acqiris.0)')

ds = psana.DataSource('exp=cxif7214:run=64')

d = cspad.CSPad.load(
    '/reg/data/ana14/cxi/cxif7214/scratch/averages/approx.cspad')

for evt in ds.events():

    a = evt.get(psana.Acqiris.DataDescV1, aqr_src)
    ch = a.data(5)

    fifos = evt.get(psana.EvrData.DataV3, evr_src).fifoEvents()
    evrs = [e.eventCode() for e in fifos]

    ds1 = evt.get(psana.ndarray_float32_3, cspad_ds1_src, 'calibrated_ndarr')