Example #1
    def init_manager(self, _file):

        # The larcv ProcessDriver owns the IOManager we use to access the data
        self._driver = larcv.ProcessDriver('ProcessDriver')
        self._driver.configure(self._config)
        self._io_manager = self._driver.io()

        # Meta keeps track of information about number of planes, visible
        # regions, etc.:
        self._meta = event_meta3D()

        # _drawnClasses is a dict of the items currently being drawn.
        self._drawnClasses = dict()

        if _file is not None:
            flist = larcv.VectorOfString()
            if type(_file) is list:
                for f in _file:
                    flist.push_back(f)
            else:
                flist.push_back(_file)
            self._driver.override_input_file(flist)

        self._driver.initialize()
        self.go_to_entry(0)

        self.refresh_meta()
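A minimal usage sketch for this method, assuming it belongs to a viewer-style class (called EvdManager3D here purely for illustration) whose constructor stores the larcv configuration in self._config:

# hypothetical call site; the class name, config path, and file names are placeholders
manager = EvdManager3D("evd3d.cfg")
manager.init_manager(["larcv_events_a.root", "larcv_events_b.root"])
manager.go_to_entry(10)   # navigate entries with the same interface used above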
Example #2
def CosmicSegger(a):
    proc = larcv.ProcessDriver('ProcessDriver')
    #ABSOLUTELY NEED DOUBLE QUOTES??
    if a == 1:
        proc.configure('/user/jhenzerling/work/NEUsoft/Modules/SS/NEWCFG.cfg')
    elif a == 2:
        proc.configure('/user/jhenzerling/work/NEUsoft/Modules/SS/NEWCFG2.cfg')
    else:
        # unrecognized index: nothing was configured, so stop here
        print('CosmicSegger: unrecognized config index', a)
        return
    proc.initialize()
    proc.batch_process()
    proc.finalize()
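Example #2 above is the minimal ProcessDriver lifecycle: configure, initialize, batch-process every entry, finalize. A hedged, generic restatement of the same pattern (the config path is a placeholder):

from larcv import larcv

proc = larcv.ProcessDriver('ProcessDriver')
proc.configure('my_processors.cfg')   # placeholder path to a ProcessDriver config
proc.initialize()
proc.batch_process()                  # run all entries through the configured processors
proc.finalize()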
Example #3
def deploy_sparselarflow_on_files(larcv_outfile,
                                  larlite_outfile,
                                  filelist,
                                  weightfile,
                                  model_name="dualflow_classvec_v2",
                                  adc_producer="wire",
                                  chstatus_producer='wire',
                                  cropper_cfg="cropflow_processor.cfg",
                                  flow="dual",
                                  devicename="cpu",
                                  run_reco_flowhits=True,
                                  run_truth_flowhits=True,
                                  save_full_adc=False,
                                  save_cropped_adc=False,
                                  save_cropped_trueflow=False,
                                  run_stitcher=False,
                                  has_mc=False,
                                  threshold=10.0,
                                  maxentries=-1):

    import os
    import time
    import numpy as np
    import torch
    from larlite import larlite
    from larcv import larcv
    from ublarcvapp import ublarcvapp
    from larflow import larflow
    from ROOT import std

    from sparsemodels import load_models
    from load_cropped_sparse_dualflow import load_croppedset_sparse_dualflow_nomc

    device = torch.device(devicename)
    model = load_models(model_name, weight_file=weightfile, device=devicename)
    model.eval()

    out = larcv.IOManager(larcv.IOManager.kWRITE, "stitched")
    out.set_out_file(larcv_outfile)
    out.initialize()

    out_ll = larlite.storage_manager(larlite.storage_manager.kWRITE)
    out_ll.set_out_filename(larlite_outfile)
    out_ll.open()

    dt_tot = 0.0
    dt_net = 0.0  # running the network
    dt_data = 0.0  # preparing data (split/crop)
    dt_aten = 0.0  # turn data into torch tensors
    dt_flow = 0.0  # making flow
    dt_result = 0.0  # preparing output images

    ttot = time.time()

    # first create cfg file if does not exist
    if not os.path.exists(cropper_cfg):
        print("Writing new copper config: ", cropper_cfg)
        from crop_processor_cfg import fullsplit_processor_config
        f = open(cropper_cfg, 'w')
        f.write(fullsplit_processor_config(adc_producer, chstatus_producer))
        f.close()

    splitter = larcv.ProcessDriver("ProcessDriver")
    print("CONFIGURE SPLITTER: ", cropper_cfg)
    splitter.configure(cropper_cfg)

    # add files to iomanager
    io = splitter.io_mutable()

    if type(filelist) is str:
        filelist = [filelist]

    for inputfile in filelist:
        io.add_in_file(inputfile)

    # initialize splitter
    splitter.initialize()
    nentries = io.get_n_entries()
    if maxentries > 0 and maxentries < nentries:
        nentries = maxentries

    nimgs = 0
    nevents = 0
    for ientry in xrange(nentries):

        tdata = time.time()
        io.read_entry(ientry)
        ev_img = io.get_data(larcv.kProductImage2D, adc_producer)

        run = ev_img.run()
        subrun = ev_img.subrun()
        event = ev_img.event()

        print("[Entry {}] {}".format(ientry, (run, subrun, event)))

        adc_v = ev_img.Image2DArray()
        adc_copy_v = std.vector("larcv::Image2D")()
        for i in xrange(adc_v.size()):
            adc_copy_v.push_back(adc_v.at(i))

        splitter.process_entry(ientry, False, False)

        if run_stitcher:
            stitcher = ublarcvapp.UBSparseFlowStitcher(adc_v)

        ev_crops = io.get_data(larcv.kProductImage2D, "croppedadc")
        crop_v = ev_crops.Image2DArray()
        print("  number of crops: {}".format(crop_v.size()))

        # get sparse numpy arrays
        data = load_croppedset_sparse_dualflow_nomc(io)
        dt_data += time.time() - tdata

        # container for network output
        ev_outdualflow_v = out.get_data(larcv.kProductSparseImage,
                                        "cropdualflow")

        # torch tensors
        for iset, sparse_np in enumerate(data["pixadc"]):

            taten = time.time()

            ncoords = sparse_np.shape[0]
            print("deploy net: iset[{}] ncoords={}".format(iset, ncoords))

            # make tensor for coords (row,col,batch)
            coord_t = torch.from_numpy(sparse_np[:, 0:2].astype(
                np.int32)).to(device)

            # tensor for src pixel adcs
            srcpix_t = torch.from_numpy(sparse_np[:, 4].reshape(
                (ncoords, 1))).to(device)
            # tensor for target pixel adcs
            tarpix_flow1_t = torch.from_numpy(sparse_np[:, 2].reshape(
                (ncoords, 1))).to(device)
            if flow == 'dual':
                tarpix_flow2_t = torch.from_numpy(sparse_np[:, 3].reshape(
                    (ncoords, 1))).to(device)
            else:
                tarpix_flow2_t = None

            dt_aten += time.time() - taten

            # Run NETWORK
            tnet = time.time()
            with torch.set_grad_enabled(False):
                predict1_t, predict2_t = model(coord_t, srcpix_t,
                                               tarpix_flow1_t, tarpix_flow2_t,
                                               1)
            dt_net += time.time() - tnet
            #print("predict1_t shape",predict1_t.features.shape)

            # convert class vector output back to flow
            # find max, then subtract off source pix
            if model_name in ['dualflow_classvec_v2']:
                # get arg max
                maxcol1 = torch.argmax(predict1_t.features.detach(), 1)
                maxcol2 = torch.argmax(predict2_t.features.detach(), 1)
                # subtract source column
                flowout1_t = (maxcol1.type(torch.FloatTensor) -
                              coord_t[:, 1].type(torch.FloatTensor)).reshape(
                                  (ncoords, 1))
                flowout2_t = (maxcol2.type(torch.FloatTensor) -
                              coord_t[:, 1].type(torch.FloatTensor)).reshape(
                                  (ncoords, 1))
            else:
                flowout1_t = predict1_t.features
                flowout2_t = predict2_t.features

            # back to numpy array
            tresult = time.time()

            meta_v = std.vector("larcv::ImageMeta")()
            yplane_meta = crop_v.at(iset * 3 + 2).meta()
            meta_v.push_back(yplane_meta)
            meta_v.push_back(yplane_meta)

            result_np = np.zeros((ncoords, 4), dtype=np.float32)
            result_np[:, 0:2] = sparse_np[:, 0:2]
            result_np[:, 2] = flowout1_t.detach().cpu().numpy()[:, 0]
            result_np[:, 3] = flowout2_t.detach().cpu().numpy()[:, 0]

            # store raw result
            sparse_raw = larcv.sparseimg_from_ndarray(result_np, meta_v,
                                                      larcv.msg.kDEBUG)
            ev_outdualflow_v.Append(sparse_raw)

            # prepare for stitcher
            if run_stitcher:
                result_np[:, 2][sparse_np[:, 4] < 10.0] = -1000
                result_np[:, 3][sparse_np[:, 4] < 10.0] = -1000
                sparse_result = larcv.sparseimg_from_ndarray(
                    result_np, meta_v, larcv.msg.kDEBUG)
                stitcher.addSparseData(sparse_result,
                                       crop_v.at(iset * 3 + 0).meta(),
                                       crop_v.at(iset * 3 + 1).meta())

            dt_result += time.time() - tresult
            nimgs += 1

        # make flow hits
        # --------------
        tflow = time.time()
        if run_reco_flowhits:
            print("Make Reco Flow Hits")
            larflowhits_v = larflow.makeFlowHitsFromSparseCrops(
                adc_v, ev_outdualflow_v.SparseImageArray(), threshold,
                "ubcroptrueflow.cfg", larcv.msg.kINFO)

        if has_mc and run_truth_flowhits:
            print("Make Truth Flow Hits")
            ev_chstatus = io.get_data(larcv.kProductChStatus,
                                      chstatus_producer)
            ev_trueflow = io.get_data(larcv.kProductImage2D, "larflow")
            trueflowhits_v = larflow.makeTrueFlowHitsFromWholeImage(
                adc_v, ev_chstatus, ev_trueflow.Image2DArray(), threshold,
                "ubcroptrueflow.cfg", larcv.msg.kINFO)

        dt_flow += time.time() - tflow

        # store
        # --------
        # full image
        tresult = time.time()

        if save_full_adc:
            out_wire = out.get_data(larcv.kProductImage2D, "wire")
            for p in xrange(3):
                out_wire.Append(adc_v.at(p))

        # cropped image
        if save_cropped_adc:
            out_crop = out.get_data(larcv.kProductImage2D, "cropadc")
            for iimg in xrange(crop_v.size()):
                out_crop.Append(crop_v.at(iimg))
            print("saved ", crop_v.size(), " adc crops")

        if save_cropped_trueflow:
            ev_trueflow_crops = io.get_data(larcv.kProductImage2D,
                                            "croppedflow")
            out_trueflow_crops = out.get_data(larcv.kProductImage2D,
                                              "croptrueflow")
            for iimg in xrange(ev_trueflow_crops.Image2DArray().size()):
                out_trueflow_crops.Append(
                    ev_trueflow_crops.Image2DArray().at(iimg))
            print("saved ",
                  out_trueflow_crops.Image2DArray().size(), " true flow crops")

        # save stitched output
        if run_stitcher:
            out_y2u = out.get_data(larcv.kProductImage2D, "larflowy2u")
            out_y2u.Append(stitcher._outimg_v.at(0))
            out_y2v = out.get_data(larcv.kProductImage2D, "larflowy2v")
            out_y2v.Append(stitcher._outimg_v.at(1))

        # save larflow hits
        if run_reco_flowhits:
            ev_larflowhits = out_ll.get_data(larlite.data.kLArFlow3DHit,
                                             "flowhits")
            for ihit in xrange(larflowhits_v.size()):
                ev_larflowhits.push_back(larflowhits_v.at(ihit))
        if has_mc and run_truth_flowhits:
            ev_trueflowhits = out_ll.get_data(larlite.data.kLArFlow3DHit,
                                              "trueflowhits")
            for ihit in xrange(trueflowhits_v.size()):
                ev_trueflowhits.push_back(trueflowhits_v.at(ihit))

        # set id
        out.set_id(run, subrun, event)
        out_ll.set_id(run, subrun, event)

        # save entry
        out.save_entry()
        out_ll.next_event()

        dt_result += time.time() - tresult

        # clear the entry from the processor's iomanager
        io.clear_entry()
        nevents += 1

    dt_tot = time.time() - ttot

    print("Total run time: %.3f secs" % (dt_tot))
    print("  Data loading time: %.3f secs (%.3f secs/event)" %
          (dt_data, dt_data / nevents))
    print("  Prepare data for net: %.3f secs (%.3f secs/image)" %
          (dt_aten, dt_aten / nevents))
    print("  Net running time: %.3f secs (%.3f secs/event, %.3f secs/image)" %
          (dt_net, dt_net / nevents, dt_net / nimgs))
    print("  FlowHits running time: %.3f secs (%.3f secs/image)" %
          (dt_flow, dt_flow / nevents))
    print("  Result conversion: %.3f secs (%.3f secs/image)" %
          (dt_result, dt_result / nevents))

    out.finalize()
    out_ll.close()
    splitter.finalize()

    return None
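A hedged call sketch for the function above; every path and the entry limit are placeholders, and the weight file and cropper config are assumed to exist on disk:

deploy_sparselarflow_on_files("out_larcv_dualflow.root",
                              "out_larlite_flowhits.root",
                              ["supera_run1.root", "supera_run2.root"],
                              "checkpoint_dualflow_classvec_v2.tar",
                              model_name="dualflow_classvec_v2",
                              devicename="cpu",
                              has_mc=False,
                              maxentries=5)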
Example #4
from larcv import larcv
import ROOT, sys
from ROOT import std

if len(sys.argv) < 4:
    print 'Usage: python', sys.argv[
        0], 'CONFIG_FILE ANA_FILE OUTPUT_FILE [LARCV_FILE1 LARCV_FILE2 ...]'
    sys.exit(1)

proc = larcv.ProcessDriver('ProcessDriver')
print "ARGS: ", str(sys.argv)
print "Loading config... ", sys.argv[1]
proc.configure(sys.argv[1])
print "Loaded"
print sys.argv
if len(sys.argv) > 4:
    flist = ROOT.std.vector('std::string')()
    for x in xrange(len(sys.argv) - 4):
        print "Pushing back...", sys.argv[x + 4]
        flist.push_back(sys.argv[x + 4])

    proc.override_input_file(flist)

proc.override_ana_file(sys.argv[2] + ".root")
proc.override_output_file(sys.argv[3] + ".root")
proc.initialize()

larbys_cheater_id = proc.process_id("LArbysImageCheater")
larbys_cheater = proc.process_ptr(larbys_cheater_id)
print "GOT: ", larbys_cheater, "@ id=", larbys_cheater_id
Example #5
    def __init__(self,
                 input_rootfile,
                 output_rootfile,
                 batch_size,
                 identity,
                 broker_ipaddress,
                 product_dict,
                 random_access=False,
                 copy_input=False,
                 port=5559,
                 timeout_secs=30,
                 max_tries=3,
                 do_compress=True,
                 process_croi=False,
                 print_msg_sizes=False,
                 input_croiprocessor_cfg="input_croiprocessor_default.cfg",
                 input_precropped_cfg="input_precropped_default.cfg",
                 output_croiprocessor_cfg="output_croiprocessor_default.cfg"):

        # Call Mother
        super(CaffeLArCV1Client, self).__init__(identity,
                                                broker_ipaddress,
                                                port=port,
                                                timeout_secs=timeout_secs,
                                                max_tries=max_tries,
                                                do_compress=do_compress)

        # Save paths for ROOT Files
        # ----------------------
        if type(input_rootfile) is str:
            self.input_rootfiles = [input_rootfile]
        elif type(input_rootfile) is list:
            self.input_rootfiles = input_rootfile
        else:
            raise ValueError("input_rootfile argument should be str or list")
        self.output_rootfile = output_rootfile
        if self.output_rootfile in self.input_rootfiles:
            raise ValueError("input and output rootfiles are the same!")

        # number of planes
        # ----------------
        self.NPLANES = 3

        # setup input
        # -----------
        # we handle input via a larcv::ProcessDriver. how we process input depends on whether process_croi is True.
        # [to do] we want to be able to override the producer names in the file,
        #   so we use a template. but if the user supplies a config, we trust that they set things up correctly.
        if process_croi:
            # we assume we have to crop out of the CROI regions
            self.input_processor_cfg = input_croiprocessor_cfg
        else:
            self.input_processor_cfg = input_precropped_cfg
        self.in_proc = larcv.ProcessDriver('InputProcessDriver')
        self.in_proc.configure(self.input_processor_cfg)
        infiles_v = std.vector("string")()
        for infile in self.input_rootfiles:
            infiles_v.push_back(infile)
        self.in_proc.override_input_file(infiles_v)
        self.in_proc.initialize()
        self.io = self.in_proc.io()
        if process_croi:
            # get access to processors. we will get our cropped images from them
            self.croicroppers = []
            for p in range(self.NPLANES):
                self.croicroppers.append(
                    self.in_proc.process_ptr(
                        self.in_proc.process_id("MultiROICropperP%d" % (p))))

        # setup output
        # ------------
        if process_croi:
            # if we are processing CROIs, we need to use the PyStitcher Processor
            self.output_processor_cfg = output_croiprocessor_cfg
            self.out_proc = larcv.ProcessDriver('OutputProcessDriver')
            self.out_proc.configure(self.output_processor_cfg)
            self.py_image_makers = []
            for p in range(self.NPLANES):
                self.py_image_makers.append(
                    self.out_proc.process_ptr(
                        self.out_proc.process_id("PyImageStitcherP%d" % (p))))
                self.py_image_makers[p].set_producer_name('uburn_plane%d' %
                                                          (p))
            self.out_proc.override_output_file(self.output_rootfile)
            self.out_proc.initialize()
            self.io_out = self.out_proc.io()
        else:
            # if we are just processing precropped images
            if not copy_input:
                self.io_out = larcv.IOManager(larcv.IOManager.kWRITE)
            else:
                self.io_out = larcv.IOManager(larcv.IOManager.kBOTH)
                for infile in self.input_rootfiles:
                    self.io_out.add_in_file(infile)
            # set output file
            self.io_out.set_out_file(self.output_rootfile)
            self.io_out.initialize()

        # other parameters
        # ----------------
        self.batch_size = batch_size
        self.nentries = self.io.get_n_entries()
        self.randomize = random_access
        self.last_entry = 0
        self.delivered = 0
        self.permuted = None
        self.totserved = 0
        self.compression_level = 6
        self.print_msg_sizes = print_msg_sizes
        self.process_croi = process_croi
        if process_croi:
            # if processing croi, we override the 'batchsize' to be 1,
            # i.e. we process one event at a time
            self.batch_size = 1

        if type(product_dict) is dict:
            self.product_dict = product_dict
        elif product_dict is None:
            self.product_dict = {}
        else:
            raise ValueError(
                "product_dict should be a dictionary of {larcv product enum:producer name }"
            )

        # dictionaries used to store event/image data
        # -----------------------------------------
        self.imgdata_shape = {}
        self.imgdata_dict = {}
        self.imgmeta_dict = {}
        self.batch2rse = None
        self.current_rse = None
        for ktype, producer_name in self.product_dict.items():
            if ktype != larcv.kProductImage2D:
                raise RuntimeError(
                    "Does not support loading product id=%d, currently" %
                    (ktype))
            self.imgdata_shape[(ktype, producer_name)] = None
            self.imgdata_dict[(ktype, producer_name)] = None
            self.imgmeta_dict[producer_name] = None

        # timing trackers to monitor performance
        # -----------------------------------------
        self._ttracker["getbatch::indexing"] = 0.0
        self._ttracker["getbatch::total"] = 0.0
        self._ttracker["getbatch::fileio"] = 0.0
        self._ttracker["getbatch::fill"] = 0.0
        self._ttracker["makemessage::total"] = 0.0
        self._ttracker["savereply::total"] = 0.0
Example #6
    if argv == 'plane1':
        PLANEID = argv
        MODEL = MODELMAP['plane1']
    if argv == 'plane2':
        PLANEID = argv
        MODEL = MODELMAP['plane2']

if not PLANEID or not MODEL:
    print 'Valid plane id not provided!'
    raise Exception()

INCFG = 'pyana_in_%s.cfg' % PLANEID
print "Using input config:", INCFG
debug = 'debug' in sys.argv

out_proc = larcv.ProcessDriver('OutputProcessDriver')
out_proc.configure(OUTCFG)
py_image_maker = out_proc.process_ptr(out_proc.process_id("PyImageStitcher"))
py_image_maker.set_producer_name('uburn_%s' % PLANEID)
out_proc.override_output_file('larcv_fcn_plane%s.root' % PLANEID)
out_proc.initialize()

in_proc = larcv.ProcessDriver('InputProcessDriver')
in_proc.configure(INCFG)
in_proc.override_input_file(flist)
in_proc.initialize()
cropper = in_proc.process_ptr(in_proc.process_id("WholeImageCropper"))
il._rows = cropper.target_rows()
il._cols = cropper.target_cols()

net = caffe.Net(PROTO, MODEL, caffe.TEST)
Example #7
args = parser.parse_args(sys.argv[1:])

import ROOT as rt
from ROOT import std
from larcv import larcv
larcv.load_pyutil()
from ublarcvapp import ublarcvapp
from larflow import larflow

rt.gStyle.SetOptStat(0)

inputfiles = std.vector("string")()
for inputfile in args.input_larcv:
    inputfiles.push_back(inputfile)

driver = larcv.ProcessDriver("ProcessDriver")
driver.configure(args.config)
driver.override_input_file(inputfiles)
driver.override_ana_file(args.out)

processors = driver.process_map()
tripletmaker = driver.process_ptr(
    processors.find("MatchTripletProcessor").second)

driver.initialize()

nentries = driver.io().get_n_entries()
if args.num_events is not None and nentries > args.num_events:
    nentries = args.num_events

for ientry in xrange(nentries):
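    # (the original snippet ends at this loop header; the body below is an assumed,
    #  minimal completion that mirrors the per-entry call used in Example #3.
    #  the exact process_entry flags may differ between larcv versions.)
    driver.process_entry(ientry, False, False)

# assumed wrap-up once the loop finishes
driver.finalize()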
Example #8
    def __init__(self,
                 broker_address,
                 larcv_supera_file,
                 output_larcv_filename,
                 adc_producer="wire",
                 input_mode=0,
                 cropper_cfg=None,
                 larlite_opreco_file=None,
                 opflash_producer="simpleFlashBeam",
                 tick_backwards=False,
                 sparseout_tree_name="uresnet",
                 use_compression=False,
                 use_sparseimg=True,
                 planes=None,
                 intimewin_min_tick=190,
                 intimewin_max_tick=320,
                 **kwargs):
        """
        broker_address        [str]   IP address and port of broker. e.g.: tcp://my.server.somwhere:6000
        larcv_supera_file     [str]   path to LArCV root file with whole images
        output_larcv_filename [str]   path to LArCV root where we will write output
        adc_producer          [str]   name of Tree containing input images. e.g. 'wire'
        skip_detsplit         [bool]  if true, process whole image at once. if false, process crops.
        opflash_producer      [str]   name of tree carrying opflash information (used to make CROI from flash) (deprecated)
        tick_backwards        [bool]  if true, expect input LArCV images to be stored in tick-backward format
        mrcnn_tree_name       [str]   name of output tree contaning MRCN
        use_compression       [bool]  if false (default), do not compress byte string sent and received
        use_sparseimg         [bool]  if false (default), do not convert whole image into sparse. otherwisek, do. 
                                      To save bytes transferred.
        planes                [list of int] if not None, only run in the specified planes
        intime_min_tick       [int]   Start of Time window  for trigger
        intime_max_tick       [int]   End of Time window  for trigger
        """
        super(SparseSSNetClient, self).__init__(broker_address, **kwargs)

        # setup the input and output larcv iomanager, input larlite manager
        tick_direction = larcv.IOManager.kTickForward
        if tick_backwards:
            tick_direction = larcv.IOManager.kTickBackward

        self._input_mode = input_mode
        self._use_compression = use_compression
        self._cropper_cfg = cropper_cfg
        self._planes = planes

        if self._input_mode == SparseSSNetClient.SPLIT:
            # SPLIT WHOLEVIEW IMAGE

            if cropper_cfg is None:
                # create config
                default_cfg = """ProcessDriver: {
  Verbosity: 0
  RandomAccess: false
  EnableFilter: false
  InputFiles: [""]
  IOManager: {
    IOMode: 2
    Name: "larflowinput"
    OutFileName: "tmp_out.root"
  }
  ProcessName: ["ubsplit"]
  ProcessType: ["UBSplitDetector"]

  ProcessList: {
    ubsplit: {
      Verbosity: 0
      InputProducer:\"%s\"
      OutputBBox2DProducer: \"detsplit\"
      CropInModule: true
      OutputCroppedProducer: \"detsplit\"
      BBoxPixelHeight: 512
      BBoxPixelWidth:  832
      CoveredZWidth: 310
      FillCroppedYImageCompletely: true
      DebugImage: false
      MaxImageS: -1
      RandomizeCrops: false
      MaxRandomAttempts: 1
      MinFracPixelsInCrop: 0.0
   }
 }
}
""" % (adc_producer)
                print(default_cfg, file=open("default_ubsplit.cfg", 'w'))
                cropper_cfg = "default_ubsplit.cfg"

            self._splitter = larcv.ProcessDriver("ProcessDriver")
            self._splitter.configure(cropper_cfg)
            infiles = std.vector("std::string")()
            infiles.push_back(larcv_supera_file)
            self._splitter.override_input_file(infiles)
            self._splitter.initialize()
            self._inlarcv = self._splitter.io_mutable()
        else:
            self._inlarcv = larcv.IOManager(larcv.IOManager.kREAD, "",
                                            tick_direction)
            self._inlarcv.add_in_file(larcv_supera_file)
            self._inlarcv.initialize()

        # LARLITE INPUT (used when cropping based on OPFLASH)
        self._inlarlite = None
        if self._input_mode == SparseSSNetClient.OPFLASH_ROI:
            if larlite_opreco_file is None or not os.path.exists(
                    larlite_opreco_file):
                raise ValueError(
                    "larlite opreco file needed or not found when input mode is OPFLASH_ROI"
                )
            self._inlarlite = LArliteManager(larlite.storage_manager.kREAD)
            self._inlarlite.add_in_filename(larlite_opreco_file)
            self._inlarlite.open()
            #self._inlarlite.set_verbosity(0)

        self._outlarcv = larcv.IOManager(larcv.IOManager.kWRITE)
        self._outlarcv.set_out_file(output_larcv_filename)
        self._outlarcv.set_verbosity(larcv.msg.kDEBUG)
        self._outlarcv.initialize()
        self._log = logging.getLogger(__name__)

        #FixedCROIFromFlash = ublarcvapp.ubdllee.FixedCROIFromFlashAlgo
        self._sparseout_tree_name = sparseout_tree_name
        self._adc_producer = adc_producer
        self._opflash_producer = opflash_producer

        self._ubsplitdet = None

        # MESSAGES TO LOOKFOR
        self._ERROR_NOMSGS = "ERROR:nomessages".encode('utf-8')
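A hedged construction sketch; the broker address and file names are placeholders, and SPLIT mode is assumed so the default UBSplitDetector config written above gets used:

# placeholder arguments; assumes the SparseSSNetClient class and larcv are importable
client = SparseSSNetClient("tcp://localhost:6000",
                           "supera_input.root",
                           "out_sparsessnet.root",
                           adc_producer="wire",
                           input_mode=SparseSSNetClient.SPLIT,
                           tick_backwards=True)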
Example #9
    def __init__(self,
                 broker_address,
                 larcv_supera_file,
                 output_larcv_filename,
                 adc_producer="wire",
                 sparseimage_input_producer="larflow",
                 sparseimage_output_producer="dualflow",
                 has_sparseimage_data=False,
                 save_as_sparseimg=False,
                 tick_backwards=False,
                 use_compression=False,
                 cropper_cfg="ubcrop.cfg",
                 **kwargs):
        """
        This class loads either larcv::sparseimage or larcv::image2d data from
        the input file and prepares the data as a binary json (bson) message to
        be sent to the broker. When the broker replies with the worker output,
        it is saved to the output larcv root file.
        """
        super(UBSparseLArFlowClient, self).__init__(broker_address, **kwargs)

        # setup the input iomanager
        tick_direction = larcv.IOManager.kTickForward
        if tick_backwards:
            tick_direction = larcv.IOManager.kTickBackward

        # setup splitter: for processing wholeview images
        if not has_sparseimage_data:
            self.splitter = larcv.ProcessDriver("ProcessDriver")
            self.splitter.configure(cropper_cfg)
            infiles = std.vector("std::string")()
            infiles.push_back(larcv_supera_file)
            self.splitter.override_input_file(infiles)
            self.splitter.initialize()
            self._inlarcv = self.splitter.io_mutable()
        else:
            self._inlarcv = larcv.IOManager(larcv.IOManager.kREAD, "",
                                            tick_direction)
            self._inlarcv.add_in_file(larcv_supera_file)
            self._inlarcv.initialize()

        # setup output iomanager
        self._outlarcv = larcv.IOManager(larcv.IOManager.kWRITE)
        self._outlarcv.set_out_file(output_larcv_filename)
        self._outlarcv.initialize()

        # setup config
        self._adc_producer = adc_producer
        self._sparseimage_input_producer = sparseimage_input_producer
        self._sparseimage_output_producer = sparseimage_output_producer
        self._use_sparseimage_data = has_sparseimage_data
        self._save_as_sparseimg = save_as_sparseimg
        self._use_compression = use_compression

        # thresholds: adc values must be above this value to be included
        self._threshold_v = std.vector("float")(3, 10.0)
        # cuton flag: if pixel passes in the given plane, we save values in all three
        #             this is required because of the submanifold structure
        self._cuton_pixel_v = std.vector("int")(3, 1)

        # setup logger
        self._log = logging.getLogger(__name__)
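A hedged construction sketch; the broker address and file names are placeholders, and wholeview input is assumed, so the cropper config ubcrop.cfg must exist:

# placeholder arguments; assumes the UBSparseLArFlowClient class and its base client are importable
client = UBSparseLArFlowClient("tcp://localhost:6000",
                               "supera_input.root",
                               "out_dualflow.root",
                               adc_producer="wire",
                               has_sparseimage_data=False,
                               save_as_sparseimg=True,
                               cropper_cfg="ubcrop.cfg")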
Example #10
from ROOT import std
from larcv import larcv
from ublarcvapp import ublarcvapp
# instantiating the factory loads the library, which registers the process factory
factory = ublarcvapp.dltagger.DLTaggerProcessFactory()
print factory

inputfiles = std.vector("std::string")()
if type(args.input_larcv) is str:
    #inputfiles.push_back( "testset1/out_larcv_test.root" )
    inputfiles.push_back(args.input_larcv)
elif type(args.input_larcv) is list:
    for f in args.input_larcv:
        inputfiles.push_back(f)

driver = larcv.ProcessDriver("DLTagger")
driver.configure(args.config)
driver.override_input_file(inputfiles)
driver.override_output_file(args.out_larcv)

# get processor, add larlite files
processors = driver.process_map()
it_process = processors.find("DLTaggerProcess")
dltagger = driver.process_ptr(it_process.second)
dltagger.add_larlite_infile(args.opreco)
dltagger.set_larlite_outfile(args.out_larlite)

driver.initialize()

nentries = driver.io().get_n_entries()
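The snippet is truncated here in the original. An assumed wrap-up, mirroring the batch_process()/finalize() lifecycle of Example #2, is sketched below; a per-entry loop over nentries, as in Example #7, would work equally well:

driver.batch_process()   # assumed: run the configured DLTagger over every entry
driver.finalize()        # assumed: write the larcv output file and close inputs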