Example No. 1
    def __init__(self, args):
        self.args = args
        if not self.args.training:
            self._larcv_interface = queueloader.queue_interface(
                random_access_mode="serial_access")
        else:
            self._larcv_interface = queueloader.queue_interface(
                random_access_mode="random_blocks")

        self._iteration = 0
        self._global_step = -1

        self._cleanup = []
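All snippets on this page assume the larcv3 Python bindings are importable. Below is a minimal sketch of that import and of the two access modes used above; it is an illustration under that assumption, not code taken from any one example.

from larcv import queueloader

# "serial_access" walks events in file order (used for inference above),
# while "random_blocks" shuffles blocks of events (used for training).
interface = queueloader.queue_interface(random_access_mode="serial_access")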
Example No. 2
    def __init__(self,
                 mode,
                 distributed,
                 image_mode,
                 label_mode,
                 input_dimension,
                 seed=None):

        if mode not in ['train', 'inference', 'iotest']:
            raise Exception(f"Larcv Fetcher can't handle mode {mode}")

        if mode == "inference":
            random_access_mode = "serial_access"
        else:
            random_access_mode = "random_blocks"

        if distributed:
            from larcv import distributed_queue_interface
            self._larcv_interface = distributed_queue_interface.queue_interface(
                random_access_mode=random_access_mode, seed=seed)
        else:
            from larcv import queueloader
            self._larcv_interface = queueloader.queue_interface(
                random_access_mode=random_access_mode, seed=seed)

        self.mode = mode
        self.image_mode = image_mode
        self.label_mode = label_mode
        self.input_dimension = input_dimension

        self.writer = None
Example No. 3
    def __init__(self, mode, distributed, dataset, data_format, seed=None):

        if mode not in ['train', 'inference', 'iotest']:
            raise Exception(f"Larcv Fetcher can't handle mode {mode}")

        random_access_mode = dataset.access_mode

        if random_access_mode != "serial_access" and mode == "inference":
            logger.warning("Using random blocks in inference - possible bug!")

        if distributed:
            from larcv import distributed_queue_interface
            self._larcv_interface = distributed_queue_interface.queue_interface(
                random_access_mode=random_access_mode, seed=seed)
        else:
            from larcv import queueloader
            self._larcv_interface = queueloader.queue_interface(
                random_access_mode=random_access_mode, seed=seed)

        self.mode = mode
        self.image_mode = data_format
        self.input_dimension = dataset.dimension
        self.distributed = distributed

        self.writer = None
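Example No. 3 reads only two attributes of the `dataset` argument, `access_mode` and `dimension`. A hypothetical stand-in object showing the minimal shape that constructor expects (the real project presumably passes a richer configuration object):

from types import SimpleNamespace

# Hypothetical minimal "dataset" config for Example No. 3:
# only .access_mode and .dimension are read by the constructor.
dataset = SimpleNamespace(access_mode="random_blocks", dimension=2)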
Example No. 4
    def __init__(self):
        # Replaces larcv_interface.larcv_interface() with queueloader.queue_interface()
        self._larcv_interface = queueloader.queue_interface()
        self._iteration = 0
        self._global_step = -1

        self._cleanup = []
Example No. 5
    def __init__(self,
                 mode,
                 distributed,
                 downsample,
                 dataformat,
                 synthetic,
                 sparse,
                 seed=None):

        if mode not in ['train', 'inference', 'iotest']:
            raise Exception(f"Larcv Fetcher can't handle mode {mode}")

        if not synthetic:

            if distributed:
                from larcv import distributed_queue_interface
                self._larcv_interface = distributed_queue_interface.queue_interface()
            else:
                from larcv import queueloader
                if mode == "inference":
                    self._larcv_interface = queueloader.queue_interface(
                        random_access_mode="serial_access", seed=seed)
                else:
                    # mode is "train" or "iotest" (validated above)
                    self._larcv_interface = queueloader.queue_interface(
                        random_access_mode="random_blocks", seed=seed)
        else:
            # Synthetic data is generated on the fly and needs no larcv interface
            self._larcv_interface = None

        self.mode = mode
        self.downsample = downsample
        self.dataformat = dataformat
        self.synthetic = synthetic
        self.sparse = sparse

        self.writer = None

        # Compute the realized image shape:
        self.full_image_shape = [
            self.FULL_RESOLUTION_H, self.FULL_RESOLUTION_W
        ]
        self.ds = 2**downsample

        self.image_shape = [int(i / self.ds) for i in self.full_image_shape]
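For illustration only, here is the shape arithmetic from the end of Example No. 5 with made-up numbers; `FULL_RESOLUTION_H` and `FULL_RESOLUTION_W` are class constants not shown in the snippet, so the values below are assumptions:

# Hypothetical constants, just to illustrate the downsampling arithmetic:
FULL_RESOLUTION_H, FULL_RESOLUTION_W = 1280, 2048
downsample = 2
ds = 2 ** downsample                     # 4
image_shape = [int(i / ds) for i in (FULL_RESOLUTION_H, FULL_RESOLUTION_W)]
# image_shape == [320, 512]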
Example No. 6
    def __init__(self):
        if FLAGS.MODE == 'inference':
            mode = 'serial_access'
        else:
            mode = 'random_blocks'
        self._larcv_interface = queueloader.queue_interface(random_access_mode=mode)
        self._iteration       = 0
        self._global_step     = -1

        self._cleanup         = []
Example No. 7
def create_interface_object(args):

    config = build_config_file(args)

    if args.distributed:
        if args.io_mode == 'queue':
            larcv_interface = distributed_queue_interface.queue_interface(
                random_access_mode=args.event_order)
        else:
            larcv_interface = distributed_larcv_interface.thread_interface()
    else:
        if args.io_mode == 'queue':
            larcv_interface = queueloader.queue_interface(
                random_access_mode=args.event_order)
        else:
            larcv_interface = threadloader.thread_interface()

    # Generate a named temp file:
    main_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
    main_file.write(config)

    main_file.close()

    # Prepare data managers:
    io_config = {
        'filler_name': 'IOTest',
        'filler_cfg': main_file.name,
        'verbosity': 5,
        'make_copy': True
    }

    # By default, fetching data and label as the keywords from the file:
    data_keys = OrderedDict({'image': 'data', 'label': 'label'})

    if args.distributed:
        if args.io_mode == 'queue':
            larcv_interface.prepare_manager('primary',
                                            io_config,
                                            COMM.Get_size() *
                                            args.local_batch_size,
                                            data_keys,
                                            color=0)
        else:
            larcv_interface.prepare_manager(
                'primary', io_config,
                COMM.Get_size() * args.local_batch_size, data_keys)
    else:
        # The queue and thread interfaces take the same arguments here,
        # so the non-distributed call does not depend on args.io_mode:
        larcv_interface.prepare_manager('primary', io_config,
                                        args.local_batch_size, data_keys)

    return larcv_interface
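A hedged usage sketch for the helper above: `prepare_next` and `fetch_minibatch_data` are assumed to be the queue_interface methods for queuing and reading batches, as in the projects these snippets come from; the calls below are a sketch, not a verified API reference.

# Sketch: drive the interface returned by create_interface_object().
interface = create_interface_object(args)
interface.prepare_next('primary')                           # queue up a batch
batch = interface.fetch_minibatch_data('primary', pop=True)
print(batch['image'].shape, batch['label'].shape)           # keys set by data_keys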
Example No. 8
    def __init__(self, distributed, dataset, seed=0):

        random_access_mode = dataset.access_mode

        if distributed:
            from larcv import distributed_queue_interface
            self._larcv_interface = distributed_queue_interface.queue_interface(
                random_access_mode=random_access_mode.name, seed=seed)
        else:
            from larcv import queueloader
            self._larcv_interface = queueloader.queue_interface(
                random_access_mode=random_access_mode.name, seed=seed)

        self.distributed = distributed
        self.dataset = dataset

        self.writer = None
Example No. 9
    def __init__(self, distributed, seed=None, inference=False):

        self._cleanup = []
        self._eventID_labels   = {}
        self._eventID_energies = {}

        if inference:
            random_access_mode = "serial_access"
        else:
            random_access_mode = "random_blocks"

        self._color = None
        if distributed:
            from larcv import distributed_queue_interface
            self._larcv_interface = distributed_queue_interface.queue_interface(random_access_mode=random_access_mode)
            self._color = 0
        else:
            from larcv import queueloader
            self._larcv_interface = queueloader.queue_interface(random_access_mode=random_access_mode, seed=seed)

        self.inference = inference
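Most examples above repeat the same distributed-versus-local selection. Condensed into one helper, based only on the pattern shown on this page (a sketch, not part of any of the original projects):

def make_interface(distributed, random_access_mode, seed=None):
    # Both modules expose a queue_interface with the same constructor
    # arguments, so the only difference is which module gets imported.
    if distributed:
        from larcv import distributed_queue_interface as qi
    else:
        from larcv import queueloader as qi
    return qi.queue_interface(random_access_mode=random_access_mode, seed=seed)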