def _connect_shmem_cli(self, tag):
    """Create a fresh shmem client, connect it, and return a deep copy
    of the first (configure) datagram as a memoryview.

    ShmemClients open a connection in connect() and close it in the
    destructor. By creating a new client on every call we make sure
    python runs that destructor in the gc routine.
    """
    self.shmem_cli = PyShmemClient()
    # Try to reach an available server; connect() blocks per attempt.
    for _attempt in range(SHMEM_CONN_MAX_RETRIES):
        status = int(self.shmem_cli.connect(tag, 0))
        if not status:
            break
        time.sleep(0.01)
    assert not status, 'shmem connect failure %d' % status

    # Block until the first configure datagram arrives.
    view = self.shmem_cli.get(self.shmem_kwargs)
    assert view

    # Copy the datagram out of the shmem buffer, then release the buffer.
    # cpo: copy L1Accepts too because some shmem applications like AMI's
    # pickN can hold references to dgrams for a long time, consuming the
    # shmem buffers and creating a deadlock situation. Could revisit this
    # later and only deep-copy arrays inside pickN, for example, but that
    # would be more fragile.
    copied = bytes(view[:_dgSize(view)])
    self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                               self.shmem_kwargs['size'])
    return memoryview(copied)
def __init__(self, xtc_files, configs=[], fds=[], tag=None, run=None):
    """Open xtc_files and store configs.

    If file descriptors (fds) are given, reuse them instead of opening
    the files again.

    Parameters
    ----------
    xtc_files : str, list, or np.ndarray
        Path(s) to xtc2 files, or the literal 'shmem' as the first
        element to read from shared memory instead of files.
    configs : list
        Pre-built config dgrams; when non-empty they are used as-is.
    fds : list
        Already-open file descriptors to reuse.
    tag : str or None
        Shmem server tag (only used in shmem mode).
    run : object or None
        Owning run object, stored in self._run.
    """
    self.xtc_files = []
    self.shmem_cli = None
    self.shmem_kwargs = {'index': -1, 'size': 0, 'cli_cptr': None}
    self.configs = []
    self._timestamps = []  # built when iterating
    self._run = run
    if isinstance(xtc_files, (str)):
        # Single filename: wrap in a fixed-width unicode numpy array.
        self.xtc_files = np.array([xtc_files], dtype='U%s' % FN_L)
    elif isinstance(xtc_files, (list, np.ndarray)):
        if len(xtc_files) > 0:  # handles smalldata-only case
            if xtc_files[0] == 'shmem':
                self.shmem_cli = PyShmemClient()
                # establish connection to available server - blocking
                status = int(self.shmem_cli.connect(tag, 0))
                assert not status, 'shmem connect failure %d' % status
                # wait for first configure datagram - blocking
                view = self.shmem_cli.get(self.shmem_kwargs)
                assert view
                # Release shmem buffer after copying Transition data
                # (L1Accepts are left in the shmem buffer here).
                if _service(view) != TransitionId.L1Accept:
                    barray = bytes(view[:_dgSize(view)])
                    self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                                               self.shmem_kwargs['size'])
                    view = memoryview(barray)
                d = dgram.Dgram(view=view,
                                shmem_index=self.shmem_kwargs['index'],
                                shmem_size=self.shmem_kwargs['size'],
                                shmem_cli_cptr=self.shmem_kwargs['cli_cptr'],
                                shmem_cli_pyobj=self.shmem_cli)
                self.configs += [d]
            else:
                self.xtc_files = np.asarray(xtc_files, dtype='U%s' % FN_L)
    # Reuse caller-supplied fds when present; otherwise open one fd per file.
    self.given_fds = True if len(fds) > 0 else False
    if self.given_fds:
        self.fds = np.asarray(fds, dtype=np.int32)
    else:
        self.fds = np.array([
            os.open(xtc_file, os.O_RDONLY) for xtc_file in self.xtc_files
        ], dtype=np.int32)
    given_configs = True if len(configs) > 0 else False
    if given_configs:
        self.configs = configs
    elif xtc_files[0] != 'shmem':
        # NOTE(review): assumes xtc_files is non-empty here; an empty list
        # would raise IndexError — confirm callers guarantee this.
        self.configs = [dgram.Dgram(file_descriptor=fd) for fd in self.fds]
    self.det_classes, self.xtc_info, self.det_info_table = self.get_det_class_table()
    self.calibconst = {}  # initialize to empty dict - will be populated by run class
def __init__(self, xtc_files, configs=[], fds=[], tag=None, run=None, max_retries=0):
    """Open xtc_files and store configs.

    If file descriptors (fds) are given, reuse them instead of opening
    the files again.

    Parameters
    ----------
    xtc_files : str, list, or np.ndarray
        Path(s) to xtc2 files, or the literal 'shmem' as the first
        element to read from shared memory instead of files.
    configs : list
        Pre-built config dgrams; when non-empty they are used as-is.
    fds : list
        Already-open file descriptors to reuse.
    tag : str or None
        Shmem server tag (only used in shmem mode).
    run : object or None
        Owning run object, stored in self._run.
    max_retries : int
        Forwarded to dgram.Dgram when reading configs from files.
    """
    self.xtc_files = []
    self.shmem_cli = None
    self.shmem_kwargs = {'index': -1, 'size': 0, 'cli_cptr': None}
    self.configs = []
    self._timestamps = []  # built when iterating
    self._run = run
    self.found_endrun = True        # tracks BeginRun/EndRun pairing
    self.buffered_beginruns = []
    self.max_retries = max_retries
    if isinstance(xtc_files, (str)):
        # Single filename: wrap in a fixed-width unicode numpy array.
        self.xtc_files = np.array([xtc_files], dtype='U%s' % FN_L)
    elif isinstance(xtc_files, (list, np.ndarray)):
        if len(xtc_files) > 0:  # handles smalldata-only case
            if xtc_files[0] == 'shmem':
                self.shmem_cli = PyShmemClient()
                # establish connection to available server - blocking
                status = int(self.shmem_cli.connect(tag, 0))
                assert not status, 'shmem connect failure %d' % status
                # wait for first configure datagram - blocking
                view = self.shmem_cli.get(self.shmem_kwargs)
                assert view
                # Release shmem buffer after copying Transition data
                # cpo: copy L1Accepts too because some shmem
                # applications like AMI's pickN can hold references
                # to dgrams for a long time, consuming the shmem buffers
                # and creating a deadlock situation. could revisit this
                # later and only deep-copy arrays inside pickN, for example
                # but would be more fragile.
                barray = bytes(view[:_dgSize(view)])
                self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                                           self.shmem_kwargs['size'])
                view = memoryview(barray)
                d = dgram.Dgram(view=view)
                self.configs += [d]
            else:
                self.xtc_files = np.asarray(xtc_files, dtype='U%s' % FN_L)
    # Reuse caller-supplied fds when present; otherwise open one fd per file.
    self.given_fds = True if len(fds) > 0 else False
    if self.given_fds:
        self.fds = np.asarray(fds, dtype=np.int32)
    else:
        self.fds = np.array([os.open(xtc_file, os.O_RDONLY)
                             for xtc_file in self.xtc_files], dtype=np.int32)
    given_configs = True if len(configs) > 0 else False
    if given_configs:
        self.configs = configs
    elif xtc_files[0] != 'shmem':
        # NOTE(review): assumes xtc_files is non-empty here; an empty list
        # would raise IndexError — confirm callers guarantee this.
        self.configs = [dgram.Dgram(file_descriptor=fd, max_retries=self.max_retries)
                        for fd in self.fds]
    self.calibconst = {}  # initialize to empty dict - will be populated by run class
def __init__(self, xtc_files, configs=[], tag=None, run=None):
    """Open xtc_files and store configs.

    Parameters
    ----------
    xtc_files : str, list, or np.ndarray
        Path(s) to xtc2 files, or the literal 'shmem' as the first
        element to read from shared memory instead of files.
    configs : list
        Pre-built config dgrams; when non-empty they are used as-is.
    tag : str or None
        Shmem server tag (only used in shmem mode).
    run : object or None
        Owning run object, stored in self._run.
    """
    self.xtc_files = []
    self.shmem_cli = None
    self.shmem_kwargs = {'index': -1, 'size': 0, 'cli_cptr': None}
    self.configs = []
    self.fds = []               # one fd per xtc file, opened below
    self._timestamps = []       # built when iterating
    self._run = run
    if isinstance(xtc_files, (str)):
        # Single filename: wrap in a fixed-width unicode numpy array.
        self.xtc_files = np.array([xtc_files], dtype='U%s' % FN_L)
        assert len(self.xtc_files) > 0
    elif isinstance(xtc_files, (list, np.ndarray)):
        if len(xtc_files) > 0:  # handles smalldata-only case
            if xtc_files[0] == 'shmem':
                self.shmem_cli = PyShmemClient()
                # establish connection to available server - blocking
                status = int(self.shmem_cli.connect(tag, 0))
                assert not status, 'shmem connect failure %d' % status
                # wait for first configure datagram - blocking
                view = self.shmem_cli.get(self.shmem_kwargs)
                assert view
                d = dgram.Dgram(view=view,
                                shmem_index=self.shmem_kwargs['index'],
                                shmem_size=self.shmem_kwargs['size'],
                                shmem_cli_cptr=self.shmem_kwargs['cli_cptr'],
                                shmem_cli_pyobj=self.shmem_cli)
                self.configs += [d]
            else:
                self.xtc_files = np.asarray(xtc_files, dtype='U%s' % FN_L)
                assert len(self.xtc_files) > 0
    given_configs = True if len(configs) > 0 else False
    if given_configs:
        self.configs = configs
    # Open every file; read the leading config dgram only when the caller
    # did not supply configs.
    for i, xtcdata_filename in enumerate(self.xtc_files):
        self.fds.append(os.open(xtcdata_filename, os.O_RDONLY))
        if not given_configs:
            d = dgram.Dgram(file_descriptor=self.fds[-1])
            self.configs += [d]
    self.det_class_table, self.xtc_info = self.get_det_class_table()
    self.calibs = {}  # initialize to empty dict - will be populated by run class
class DgramManager(object):
    """Sequential reader of xtc2 datagrams from files or shared memory.

    Produces one Event per __next__ call (no event building) and
    synthesizes fake EndRun transitions when a run is missing one.
    """

    def __init__(self, xtc_files, configs=[], fds=[], tag=None, run=None, max_retries=0):
        """Open xtc_files and store configs.

        If file descriptors (fds) are given, reuse them instead of
        opening the files again.

        Parameters
        ----------
        xtc_files : str, list, or np.ndarray
            Path(s) to xtc2 files, or the literal 'shmem' as the first
            element to read from shared memory instead of files.
        configs : list
            Pre-built config dgrams; when non-empty they are used as-is.
        fds : list
            Already-open file descriptors to reuse.
        tag : str or None
            Shmem server tag (only used in shmem mode).
        run : object or None
            Owning run object, stored in self._run.
        max_retries : int
            Forwarded to dgram.Dgram for file reads.
        """
        self.xtc_files = []
        self.shmem_cli = None
        self.shmem_kwargs = {'index': -1, 'size': 0, 'cli_cptr': None}
        self.configs = []
        self._timestamps = []  # built when iterating
        self._run = run
        self.found_endrun = True        # tracks BeginRun/EndRun pairing
        self.buffered_beginruns = []
        self.max_retries = max_retries
        if isinstance(xtc_files, (str)):
            self.xtc_files = np.array([xtc_files], dtype='U%s' % FN_L)
        elif isinstance(xtc_files, (list, np.ndarray)):
            if len(xtc_files) > 0:  # handles smalldata-only case
                if xtc_files[0] == 'shmem':
                    self.shmem_cli = PyShmemClient()
                    # establish connection to available server - blocking
                    status = int(self.shmem_cli.connect(tag, 0))
                    assert not status, 'shmem connect failure %d' % status
                    # wait for first configure datagram - blocking
                    view = self.shmem_cli.get(self.shmem_kwargs)
                    assert view
                    # Release shmem buffer after copying Transition data
                    # cpo: copy L1Accepts too because some shmem
                    # applications like AMI's pickN can hold references
                    # to dgrams for a long time, consuming the shmem buffers
                    # and creating a deadlock situation. could revisit this
                    # later and only deep-copy arrays inside pickN, for example
                    # but would be more fragile.
                    barray = bytes(view[:_dgSize(view)])
                    self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                                               self.shmem_kwargs['size'])
                    view = memoryview(barray)
                    d = dgram.Dgram(view=view)
                    self.configs += [d]
                else:
                    self.xtc_files = np.asarray(xtc_files, dtype='U%s' % FN_L)
        # Reuse caller-supplied fds when present; otherwise open one per file.
        self.given_fds = True if len(fds) > 0 else False
        if self.given_fds:
            self.fds = np.asarray(fds, dtype=np.int32)
        else:
            self.fds = np.array([os.open(xtc_file, os.O_RDONLY)
                                 for xtc_file in self.xtc_files], dtype=np.int32)
        given_configs = True if len(configs) > 0 else False
        if given_configs:
            self.configs = configs
        elif xtc_files[0] != 'shmem':
            self.configs = [dgram.Dgram(file_descriptor=fd, max_retries=self.max_retries)
                            for fd in self.fds]
        self.calibconst = {}  # initialize to empty dict - will be populated by run class

    def close(self):
        """Close file descriptors we opened (never caller-supplied ones)."""
        if not self.given_fds:
            for fd in self.fds:
                os.close(fd)

    def __iter__(self):
        return self

    def _check_missing_endrun(self, beginruns=None):
        """Return fake EndRun dgrams if the previous run never ended.

        When a new BeginRun (or end of data) arrives while found_endrun is
        False, fabricate one EndRun per config stamped just after the last
        seen timestamp; the real BeginRuns are buffered for the next call
        to __next__. Returns None when no fake EndRuns are needed.
        """
        fake_endruns = None
        if not self.found_endrun:  # there's no previous EndRun
            sec = (self._timestamps[-1] >> 32) & 0xffffffff
            # +1 ms so the fake EndRun sorts after the last real event
            usec = int((self._timestamps[-1] & 0xffffffff) * 1e3 + 1)
            if beginruns:
                self.buffered_beginruns = [dgram.Dgram(config=config, view=d,
                                                       offset=0, size=d._size)
                                           for d, config in zip(beginruns, self.configs)]
            fake_endruns = [dgram.Dgram(config=config, fake_endrun=1,
                                        fake_endrun_sec=sec, fake_endrun_usec=usec)
                            for config in self.configs]
            self.found_endrun = True
        else:
            self.found_endrun = False
        return fake_endruns

    def __next__(self):
        """ only support sequential read - no event building"""
        if self.buffered_beginruns:
            self.found_endrun = False
            # FIX: was run=self.run(), but this class only defines
            # get_run()/set_run(); self.run() raised AttributeError.
            evt = Event(self.buffered_beginruns, run=self.get_run())
            self._timestamps += [evt.timestamp]
            self.buffered_beginruns = []
            return evt

        if self.shmem_cli:
            view = self.shmem_cli.get(self.shmem_kwargs)
            if view:
                # Release shmem buffer after copying Transition data
                # cpo: copy L1Accepts too because some shmem
                # applications like AMI's pickN can hold references
                # to dgrams for a long time, consuming the shmem buffers
                # and creating a deadlock situation. could revisit this
                # later and only deep-copy arrays inside pickN, for example
                # but would be more fragile.
                barray = bytes(view[:_dgSize(view)])
                self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                                           self.shmem_kwargs['size'])
                view = memoryview(barray)
                # use the most recent configure datagram
                config = self.configs[len(self.configs) - 1]
                d = dgram.Dgram(config=config, view=view)
                dgrams = [d]
            else:
                raise StopIteration
        else:
            try:
                dgrams = [dgram.Dgram(config=config, max_retries=self.max_retries)
                          for config in self.configs]
            except StopIteration:
                # End of files: emit fake EndRuns if a run is still open.
                fake_endruns = self._check_missing_endrun()
                if fake_endruns:
                    dgrams = fake_endruns
                else:
                    raise StopIteration

        # Check BeginRun - EndRun pairing
        service = dgrams[0].service()
        if service == TransitionId.BeginRun:
            fake_endruns = self._check_missing_endrun(beginruns=dgrams)
            if fake_endruns:
                dgrams = fake_endruns
        if service == TransitionId.EndRun:
            self.found_endrun = True

        evt = Event(dgrams, run=self.get_run())
        self._timestamps += [evt.timestamp]
        return evt

    def jumps(self, dgram_i, offset, size):
        """Read one dgram at (offset, size) from stream dgram_i.

        Returns None for the (0, 0) placeholder or when the read raises
        StopIteration.
        """
        if offset == 0 and size == 0:
            d = None
        else:
            try:
                d = dgram.Dgram(file_descriptor=self.fds[dgram_i],
                                config=self.configs[dgram_i],
                                offset=offset,
                                size=size,
                                max_retries=self.max_retries)
            except StopIteration:
                d = None
        return d

    def jump(self, offsets, sizes):
        """ Jumps to the offset and reads out dgram on each xtc file.
        This is used in normal mode (multiple detectors with MPI).
        """
        assert len(offsets) > 0 and len(sizes) > 0
        dgrams = [self.jumps(dgram_i, offset, size)
                  for dgram_i, (offset, size) in enumerate(zip(offsets, sizes))]
        evt = Event(dgrams, run=self._run)
        return evt

    def get_timestamps(self):
        # return numpy array for easy search later
        return np.asarray(self._timestamps, dtype=np.uint64)

    def set_run(self, run):
        self._run = run

    def get_run(self):
        return self._run
class DgramManager():
    """Sequential reader of xtc2 datagrams from files or shared memory.

    Produces one Event per __next__ call (no event building) and builds
    the detector-class lookup tables from the config dgrams.
    """

    def __init__(self, xtc_files, configs=[], fds=[], tag=None, run=None):
        """Open xtc_files and store configs.

        If file descriptors (fds) are given, reuse them instead of
        opening the files again.

        Parameters
        ----------
        xtc_files : str, list, or np.ndarray
            Path(s) to xtc2 files, or the literal 'shmem' as the first
            element to read from shared memory instead of files.
        configs : list
            Pre-built config dgrams; when non-empty they are used as-is.
        fds : list
            Already-open file descriptors to reuse.
        tag : str or None
            Shmem server tag (only used in shmem mode).
        run : object or None
            Owning run object, stored in self._run.
        """
        self.xtc_files = []
        self.shmem_cli = None
        self.shmem_kwargs = {'index': -1, 'size': 0, 'cli_cptr': None}
        self.configs = []
        self._timestamps = []  # built when iterating
        self._run = run
        if isinstance(xtc_files, (str)):
            # Single filename: wrap in a fixed-width unicode numpy array.
            self.xtc_files = np.array([xtc_files], dtype='U%s' % FN_L)
        elif isinstance(xtc_files, (list, np.ndarray)):
            if len(xtc_files) > 0:  # handles smalldata-only case
                if xtc_files[0] == 'shmem':
                    self.shmem_cli = PyShmemClient()
                    # establish connection to available server - blocking
                    status = int(self.shmem_cli.connect(tag, 0))
                    assert not status, 'shmem connect failure %d' % status
                    # wait for first configure datagram - blocking
                    view = self.shmem_cli.get(self.shmem_kwargs)
                    assert view
                    # Release shmem buffer after copying Transition data
                    # (L1Accepts are left in the shmem buffer here).
                    if _service(view) != TransitionId.L1Accept:
                        barray = bytes(view[:_dgSize(view)])
                        self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                                                   self.shmem_kwargs['size'])
                        view = memoryview(barray)
                    d = dgram.Dgram(view=view,
                                    shmem_index=self.shmem_kwargs['index'],
                                    shmem_size=self.shmem_kwargs['size'],
                                    shmem_cli_cptr=self.shmem_kwargs['cli_cptr'],
                                    shmem_cli_pyobj=self.shmem_cli)
                    self.configs += [d]
                else:
                    self.xtc_files = np.asarray(xtc_files, dtype='U%s' % FN_L)
        # Reuse caller-supplied fds when present; otherwise open one per file.
        self.given_fds = True if len(fds) > 0 else False
        if self.given_fds:
            self.fds = np.asarray(fds, dtype=np.int32)
        else:
            self.fds = np.array([
                os.open(xtc_file, os.O_RDONLY) for xtc_file in self.xtc_files
            ], dtype=np.int32)
        given_configs = True if len(configs) > 0 else False
        if given_configs:
            self.configs = configs
        elif xtc_files[0] != 'shmem':
            # NOTE(review): assumes xtc_files is non-empty here; an empty
            # list would raise IndexError — confirm callers guarantee this.
            self.configs = [dgram.Dgram(file_descriptor=fd) for fd in self.fds]
        self.det_classes, self.xtc_info, self.det_info_table = self.get_det_class_table()
        self.calibconst = {}  # initialize to empty dict - will be populated by run class

    def close(self):
        # Only close fds we opened ourselves, never caller-supplied ones.
        if not self.given_fds:
            for fd in self.fds:
                os.close(fd)

    def __iter__(self):
        return self

    def __next__(self):
        """ only support sequential read - no event building"""
        if self.shmem_cli:
            view = self.shmem_cli.get(self.shmem_kwargs)
            if view:
                # Release shmem buffer after copying Transition data
                # (L1Accepts are left in the shmem buffer here).
                if _service(view) != TransitionId.L1Accept:
                    barray = bytes(view[:_dgSize(view)])
                    self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                                               self.shmem_kwargs['size'])
                    view = memoryview(barray)
                # use the most recent configure datagram
                config = self.configs[len(self.configs) - 1]
                d = dgram.Dgram(config=config, view=view,
                                shmem_index=self.shmem_kwargs['index'],
                                shmem_size=self.shmem_kwargs['size'],
                                shmem_cli_cptr=self.shmem_kwargs['cli_cptr'],
                                shmem_cli_pyobj=self.shmem_cli)
                dgrams = [d]
            else:
                raise StopIteration
        else:
            # File mode: read the next dgram from every stream in lockstep.
            dgrams = [dgram.Dgram(config=config) for config in self.configs]
        evt = Event(dgrams, run=self.run())
        self._timestamps += [evt.timestamp]
        return evt

    def jump(self, offsets, sizes):
        """ Jumps to the offset and reads out dgram on each xtc file.
        This is used in normal mode (multiple detectors with MPI).
        """
        assert len(offsets) > 0 and len(sizes) > 0
        dgrams = []
        for fd, config, offset, size in zip(self.fds, self.configs, offsets, sizes):
            # (0, 0) is a placeholder for "no dgram in this stream".
            if offset == 0 and size == 0:
                d = None
            else:
                d = dgram.Dgram(file_descriptor=fd, config=config,
                                offset=offset, size=size)
            dgrams += [d]
        evt = Event(dgrams, run=self.run())
        return evt

    def get_det_class_table(self):
        """
        this function gets the version number for a (det, drp_class) combo
        maps (dettype,software,version) to associated python class and
        detector info for a det_name maps to dettype, detid tuple.
        """
        det_classes = {'epics': {}, 'scan': {}, 'normal': {}}
        xtc_info = []
        det_info_table = {}
        # loop over the dgrams in the configuration
        # if a detector/drp_class combo exists in two cfg dgrams
        # it will be OK... they should give the same final Detector class
        for cfg_dgram in self.configs:
            for det_name, det_dict in cfg_dgram.software.__dict__.items():
                # go find the class of the first segment in the dict
                # they should all be identical
                first_key = next(iter(det_dict.keys()))
                det = det_dict[first_key]
                # 'epics' and 'scan' get their own tables; everything
                # else goes into 'normal'.
                if det_name not in det_classes:
                    det_class_table = det_classes['normal']
                else:
                    det_class_table = det_classes[det_name]
                dettype, detid = (None, None)
                for drp_class_name, drp_class in det.__dict__.items():
                    # collect detname maps to dettype and detid
                    if drp_class_name == 'dettype':
                        dettype = drp_class
                        continue
                    if drp_class_name == 'detid':
                        detid = drp_class
                        continue
                    # FIXME: we want to skip '_'-prefixed drp_classes
                    # but this needs to be fixed upstream
                    if drp_class_name.startswith('_'):
                        continue
                    # use this info to look up the desired Detector class
                    versionstring = [str(v) for v in drp_class.version]
                    class_name = '_'.join([det.dettype, drp_class.software] + versionstring)
                    xtc_entry = (det_name, det.dettype, drp_class_name,
                                 '_'.join(versionstring))
                    if xtc_entry not in xtc_info:
                        xtc_info.append(xtc_entry)
                    if hasattr(detectors, class_name):
                        DetectorClass = getattr(detectors, class_name)  # return the class object
                        det_class_table[(det_name, drp_class_name)] = DetectorClass
                    else:
                        pass
                det_info_table[det_name] = (dettype, detid)
        return det_classes, xtc_info, det_info_table

    def get_timestamps(self):
        # return numpy array for easy search later
        return np.asarray(self._timestamps, dtype=np.uint64)

    def run(self):
        return self._run
class DgramManager():
    """Sequential reader of xtc2 datagrams from files or shared memory.

    Oldest variant in this file: fds are closed in __del__ and events
    carry no run object.
    """

    def __init__(self, xtc_files, configs=[], tag=None):
        """Open xtc_files and store configs.

        Parameters
        ----------
        xtc_files : str, list, or np.ndarray
            Path(s) to xtc2 files, or the literal 'shmem' as the first
            element to read from shared memory instead of files.
        configs : list
            Pre-built config dgrams; when non-empty they are used as-is.
        tag : str or None
            Shmem server tag (only used in shmem mode).
        """
        self.xtc_files = []
        self.shmem = None
        self.shmem_kwargs = {'index': -1, 'size': 0, 'cli': None}
        self.configs = []
        self.fds = []           # one fd per xtc file, opened below
        self._timestamps = []   # built when iterating
        if isinstance(xtc_files, (str)):
            # Single filename: wrap in a fixed-width unicode numpy array.
            self.xtc_files = np.array([xtc_files], dtype='U%s' % FN_L)
            assert len(self.xtc_files) > 0
        elif isinstance(xtc_files, (list, np.ndarray)):
            if len(xtc_files) > 0:  # handles smalldata-only case
                if xtc_files[0] == 'shmem':
                    self.shmem = PyShmemClient()
                    # establish connection to available server - blocking
                    status = int(self.shmem.connect(tag, 0))
                    assert not status, 'shmem connect failure %d' % status
                    # wait for first configure datagram - blocking
                    view = self.shmem.get(self.shmem_kwargs)
                    assert view
                    d = dgram.Dgram(view=view,
                                    shmem_index=self.shmem_kwargs['index'],
                                    shmem_size=self.shmem_kwargs['size'],
                                    shmem_cli=self.shmem_kwargs['cli'])
                    self.configs += [d]
                else:
                    self.xtc_files = np.asarray(xtc_files, dtype='U%s' % FN_L)
                    assert len(self.xtc_files) > 0
        given_configs = True if len(configs) > 0 else False
        if given_configs:
            self.configs = configs
        # Open every file; read the leading config dgram only when the
        # caller did not supply configs.
        for i, xtcdata_filename in enumerate(self.xtc_files):
            self.fds.append(os.open(xtcdata_filename, os.O_RDONLY))
            if not given_configs:
                d = dgram.Dgram(file_descriptor=self.fds[-1])
                self.configs += [d]
        self.det_class_table, self.xtc_info = self.get_det_class_table()
        self.calibs = {}  # initialize to empty dict - will be populated by run class

    def __del__(self):
        # Close any fds this instance opened.
        if self.fds:
            for fd in self.fds:
                os.close(fd)

    def __iter__(self):
        return self

    def __next__(self):
        return self.next()

    def next(self):
        """ only support sequential read - no event building"""
        if self.shmem:
            view = self.shmem.get(self.shmem_kwargs)
            if view:
                # use the most recent configure datagram
                config = self.configs[len(self.configs) - 1]
                d = dgram.Dgram(config=config, view=view,
                                shmem_index=self.shmem_kwargs['index'],
                                shmem_size=self.shmem_kwargs['size'],
                                shmem_cli=self.shmem_kwargs['cli'])
                dgrams = [d]
            else:
                raise StopIteration
        else:
            # File mode: read the next dgram from every stream in lockstep.
            dgrams = [dgram.Dgram(config=config) for config in self.configs]
        evt = Event(dgrams)
        self._timestamps += [evt.timestamp]
        return evt

    def jump(self, offsets, sizes):
        """ Jumps to the offset and reads out dgram on each xtc file.
        This is used in normal mode (multiple detectors with MPI).
        """
        assert len(offsets) > 0 and len(sizes) > 0
        dgrams = []
        for fd, config, offset, size in zip(self.fds, self.configs, offsets, sizes):
            d = dgram.Dgram(file_descriptor=fd, config=config,
                            offset=offset, size=size)
            dgrams += [d]
        evt = Event(dgrams)
        return evt

    def get_det_class_table(self):
        """
        this function gets the version number for a (det, drp_class) combo
        maps (dettype,software,version) to associated python class
        """
        det_class_table = {}
        xtc_info = []
        # loop over the dgrams in the configuration
        # if a detector/drp_class combo exists in two cfg dgrams
        # it will be OK... they should give the same final Detector class
        for cfg_dgram in self.configs:
            for det_name, det_dict in cfg_dgram.software.__dict__.items():
                # go find the class of the first segment in the dict
                # they should all be identical
                first_key = next(iter(det_dict.keys()))
                det = det_dict[first_key]
                for drp_class_name, drp_class in det.__dict__.items():
                    # FIXME: we want to skip '_'-prefixed drp_classes
                    # but this needs to be fixed upstream
                    if drp_class_name in ['dettype', 'detid']:
                        continue
                    if drp_class_name.startswith('_'):
                        continue
                    # use this info to look up the desired Detector class
                    versionstring = [str(v) for v in drp_class.version]
                    class_name = '_'.join([det.dettype, drp_class.software] + versionstring)
                    xtc_entry = (det_name, det.dettype, drp_class_name,
                                 '_'.join(versionstring))
                    if xtc_entry not in xtc_info:
                        xtc_info.append(xtc_entry)
                    if hasattr(detectors, class_name):
                        DetectorClass = getattr(detectors, class_name)  # return the class object
                        det_class_table[(det_name, drp_class_name)] = DetectorClass
                    else:
                        pass
        return det_class_table, xtc_info

    def get_timestamps(self):
        # return numpy array for easy search later
        return np.asarray(self._timestamps, dtype=np.uint64)
class DgramManager(object):
    """Sequential reader of xtc2 datagrams from files or shared memory.

    Produces one Event per __next__ call (no event building), keeps the
    BeginRun/EndRun pairing honest by synthesizing fake EndRuns, and
    publishes config-derived lookup tables to itself and any
    config_consumers.
    """

    def __init__(self, xtc_files, configs=[], fds=[], tag=None, run=None,
                 max_retries=0, config_consumers=[]):
        """Open xtc_files and store configs.

        If file descriptors (fds) are given, reuse them instead of
        opening the files again.

        Parameters
        ----------
        xtc_files : str, list, or np.ndarray
            Path(s) to xtc2 files, or the literal 'shmem' as the first
            element to read from shared memory instead of files.
        configs : list
            Pre-built config dgrams; when non-empty they are used as-is.
        fds : list
            Already-open file descriptors to reuse.
        tag : str or None
            Shmem server tag (only used in shmem mode).
        run : object or None
            Owning run object, stored in self._run.
        max_retries : int
            Forwarded to dgram.Dgram for file reads.
        config_consumers : list
            Objects that also receive the config-derived tables
            (det_classes, xtc_info, configinfo_dict, ...).
        """
        self.xtc_files = []
        self.shmem_cli = None
        self.shmem_kwargs = {'index': -1, 'size': 0, 'cli_cptr': None}
        self.configs = []
        self._timestamps = []  # built when iterating
        self._run = run
        self.found_endrun = True        # tracks BeginRun/EndRun pairing
        self.buffered_beginruns = []
        self.max_retries = max_retries
        self.chunk_ids = []
        self.config_consumers = config_consumers
        self.tag = tag
        if isinstance(xtc_files, (str)):
            self.xtc_files = np.array([xtc_files], dtype='U%s' % FN_L)
        elif isinstance(xtc_files, (list, np.ndarray)):
            if len(xtc_files) > 0:  # handles smalldata-only case
                if xtc_files[0] == 'shmem':
                    view = self._connect_shmem_cli(self.tag)
                    d = dgram.Dgram(view=view)
                    #self.configs += [d]
                    # The above line is kept to note that prior to the change below,
                    # the configs are saved as a list. Note that only the most recent
                    # one is used. Mona changed this to "replace" so at a time, there's
                    # only one config.
                    self._set_configs([d])
                else:
                    self.xtc_files = np.asarray(xtc_files, dtype='U%s' % FN_L)
        # Reuse caller-supplied fds when present; otherwise open one per file.
        self.given_fds = True if len(fds) > 0 else False
        if self.given_fds:
            self.fds = np.asarray(fds, dtype=np.int32)
        else:
            self.fds = np.array([os.open(xtc_file, os.O_RDONLY)
                                 for xtc_file in self.xtc_files], dtype=np.int32)
        self.fds_map = {}
        for fd, xtc_file in zip(self.fds, self.xtc_files):
            self.fds_map[fd] = xtc_file
        given_configs = True if len(configs) > 0 else False
        if given_configs:
            self._set_configs(configs)
        elif xtc_files[0] != 'shmem':
            self._set_configs([dgram.Dgram(file_descriptor=fd, max_retries=self.max_retries)
                               for fd in self.fds])
        self.calibconst = {}  # initialize to empty dict - will be populated by run class
        self.n_files = len(self.xtc_files)
        self.set_chunk_ids()

    def _connect_shmem_cli(self, tag):
        """Create a fresh shmem client, connect, and return a copy of the
        first configure datagram as a memoryview."""
        # ShmemClients open a connection in connect() and close it in
        # the destructor. By creating a new client every time, we ensure
        # that python call the destructor in the gc routine.
        self.shmem_cli = PyShmemClient()
        for retry in range(SHMEM_CONN_MAX_RETRIES):
            # establish connection to available server - blocking
            status = int(self.shmem_cli.connect(tag, 0))
            if status == 0:
                break
            time.sleep(0.01)
        assert not status, 'shmem connect failure %d' % status
        # wait for first configure datagram - blocking
        view = self.shmem_cli.get(self.shmem_kwargs)
        assert view
        # Release shmem buffer after copying Transition data
        # cpo: copy L1Accepts too because some shmem
        # applications like AMI's pickN can hold references
        # to dgrams for a long time, consuming the shmem buffers
        # and creating a deadlock situation. could revisit this
        # later and only deep-copy arrays inside pickN, for example
        # but would be more fragile.
        barray = bytes(view[:_dgSize(view)])
        self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                                   self.shmem_kwargs['size'])
        view = memoryview(barray)
        return view

    def _set_configs(self, dgrams):
        """Save and setup given dgrams class configs."""
        self.configs = dgrams
        self._setup_det_class_table()
        self._set_configinfo()

    def _setup_det_class_table(self):
        """
        this function gets the version number for a (det, drp_class) combo
        maps (dettype,software,version) to associated python class and
        detector info for a det_name maps to dettype, detid tuple.
        """
        det_classes = {'epics': {}, 'scan': {}, 'step': {}, 'normal': {}}
        xtc_info = []
        det_info_table = {}
        # collect corresponding stream id for a detector (first found)
        det_stream_id_table = {}
        # loop over the dgrams in the configuration
        # if a detector/drp_class combo exists in two cfg dgrams
        # it will be OK... they should give the same final Detector class
        for i, cfg_dgram in enumerate(self.configs):
            for det_name, det_dict in cfg_dgram.software.__dict__.items():
                # go find the class of the first segment in the dict
                # they should all be identical
                first_key = next(iter(det_dict.keys()))
                det = det_dict[first_key]
                if det_name not in det_classes:
                    det_class_table = det_classes['normal']
                else:
                    det_class_table = det_classes[det_name]
                dettype, detid = (None, None)
                for drp_class_name, drp_class in det.__dict__.items():
                    # collect detname maps to dettype and detid
                    if drp_class_name == 'dettype':
                        dettype = drp_class
                        continue
                    if drp_class_name == 'detid':
                        detid = drp_class
                        continue
                    # FIXME: we want to skip '_'-prefixed drp_classes
                    # but this needs to be fixed upstream
                    if drp_class_name.startswith('_'):
                        continue
                    # use this info to look up the desired Detector class
                    versionstring = [str(v) for v in drp_class.version]
                    class_name = '_'.join([det.dettype, drp_class.software] + versionstring)
                    xtc_entry = (det_name, det.dettype, drp_class_name,
                                 '_'.join(versionstring))
                    if xtc_entry not in xtc_info:
                        xtc_info.append(xtc_entry)
                    if hasattr(detectors, class_name):
                        DetectorClass = getattr(detectors, class_name)  # return the class object
                        det_class_table[(det_name, drp_class_name)] = DetectorClass
                    else:
                        pass
                det_info_table[det_name] = (dettype, detid)
                if det_name not in det_stream_id_table:
                    det_stream_id_table[det_name] = i
        # Add products of this function to itself and the consumers
        for config_consumer in [self] + self.config_consumers:
            setattr(config_consumer, 'det_classes', det_classes)
            setattr(config_consumer, 'xtc_info', xtc_info)
            setattr(config_consumer, 'det_info_table', det_info_table)
            setattr(config_consumer, 'det_stream_id_table', det_stream_id_table)

    def _set_configinfo(self):
        """ From configs, we generate a dictionary lookup with det_name as a key.
        The information stored the value field contains:
        - configs specific to that detector
        - sorted_segment_ids used by Detector cls for checking if an event has
          correct no. of segments
        - detid_dict has segment_id as a key
        - dettype
        - uniqueid
        """
        configinfo_dict = {}
        for detcls_name, det_class in self.det_classes.items():
            # det_class is either normal or envstore ('epics', 'scan', 'step')
            for (det_name, _), _ in det_class.items():
                # we lose a "one-to-one" correspondence with event dgrams. we may have
                # to put in None placeholders at some point? - mona and cpo
                det_configs = [cfg for cfg in self.configs
                               if hasattr(cfg.software, det_name)]
                sorted_segment_ids = []
                # a dictionary of the ids (a.k.a. serial-number) of each segment
                detid_dict = {}
                dettype = ""
                uniqueid = ""
                for config in det_configs:
                    seg_dict = getattr(config.software, det_name)
                    sorted_segment_ids += list(seg_dict.keys())
                    for segment, det in seg_dict.items():
                        detid_dict[segment] = det.detid
                        dettype = det.dettype
                sorted_segment_ids.sort()
                uniqueid = dettype
                for segid in sorted_segment_ids:
                    uniqueid += '_' + detid_dict[segid]
                configinfo_dict[det_name] = type("ConfigInfo", (), {
                    "configs": det_configs,
                    "sorted_segment_ids": sorted_segment_ids,
                    "detid_dict": detid_dict,
                    "dettype": dettype,
                    "uniqueid": uniqueid})
        for config_consumer in [self] + self.config_consumers:
            setattr(config_consumer, 'configinfo_dict', configinfo_dict)

    def set_chunk_ids(self):
        """ Generates a list of chunk ids for all stream files

        Chunk Id is extracted from data file name (e.g.
        xpptut15-r0001-s000-c000[.smd].xtc2). This is a string of length
        three after -c converted to int.
        """
        if len(self.xtc_files) == 0:
            return
        if self.xtc_files[0] == 'shmem':
            return
        for xtc_file in self.xtc_files:
            filename = os.path.basename(xtc_file)
            found = filename.find('-c')
            if found >= 0:
                self.chunk_ids.append(int(filename[found + 2:found + 5]))

    def get_chunk_id(self, ind):
        """Return the chunk id of stream ind, or None when no ids were parsed."""
        if not self.chunk_ids:
            return None
        return self.chunk_ids[ind]

    def set_chunk_id(self, ind, new_chunk_id):
        self.chunk_ids[ind] = new_chunk_id

    def close(self):
        """Close file descriptors we opened (never caller-supplied ones)."""
        if not self.given_fds:
            for fd in self.fds:
                os.close(fd)

    def __iter__(self):
        return self

    def _check_missing_endrun(self, beginruns=None):
        """Return fake EndRun dgrams if the previous run never ended.

        When a new BeginRun (or end of data) arrives while found_endrun is
        False, fabricate one EndRun per config stamped just after the last
        seen timestamp; the real BeginRuns are buffered for the next call
        to __next__. Returns None when no fake EndRuns are needed.
        """
        fake_endruns = None
        if not self.found_endrun:  # there's no previous EndRun
            sec = (self._timestamps[-1] >> 32) & 0xffffffff
            # +1 ms so the fake EndRun sorts after the last real event
            usec = int((self._timestamps[-1] & 0xffffffff) * 1e3 + 1)
            if beginruns:
                self.buffered_beginruns = [
                    dgram.Dgram(config=config, view=d, offset=0, size=d._size)
                    for d, config in zip(beginruns, self.configs)
                ]
            fake_endruns = [dgram.Dgram(config=config, fake_endrun=1,
                                        fake_endrun_sec=sec, fake_endrun_usec=usec)
                            for config in self.configs]
            self.found_endrun = True
        else:
            self.found_endrun = False
        return fake_endruns

    def __next__(self):
        """ only support sequential read - no event building"""
        if self.buffered_beginruns:
            self.found_endrun = False
            # FIX: was run=self.run(), but this class only defines
            # get_run()/set_run(); self.run() raised AttributeError.
            evt = Event(self.buffered_beginruns, run=self.get_run())
            self._timestamps += [evt.timestamp]
            self.buffered_beginruns = []
            return evt

        if self.shmem_cli:
            view = self.shmem_cli.get(self.shmem_kwargs)
            if view:
                # Release shmem buffer after copying Transition data
                # cpo: copy L1Accepts too because some shmem
                # applications like AMI's pickN can hold references
                # to dgrams for a long time, consuming the shmem buffers
                # and creating a deadlock situation. could revisit this
                # later and only deep-copy arrays inside pickN, for example
                # but would be more fragile.
                barray = bytes(view[:_dgSize(view)])
                self.shmem_cli.freeByIndex(self.shmem_kwargs['index'],
                                           self.shmem_kwargs['size'])
                view = memoryview(barray)
                # use the most recent configure datagram
                config = self.configs[len(self.configs) - 1]
                d = dgram.Dgram(config=config, view=view)
            else:
                # Server went away: reconnect and expect a new Configure.
                view = self._connect_shmem_cli(self.tag)
                config = self.configs[len(self.configs) - 1]
                d = dgram.Dgram(config=config, view=view)
                if d.service() == TransitionId.Configure:
                    self._set_configs([d])
                else:
                    raise RuntimeError(f"Configure expected, got {d.service()}")
            dgrams = [d]
        else:
            try:
                dgrams = [dgram.Dgram(config=config, max_retries=self.max_retries)
                          for config in self.configs]
            except StopIteration as err:
                # End of files: emit fake EndRuns if a run is still open.
                fake_endruns = self._check_missing_endrun()
                if fake_endruns:
                    dgrams = fake_endruns
                else:
                    print(err)
                    raise StopIteration

        # Check BeginRun - EndRun pairing
        service = dgrams[0].service()
        if service == TransitionId.BeginRun:
            fake_endruns = self._check_missing_endrun(beginruns=dgrams)
            if fake_endruns:
                dgrams = fake_endruns
        if service == TransitionId.EndRun:
            self.found_endrun = True
        if service == TransitionId.Configure:
            # New configure replaces the tables, then keep reading.
            self._set_configs(dgrams)
            return self.__next__()

        evt = Event(dgrams, run=self.get_run())
        self._timestamps += [evt.timestamp]
        return evt

    def jumps(self, dgram_i, offset, size):
        """Read one dgram at (offset, size) from stream dgram_i.

        Returns None for the (0, 0) placeholder or when the read raises
        StopIteration.
        """
        if offset == 0 and size == 0:
            d = None
        else:
            try:
                d = dgram.Dgram(file_descriptor=self.fds[dgram_i],
                                config=self.configs[dgram_i],
                                offset=offset,
                                size=size,
                                max_retries=self.max_retries)
            except StopIteration:
                d = None
        return d

    def jump(self, offsets, sizes):
        """ Jumps to the offset and reads out dgram on each xtc file.
        This is used in normal mode (multiple detectors with MPI).
        """
        assert len(offsets) > 0 and len(sizes) > 0
        dgrams = [self.jumps(dgram_i, offset, size)
                  for dgram_i, (offset, size) in enumerate(zip(offsets, sizes))]
        evt = Event(dgrams, run=self._run)
        return evt

    def get_timestamps(self):
        # return numpy array for easy search later
        return np.asarray(self._timestamps, dtype=np.uint64)

    def set_run(self, run):
        self._run = run

    def get_run(self):
        return self._run