def readUSANSNexus(input_file, file_obj=None, metadata_lookup=metadata_lookup, det_deadtime=0.0, trans_deadtime=0.0):
    """
    Load all entries from a USANS NeXus file into USansData sets.

    Parameters
    ----------
    input_file : path of the NeXus file (possibly zipped)
    file_obj : optional open file object, passed through to h5_open_zip
    metadata_lookup : kept for interface compatibility with the other
        readers (not consulted here; USANS metadata paths are fixed)
    det_deadtime : dead time for the linear detector correction
    trans_deadtime : dead time for the transmission detector correction

    Returns
    -------
    list of USansData, one per entry in the file.
    """
    nexus = h5_open_zip(input_file, file_obj)
    results = []
    for name, entry in nexus.items():
        das = entry["DAS_logs"]
        metadata = OrderedDict([
            ("run.filename", _s(das["trajectoryData/fileName"][0])),
            ("analysis.intent", _s(das["trajectoryData/intent"][0])),
            ("sample.name", _s(das["sample/name"][0])),
            ("run.filePrefix", _s(das["trajectoryData/filePrefix"][0])),
            ("run.instFileNum", int(das["trajectoryData/instFileNum"][0])),
            ("start_time", _s(entry["start_time"][0])),
            ("end_time", _s(entry["end_time"][0])),
            ("entry", _s(name)),
            ("dQv", 0.117),  # constant of the instrument. Should it be in the nexus def?
        ])
        live = das["counter/liveTime"][()]
        raw_det = das["linearDetector/counts"][()]
        raw_trans = das["transDetector/counts"][()]
        # Dead-time correction n_true = n_obs / (1 - n_obs*tau/t) per tube,
        # then sum over the tube axis.
        detCts = (raw_det / (1.0 - (raw_det * det_deadtime / live[:, None]))).sum(axis=1)
        transCts = (raw_trans / (1.0 - (raw_trans * trans_deadtime / live[:, None]))).sum(axis=1)
        monCts = das["counter/liveMonitor"][()]
        Q = das["analyzerRotation/softPosition"][()]
        results.append(USansData(
            metadata=metadata,
            countTime=live,
            detCts=detCts,
            transCts=transCts,
            monCts=monCts,
            Q=Q,
        ))
    return results
def readSANSNexuz(input_file, file_obj=None, metadata_lookup=metadata_lookup):
    """
    Load all entries from the NeXus file into sans data sets.

    Parameters
    ----------
    input_file : path of the NeXus file (possibly zipped)
    file_obj : optional open file object, passed through to h5_open_zip
    metadata_lookup : mapping of metadata keys to HDF5 paths, forwarded
        to load_metadata

    Returns
    -------
    list of RawSANSData, one per (entry, frame) pair.
    """
    datasets = []
    file = h5_open_zip(input_file, file_obj)
    for entryname, entry in file.items():
        multiplicity = 1
        for i in range(multiplicity):
            metadata = load_metadata(entry, multiplicity, i,
                                     metadata_lookup=metadata_lookup,
                                     unit_specifiers=unit_specifiers)
            detector_keys = ['detector']
            detectors = {k: load_detector(entry['instrument'][k])
                         for k in detector_keys}
            metadata['entry'] = entryname
            # hack to remove configuration from sample label (it is still
            # stored in run.configuration).  Guard against missing fields,
            # consistent with readVSANSNexuz — previously a file without
            # sample.labl or run.configuration raised here.
            if metadata.get('sample.labl', None) is not None and metadata.get(
                    'run.configuration', None) is not None:
                metadata['sample.description'] = _s(
                    metadata["sample.labl"]).replace(
                        _s(metadata["run.configuration"]), "")
            dataset = RawSANSData(metadata=metadata, detectors=detectors)
            datasets.append(dataset)
    return datasets
def readSANSNexuz_old(input_file, file_obj=None):
    """
    Load all entries from the NeXus file into sans data sets.

    Older reader that slices the 2-D/3-D areaDetector array directly
    instead of going through load_metadata/load_detector.

    Parameters
    ----------
    input_file : path of the NeXus file (possibly zipped)
    file_obj : optional open file object, passed through to h5_open_zip

    Returns
    -------
    list of SansData, one per detector frame.

    Raises
    ------
    ValueError : if areaDetector is not 2- or 3-dimensional.
    """
    datasets = []
    file = h5_open_zip(input_file, file_obj)
    for entryname, entry in file.items():
        # `[()]` replaces the deprecated h5py `.value` accessor, matching
        # the other readers in this file.
        areaDetector = entry['data/areaDetector'][()]
        shape = areaDetector.shape
        if len(shape) < 2 or len(shape) > 3:
            # fix: removed the unreachable `return` that followed this raise
            raise ValueError("areaDetector data must have dimension 2 or 3")
        if len(shape) == 2:
            # add another dimension at the front so both cases index alike
            shape = (1, ) + shape
            areaDetector = areaDetector.reshape(shape)
        for i in range(shape[0]):
            metadata = {}
            for mkey in metadata_lookup:
                field = entry.get(metadata_lookup[mkey], None)
                if field is not None:
                    if mkey in unit_specifiers:
                        field = data_as(field, unit_specifiers[mkey])
                    else:
                        field = field[()]
                    if field.dtype.kind == 'f':
                        field = field.astype("float")
                    elif field.dtype.kind == 'i':
                        field = field.astype("int")
                    # per-frame fields carry one value per detector frame;
                    # otherwise store the whole field
                    if len(field) == shape[0]:
                        metadata[mkey] = field[i]
                    else:
                        metadata[mkey] = field
                else:
                    metadata[mkey] = field
            metadata['entry'] = entryname
            dataset = SansData(data=areaDetector[i].copy(), metadata=metadata)
            datasets.append(dataset)
    return datasets
def readVSANSNexuz(input_file, file_obj=None, metadata_lookup=metadata_lookup):
    """
    Load all entries from the VSANS NeXus file into raw sans data sets.

    Parameters
    ----------
    input_file : path of the NeXus file (possibly zipped); also used as
        the fallback run.filename when the file does not record one
    file_obj : optional open file object, passed through to h5_open_zip
    metadata_lookup : mapping of metadata keys to HDF5 paths, forwarded
        to load_metadata

    Returns
    -------
    list of RawVSANSData, one per (entry, frame) pair.
    """
    results = []
    handle = h5_open_zip(input_file, file_obj)
    for entryname, entry in handle.items():
        multiplicity = 1
        for frame in range(multiplicity):
            metadata = load_metadata(entry, multiplicity, frame,
                                     metadata_lookup=metadata_lookup,
                                     unit_specifiers=unit_specifiers)
            # VSANS has several detector banks, all named detector_*.
            instrument = entry['instrument']
            detectors = {bank: load_detector(instrument[bank])
                         for bank in instrument
                         if bank.startswith('detector_')}
            metadata['entry'] = entryname
            label = metadata.get('sample.labl', None)
            config = metadata.get('run.configuration', None)
            if label is not None and config is not None:
                # strip the configuration from the sample label; the raw
                # configuration is still stored in run.configuration
                metadata['sample.description'] = _s(label).replace(_s(config), "")
            if metadata.get('run.filename', None) is None:
                metadata['run.filename'] = input_file
            results.append(RawVSANSData(metadata=metadata, detectors=detectors))
    return results
def load_nexus_entries(filename, file_obj=None, entries=None, meta_only=False,
                       entry_loader=None):
    """
    Load the summary info for all entries in a NeXus file.

    Parameters
    ----------
    filename : path of the NeXus file (possibly zipped)
    file_obj : optional already-open file object; when provided, the
        handle is NOT closed here — the caller owns it
    entries : optional collection of entry names to load; None loads all
    meta_only : when True, skip the data load step for each entry
    entry_loader : callable(entry, name, filename) -> data object.
        Required in practice — the None default will raise TypeError if
        left unset.

    Returns
    -------
    list of loaded data objects, one per selected NXentry group.
    """
    handle = h5_open.h5_open_zip(filename, file_obj)
    measurements = []
    try:
        for name, entry in handle.items():
            if entries is not None and name not in entries:
                continue
            # only NXentry groups hold measurements; skip everything else
            if _s(entry.attrs.get('NX_class', None)) == 'NXentry':
                data = entry_loader(entry, name, filename)
                if not meta_only:
                    data.load(entry)
                measurements.append(data)
    finally:
        # fix: close our own handle even when a loader raises
        # (previously the handle leaked on any exception)
        if file_obj is None:
            handle.close()
    return measurements