def save_temporary_test_data(mpl_datasets):
    '''save temporary test data sets'''
    from spec2nexus import eznx

    hdf5_file = os.path.join(localConfig.LOCAL_WWW_LIVEDATA_DIR, 'testdata.h5')
    f = eznx.makeFile(hdf5_file)
    for i, ds in enumerate(mpl_datasets):
        nxentry = eznx.makeGroup(f, 'entry_' + str(i), 'NXentry')
        eznx.makeDataset(nxentry, "title", ds.label)
        nxdata = eznx.makeGroup(nxentry, 'data', 'NXdata', signal='R', axes='Q')
        eznx.makeDataset(nxdata, "Q", ds.Q, units='1/A')
        eznx.makeDataset(nxdata, "R", ds.I, units='a.u.')
    f.close()
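# Hypothetical caller sketch for save_temporary_test_data(), showing the minimal
# interface assumed above: each item in mpl_datasets only needs .label, .Q, and
# .I attributes.  The MplDataset container and the synthetic curve are
# illustrative assumptions; the function's module-level os/localConfig
# references must already be satisfied for the call to succeed.
from collections import namedtuple

import numpy as np

MplDataset = namedtuple('MplDataset', 'label Q I')    # hypothetical container

Q = np.logspace(-2, 0, 50)              # 1/A
I = 1.0 / (1.0 + (30 * Q) ** 4)         # a.u., made-up intensity curve
save_temporary_test_data([MplDataset('demo curve', Q, I)])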
def process_SAScollimation(self, xml_parent, nx_parent):
    '''
    process any SAScollimation groups

    Should these be NXslit instead?
    '''
    xml_node_list = xml_parent.findall('cs:SAScollimation', self.ns)
    for i, sas_group in enumerate(xml_node_list):
        if isinstance(sas_group.tag, str):    # avoid XML Comments
            nm = sas_group.attrib.get('name', 'sascollimation')
            nm_clean = self.unique_name(nm, nx_parent)
            nxcoll = eznx.makeGroup(nx_parent, nm_clean, 'NXcollimator',
                                    canSAS_class='SAScollimation',
                                    canSAS_name=nm)

            # note: canSAS aperture does not map well into NXcollimator
            # might be better under SASinstrument but this is the defined location
            self.process_aperture(sas_group, nxcoll)

            for xmlnode in sas_group:
                if isinstance(xmlnode.tag, str):    # avoid XML Comments
                    if xmlnode.tag.endswith('}length'):
                        ds = self.field_float(xmlnode, nxcoll)
                        comment = 'Amount/length of collimation inserted (on a SANS instrument)'
                        eznx.addAttributes(ds, comment=comment)
                    elif xmlnode.tag.endswith('}aperture'):
                        pass    # handled above
                    else:
                        self.process_unexpected_xml_element(xmlnode, nxcoll)
def process_aperture(self, xml_parent, nx_parent):
    '''process an aperture XML element'''
    # note: canSAS aperture does not map well into NXcollimator
    # could be NXpinhole, NXslit, or NXaperture
    xml_node_list = xml_parent.findall('cs:aperture', self.ns)
    for i, xml_group in enumerate(xml_node_list):
        if isinstance(xml_group.tag, str):    # avoid XML Comments
            nm = xml_group.attrib.get('name', 'aperture')
            nm_clean = self.unique_name(nm, nx_parent)
            # treat ALL as generic NXaperture
            nxaperture = eznx.makeGroup(nx_parent, nm_clean, 'NXaperture',
                                        canSAS_class='aperture',
                                        canSAS_name=nm)
            shape = xml_group.attrib.get('type', 'not specified')
            eznx.makeDataset(nxaperture, 'shape', shape)

            for xmlnode in xml_group:
                if isinstance(xmlnode.tag, str):    # avoid XML Comments
                    if xmlnode.tag.endswith('}size'):
                        self.axis_values(xmlnode, nxaperture, '%s_gap')
                    elif xmlnode.tag.endswith('}distance'):
                        self.field_float(xmlnode, nxaperture)
                    else:
                        self.process_unexpected_xml_element(xmlnode, nxaperture)
def process_collection_group(self, xml_parent, nx_parent):
    '''
    process any collection group XML element

    In NXcollection, the content does not have to be NeXus.
    Could use plain hdf5 groups and datasets, both with attributes.
    But, it's more consistent to stay in NeXus structures,
    so nest NXcollections.
    '''
    if len(xml_parent) == 0:
        # just a text field, don't assume it is a number
        tag = ns_strip(xml_parent)
        nm = self.unique_hdf5_name(nx_parent, xml_parent, tag)
        ds = self.field_text(xml_parent, nx_parent, node_name=nm)
        if ds is not None:
            eznx.addAttributes(ds, tag=tag)
            self.copy_attributes(xml_parent, nx_parent)
    else:
        for xmlnode in xml_parent:
            if len(xmlnode) == 0:
                self.process_collection_group(xmlnode, nx_parent)
            else:
                tag = ns_strip(xmlnode)
                nm = xmlnode.attrib.get('name', tag)
                nm = self.unique_hdf5_name(nx_parent, xmlnode, nm)
                nm_clean = self.unique_name(nm, nx_parent)
                nxgroup = eznx.makeGroup(nx_parent, nm_clean, 'NXcollection',
                                         canSAS_name=nm)
                self.copy_attributes(xmlnode, nxgroup)
                eznx.addAttributes(nxgroup, tag=tag)
                self.process_collection_group(xmlnode, nxgroup)
def process_SASinstrument(self, sasinstrument, nx_parent):
    '''process the SASinstrument group, should be ONLY one'''
    nm = sasinstrument.attrib.get('name', 'sasinstrument')
    nm_clean = self.unique_name(nm, nx_parent)
    nxinstrument = eznx.makeGroup(nx_parent, nm_clean, 'NXinstrument',
                                  canSAS_class='SASinstrument',
                                  canSAS_name=nm)

    # process the groups that may appear more than once
    self.process_SAScollimation(sasinstrument, nxinstrument)
    self.process_SASdetector(sasinstrument, nxinstrument)

    for xmlnode in sasinstrument:
        tag = str(xmlnode.tag)
        if tag.endswith('}name'):
            self.field_text(xmlnode, nxinstrument)
        elif tag.endswith('}SASsource'):
            self.process_SASsource(xmlnode, nxinstrument)
        elif tag.endswith('}SAScollimation'):
            pass    # handled above
        elif tag.endswith('}SASdetector'):
            pass    # handled above
        else:
            self.process_unexpected_xml_element(xmlnode, nxinstrument)
def test_makeExternalLink(self):
    external = eznx.makeFile('external.h5', creator='eznx', default='entry')
    eznx.write_dataset(external, "text", "some text")

    root = eznx.makeFile('test.h5', creator='eznx', default='entry')
    nxentry = eznx.makeGroup(root, 'entry', 'NXentry', default='data')
    eznx.makeExternalLink(
        root, 'external.h5', "/text", nxentry.name + "/external_text")

    # check the external file first
    with h5py.File("external.h5", "r") as hp:
        root = hp["/"]
        self.assertTrue("text" in root)
        ds = root["text"]
        value = ds[()]    # ds.value deprecated in h5py
        self.assertEqual(value, [b"some text"])

    # check the file with the external link
    with h5py.File("test.h5", "r") as hp:
        root = hp["/"]
        nxentry = root["entry"]
        self.assertTrue("external_text" in nxentry)
        value = eznx.read_nexus_field(nxentry, "external_text")
        self.assertEqual(value, b"some text")
        value = eznx.read_nexus_field(nxentry, "external_text", astype=str)
        self.assertEqual(value, "some text")
def process_SAStransmission_spectrum(self, xml_parent, nx_parent):
    '''
    process any SAStransmission_spectrum groups

    These are handled similar to SASdata but with different nouns

    Shouldn't this be located (in NeXus) at /NXentry/NXsample/transmission?
    '''
    nx_node_list = []
    xml_node_list = xml_parent.findall('cs:SAStransmission_spectrum', self.ns)
    for i, sas_ts in enumerate(xml_node_list):
        nm = 'transmission_spectrum'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        nm_clean = self.unique_name(nm, nx_parent)
        nxdata = eznx.makeGroup(nx_parent, nm_clean, 'NXdata',
                                canSAS_class='SAStransmission_spectrum',
                                )
        nm = sas_ts.attrib.get('name')
        if nm is not None:
            eznx.addAttributes(nxdata, name=nm)
        nx_node_list.append(nxdata)

        # collect the data arrays
        data = {}
        units = {}
        for xmlnode in sas_ts:
            if isinstance(xmlnode.tag, str):    # avoid XML Comments
                if str(xmlnode.tag).endswith('}Tdata'):
                    for xmldata in xmlnode:
                        try:
                            tag = ns_strip(xmldata)
                        except AttributeError as _exc:
                            continue    # an XML comment triggered this
                        if tag not in data:
                            data[tag] = []
                            units[tag] = xmldata.get('unit', 'none')
                        data[tag].append(xmldata.text)
                else:
                    self.process_unexpected_xml_element(xmlnode, nxdata)

        # write the data arrays
        nx_obj = {}
        for nm, arr in data.items():
            try:
                # list() matters under Python 3, where map() returns an iterator
                nx_obj[nm] = eznx.makeDataset(nxdata, nm,
                                              list(map(float, arr)),
                                              units=units[nm])
            except (TypeError, ValueError) as _exc:
                pass    # this column is not numeric

        # set the NeXus plottable data attributes
        if 'T' in data:
            eznx.addAttributes(nxdata, signal='T')
        if 'Lambda' in data:
            eznx.addAttributes(nxdata, axes='Lambda')                 # NeXus
        if 'Tdev' in data:
            eznx.addAttributes(nx_obj['T'], uncertainties='Tdev')     # NeXus

    return nx_node_list
def _prepare_to_acquire(self):
    '''connect to EPICS and create the HDF5 file and structure'''
    # connect to EPICS PVs
    for pv_spec in pv_registry.values():
        pv_spec.pv = epics.PV(pv_spec.pvname)

    # create the file
    for key, xture in sorted(group_registry.items()):
        if key == '/':
            # create the file and internal structure
            f = eznx.makeFile(
                self.hdf5_file_name,
                # the following are attributes to the root element of the HDF5 file
                file_name=self.hdf5_file_name,
                creator=__file__,
                creator_version=self.creator_version,
                creator_config_file=self.config_file,
                HDF5_Version=h5py.version.hdf5_version,
                h5py_version=h5py.version.version,
            )
            xture.hdf5_group = f
        else:
            hdf5_parent = xture.group_parent.hdf5_group
            xture.hdf5_group = eznx.makeGroup(hdf5_parent, xture.name, xture.nx_class)
        eznx.addAttributes(xture.hdf5_group, **xture.attrib)

    for field in field_registry.values():
        ds = eznx.makeDataset(field.group_parent.hdf5_group, field.name, [field.text])
        eznx.addAttributes(ds, **field.attrib)
def writer(self, h5parent, writer, scan, nxclass=None, *args, **kws):
    '''Describe how to write VE data'''
    desc = "XPCS VE parameters"
    group = makeGroup(h5parent, 'VE', nxclass, description=desc)
    dd = {}
    for item, value in scan.VE.items():
        # list() matters under Python 3, where map() returns an iterator
        dd[item] = list(map(str, value.split()))
    writer.save_dict(group, dd)
def writer(self, h5parent, writer, scan, nxclass=None, *args, **kwargs):
    '''Describe how to write X data'''
    desc = 'Sector4 Experiment Parameters'
    group = makeGroup(h5parent, 'X', nxclass, description=desc)
    dd = {}
    for item, value in scan.X.items():
        # list() matters under Python 3, where map() returns an iterator
        dd[item] = list(map(str, value.split()))
    writer.save_dict(group, dd)
def writer(self, h5parent, writer, scan, nxclass=None, *args, **kws):
    '''write the data in a NeXus group named ``unrecognized``'''
    desc = "SPEC data file control lines not otherwise recognized"
    nxclass = 'NXnote'
    group = makeGroup(h5parent, 'unrecognized', nxclass, description=desc)
    dd = OrderedDict()
    for i, value in enumerate(scan._unrecognized):
        dd['u' + str(i)] = value
    writer.save_dict(group, dd)
def writer(self, h5parent, writer, scan, nxclass=None, *args, **kws):
    '''Describe how to store this data in an HDF5 NeXus file'''
    # e.g.: SPECD/four.mac
    # http://certif.com/spec_manual/fourc_4_9.html
    desc = "SPEC geometry arrays, meanings defined by SPEC diffractometer support"
    group = makeGroup(h5parent, 'G', nxclass, description=desc)
    dd = {}
    for item, value in scan.G.items():
        # list() matters under Python 3, where map() returns an iterator
        dd[item] = list(map(float, value.split()))
    writer.save_dict(group, dd)
def process_SASentry(self, xml_parent, nx_parent):
    '''process any SASentry groups'''
    nx_node_list = []
    xml_node_list = xml_parent.findall('cs:SASentry', self.ns)
    for i, sasentry in enumerate(xml_node_list):
        nm = 'sasentry'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        nm = sasentry.attrib.get('name', nm)
        nm_clean = self.unique_name(nm, nx_parent)
        nxentry = eznx.makeGroup(nx_parent, nm_clean, 'NXentry',
                                 canSAS_class='SASentry',
                                 canSAS_name=nm)
        nx_node_list.append(nxentry)
        eznx.makeDataset(nxentry, 'definition', 'NXcanSAS')

        # process the groups that may appear more than once
        group_list = self.process_SASdata(sasentry, nxentry)
        if len(group_list) > 0:
            default = group_list[0].name.split('/')[-1]
            eznx.addAttributes(nxentry, default=default)
        self.process_Run(sasentry, nxentry)
        self.process_SAStransmission_spectrum(sasentry, nxentry)
        self.process_SASprocess(sasentry, nxentry)
        self.process_SASnote(sasentry, nxentry)

        # process any other items
        for xmlnode in sasentry:
            tag = str(xmlnode.tag)
            if tag.endswith('}Title'):
                self.field_text(xmlnode, nxentry, node_name='title')
            elif tag.endswith('}Run'):
                pass    # handled above
            elif tag.endswith('}SASdata'):
                pass    # handled above
            elif tag.endswith('}SASsample'):
                self.process_SASsample(xmlnode, nxentry)
            elif tag.endswith('}SASinstrument'):
                self.process_SASinstrument(xmlnode, nxentry)
            elif tag.endswith('}SASprocess'):
                pass    # handled above
            elif tag.endswith('}SASnote'):
                pass    # handled above
            elif tag.endswith('}SAStransmission_spectrum'):
                pass    # handled above
            else:
                self.process_unexpected_xml_element(xmlnode, nxentry)

    return nx_node_list
def writer(self, h5parent, writer, header, nxclass=None, *args, **kws):
    '''Describe how to store this data in an HDF5 NeXus file'''
    if not hasattr(header, 'positioner_xref'):
        header.positioner_xref = {}    # mnemonic:name
    desc = 'cross-reference SPEC positioner mnemonics and names'
    comment = 'keys are SPEC positioner mnemonics, values are SPEC positioner names'
    if nxclass is None:
        nxclass = CONTAINER_CLASS
    group = makeGroup(h5parent, "positioner_cross_reference", nxclass,
                      description=desc, comment=comment)
    for key, value in sorted(header.positioner_xref.items()):
        write_dataset(group, key, value)
def test_write_dataset_existing(self):
    root = eznx.makeFile('test.h5', creator='eznx', default='entry')
    nxentry = eznx.makeGroup(root, 'entry', 'NXentry', default='data')
    eznx.write_dataset(nxentry, "text", "some text")
    eznx.write_dataset(nxentry, "text", "replacement text")

    with h5py.File("test.h5", "r") as hp:
        root = hp["/"]
        nxentry = root["entry"]
        self.assertTrue("text" in nxentry)
        value = eznx.read_nexus_field(nxentry, "text", astype=str)
        self.assertEqual(value, "replacement text")
def process_SASprocess(self, xml_parent, nx_parent):
    '''process any SASprocess groups'''
    xml_node_list = xml_parent.findall('cs:SASprocess', self.ns)
    for i, xml_group in enumerate(xml_node_list):
        nm = 'sasprocess'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        nm = xml_group.attrib.get('name', nm)
        nm_clean = self.unique_name(nm, nx_parent)
        nxprocess = eznx.makeGroup(nx_parent, nm_clean, 'NXprocess',
                                   canSAS_class='SASprocess',
                                   canSAS_name=nm)

        term_counter = 0
        for xmlnode in xml_group:
            if isinstance(xmlnode.tag, str):    # avoid XML Comments
                if xmlnode.tag.endswith('}name'):
                    self.field_text(xmlnode, nxprocess)
                elif xmlnode.tag.endswith('}date'):
                    # TODO: test for ISO-8601?
                    # need to convert from arbitrary representations
                    #   01-DEC-2008 04:30:25
                    #   1-Jul-1998 14:57:37
                    #   04-Sep-2007 18:12:27
                    #   Tue, May 20, 2008 1:39:23 PM
                    #   Tue, Aug 21, 2007
                    #   1999-01-04 20:15:45
                    #   1999-01-04T20:15:45
                    self.field_text(xmlnode, nxprocess)
                elif xmlnode.tag.endswith('}description'):
                    self.field_text(xmlnode, nxprocess)
                elif xmlnode.tag.endswith('}term'):
                    nm = 'term_' + str(term_counter)
                    term_counter += 1
                    ds = self.field_text(xmlnode, nxprocess, node_name=nm)
                    self.copy_attributes(xmlnode, ds)
                    units = xmlnode.attrib.get('unit')
                    if units is not None:
                        eznx.addAttributes(ds, units=units)
                        del ds.attrs['unit']    # remove the canSAS singular name
                elif xmlnode.tag.endswith('}SASprocessnote'):
                    pass    # handled below
                else:
                    self.process_unexpected_xml_element(xmlnode, nxprocess)

        self.process_SASprocessnote(xml_group, nxprocess)
def test_create_dataset_None(self):
    root = eznx.makeFile('test.h5', creator='eznx', default='entry')
    nxentry = eznx.makeGroup(root, 'entry', 'NXentry', default='data')
    ds = eznx.makeDataset(nxentry, "data_is_None", None)

    with h5py.File("test.h5", "r") as hp:
        root = hp["/"]
        nxentry = root["entry"]
        self.assertTrue("data_is_None" in nxentry)

        ds = nxentry["data_is_None"]
        value = ds[()]    # ds.value deprecated in h5py
        self.assertEqual(len(value), 0)
        self.assertEqual(value, "")
        self.assertTrue("NOTE" in ds.attrs)
        note = "no data supplied, value set to empty string"
        self.assertEqual(ds.attrs["NOTE"], note)
def process_SASnote(self, xml_parent, nx_parent):
    '''process any SASnote groups'''
    xml_node_list = xml_parent.findall('cs:SASnote', self.ns)
    for i, xml_group in enumerate(xml_node_list):
        nm = 'sasnote'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        nm = xml_group.attrib.get('name', nm)
        nm_clean = self.unique_name(nm, nx_parent)
        nxnote = eznx.makeGroup(nx_parent, nm_clean, 'NXnote',
                                canSAS_class='SASnote',
                                canSAS_name=nm)
        self.process_collection_group(xml_group, nxnote)
def process_SASdetector(self, xml_parent, nx_parent):
    '''process any SASdetector groups'''
    xml_node_list = xml_parent.findall('cs:SASdetector', self.ns)
    for i, sas_group in enumerate(xml_node_list):
        if isinstance(sas_group.tag, str):    # avoid XML Comments
            nm = sas_group.attrib.get('name', 'sasdetector')
            nm_clean = self.unique_name(nm, nx_parent)
            nxdetector = eznx.makeGroup(nx_parent, nm_clean, 'NXdetector',
                                        canSAS_class='SASdetector',
                                        canSAS_name=nm)

            for xmlnode in sas_group:
                if isinstance(xmlnode.tag, str):    # avoid XML Comments
                    if xmlnode.tag.endswith('}name'):
                        eznx.makeDataset(nxdetector, 'name', (xmlnode.text or '').strip())
                    elif xmlnode.tag.endswith('}SDD'):
                        ds = self.field_float(xmlnode, nxdetector)
                        comment = 'Distance between sample and detector'
                        eznx.addAttributes(ds, comment=comment)
                    elif xmlnode.tag.endswith('}offset'):
                        self.axis_values(xmlnode, nxdetector, '%s_position')
                    elif xmlnode.tag.endswith('}orientation'):
                        self.axis_values(xmlnode, nxdetector)
                    elif xmlnode.tag.endswith('}beam_center'):
                        self.axis_values(xmlnode, nxdetector, 'beam_center_%s')
                    elif xmlnode.tag.endswith('}pixel_size'):
                        self.axis_values(xmlnode, nxdetector, '%s_pixel_size')
                    elif xmlnode.tag.endswith('}slit_length'):
                        ds = self.field_float(xmlnode, nxdetector)
                        comment = 'Slit length of the instrument for this detector, '
                        comment += 'expressed in the same units as Q'
                        eznx.addAttributes(ds, comment=comment)
                    else:
                        self.process_unexpected_xml_element(xmlnode, nxdetector)
def process_SASsample(self, xml_parent, nx_parent):
    '''process the SASsample group, should be ONLY one'''
    nm = xml_parent.attrib.get('name', 'sassample')
    nm_clean = self.unique_name(nm, nx_parent)
    nxsample = eznx.makeGroup(nx_parent, nm_clean, 'NXsample',
                              canSAS_class='SASsample',
                              canSAS_name=nm)
    self.copy_attributes(xml_parent, nx_parent)

    details = []    # report all *details* in a single multi-line string
    for xmlnode in xml_parent:
        tag = str(xmlnode.tag)
        if tag.endswith('}ID'):
            if xmlnode.text is None:
                text = ''
            else:
                text = xmlnode.text.strip()
            eznx.makeDataset(nxsample, 'ID', text)
        elif tag.endswith('}thickness'):
            self.field_float(xmlnode, nxsample, default_units='none')
        elif tag.endswith('}transmission'):
            self.field_float(xmlnode, nxsample, default_units='dimensionless')
        elif tag.endswith('}temperature'):
            self.field_float(xmlnode, nxsample, default_units='unknown')
        elif tag.endswith('}position'):
            self.axis_values(xmlnode, nxsample, '%s_position')
        elif tag.endswith('}orientation'):
            self.axis_values(xmlnode, nxsample)
        elif tag.endswith('}details'):
            details.append(xmlnode.text)
        else:
            self.process_unexpected_xml_element(xmlnode, nxsample)

    if len(details) > 0:
        eznx.makeDataset(nxsample, 'details', '\n'.join(details))
def process_SASsource(self, sassource, nx_parent):
    '''process the SASsource group, should be ONLY one'''
    nm = sassource.attrib.get('name', 'sassource')
    nm_clean = self.unique_name(nm, nx_parent)
    nxsource = eznx.makeGroup(nx_parent, nm_clean, 'NXsource',
                              canSAS_class='SASsource',
                              canSAS_name=nm)

    for xmlnode in sassource:
        if isinstance(xmlnode.tag, str):    # avoid XML Comments
            tag = str(xmlnode.tag)
            if tag.endswith('}radiation'):
                self.field_text(xmlnode, nxsource)
            elif tag.endswith('}beam_size'):
                for subnode in xmlnode:
                    nm = ns_strip(subnode).lower()
                    if nm not in ('x', 'y'):
                        msg = 'unexpected tag: ' + subnode.tag
                        msg += '\n in SASsource group in file: ' + self.xmlFile
                        raise ValueError(msg)
                    self.field_float(subnode, nxsource, node_name='beam_size_' + nm)
            elif xmlnode.tag.endswith('}beam_shape'):
                self.field_text(xmlnode, nxsource)
            elif xmlnode.tag.endswith('}wavelength'):
                self.field_float(xmlnode, nxsource, node_name='incident_wavelength')
            elif xmlnode.tag.endswith('}wavelength_min'):
                self.field_float(xmlnode, nxsource)
            elif xmlnode.tag.endswith('}wavelength_max'):
                self.field_float(xmlnode, nxsource)
            elif xmlnode.tag.endswith('}wavelength_spread'):
                self.field_float(xmlnode, nxsource)
            else:
                self.process_unexpected_xml_element(xmlnode, nxsource)
def test_read_nexus_field_alternatives(self):
    root = eznx.makeFile('test.h5', creator='eznx', default='entry')
    nxentry = eznx.makeGroup(root, 'entry', 'NXentry', default='data')
    eznx.write_dataset(nxentry, "text", "some text")
    eznx.write_dataset(nxentry, "number", 42)
    eznx.write_dataset(nxentry, "array", [[1, 2, 3], [4, 5, 6]])

    # read back and check the written file
    with h5py.File("test.h5", "r") as hp:
        root = hp["/"]
        nxentry = root["entry"]

        value = eznx.read_nexus_field(nxentry, "key_error")
        self.assertEqual(value, None)

        value = eznx.read_nexus_field(nxentry, "text")
        self.assertEqual(value, b"some text")
        value = eznx.read_nexus_field(nxentry, "text", astype=str)
        self.assertEqual(value, "some text")

        value = eznx.read_nexus_field(nxentry, "number")
        self.assertEqual(value, 42)
        value = eznx.read_nexus_field(nxentry, "number", astype=float)
        self.assertEqual(value, 42)
        value = eznx.read_nexus_field(nxentry, "number", astype=str)
        self.assertEqual(value, "42")

        ds = nxentry["array"]
        value = ds[()]    # ds.value deprecated in h5py
        expected = numpy.array([[1, 2, 3], [4, 5, 6]])
        self.assertTrue((value == expected).any())

        with self.assertRaises(RuntimeError) as context:
            value = eznx.read_nexus_field(nxentry, "array")
        received = str(context.exception)
        expected = "unexpected 2-D data"
        self.assertTrue(received.startswith(expected))
def process_SASdata(self, xml_parent, nx_parent):
    '''process any SASdata groups'''
    nx_node_list = []
    xml_node_list = xml_parent.findall('cs:SASdata', self.ns)
    for i, sasdata in enumerate(xml_node_list):
        nm = 'sasdata'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        nm = sasdata.attrib.get('name', nm)
        nm_clean = self.unique_name(nm, nx_parent)
        nxdata = eznx.makeGroup(nx_parent, nm_clean, 'NXdata',
                                canSAS_class='SASdata',
                                canSAS_name=nm)
        nx_node_list.append(nxdata)

        # collect the SAS data arrays
        data = {}
        units = {}
        for xmlnode in sasdata:
            if isinstance(xmlnode.tag, str):    # avoid XML Comments
                if str(xmlnode.tag).endswith('}Idata'):
                    for xmldata in xmlnode:
                        if isinstance(xmldata.tag, str):
                            tag = ns_strip(xmldata)
                            if tag not in data:
                                data[tag] = []
                                units[tag] = xmldata.get('unit', 'none')
                            data[tag].append(xmldata.text)
                else:
                    self.process_unexpected_xml_element(xmlnode, nxdata)

        # write the data arrays
        nx_obj = {}
        for nm, arr in data.items():
            try:
                # list() matters under Python 3, where map() returns an iterator
                nx_obj[nm] = eznx.makeDataset(nxdata, nm,
                                              list(map(float, arr)),
                                              units=units[nm])
            except (TypeError, ValueError) as _exc:
                pass    # this column is not numeric

        # set the NeXus plottable data attributes
        if 'I' in data:
            eznx.addAttributes(nxdata, signal='I')
        if 'Q' in data:
            eznx.addAttributes(nxdata, axes='Q')                       # NeXus
        if 'Idev' in data:
            eznx.addAttributes(nx_obj['I'], uncertainties='Idev')      # NeXus
        if 'Qdev' in data:
            eznx.addAttributes(nx_obj['Q'], resolutions='Qdev')        # NeXus
        if 'dQw' in data and 'dQl' in data:
            # not a common occurrence
            # consider: Qdev or dQw & dQl
            # http://cansas-org.github.io/canSAS2012/notation.html?highlight=uncertainty
            if 'Qdev' not in data:
                # canSAS1d rules say either Qdev OR (dQw, dQl), not both
                eznx.addAttributes(nx_obj['Q'], resolutions=['dQw', 'dQl'])

    return nx_node_list
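# A minimal read-back sketch of what process_SASdata() writes per NXdata group;
# the file name and HDF5 path here are assumptions, and string attribute values
# may come back as bytes depending on the h5py version.
import h5py

with h5py.File("converted_cansas.h5", "r") as h5:         # hypothetical file
    nxdata = h5["/sasentry/sasdata"]                       # hypothetical path
    print(dict(nxdata.attrs))          # expect signal='I', axes='Q' when present
    for name, ds in nxdata.items():
        # 'I' may carry uncertainties='Idev'; 'Q' may carry resolutions='Qdev'
        print(name, ds.shape, dict(ds.attrs))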
def data_lines_writer(h5parent, writer, scan, *args, **kws):
    '''Describe how to store scan data in an HDF5 NeXus file'''
    desc = 'SPEC scan data'
    nxdata = makeGroup(h5parent, 'data', 'NXdata', description=desc)
    writer.save_data(nxdata, scan)
def writer(self, h5parent, writer, scan, nxclass=None, *args, **kws):
    '''Describe how to store this data in an HDF5 NeXus file'''
    desc = 'SPEC positioners (#P & #O lines)'
    group = makeGroup(h5parent, 'positioners', nxclass, description=desc)
    writer.save_dict(group, scan.positioner)
def writer(self, h5parent, writer, scan, nxclass=None, *args, **kws):
    '''Describe how to store this data in an HDF5 NeXus file'''
    if hasattr(scan, 'metadata') and len(scan.metadata) > 0:
        desc = 'SPEC metadata (UNICAT-style #H & #V lines)'
        group = eznx.makeGroup(h5parent, 'metadata', nxclass, description=desc)
        writer.save_dict(group, scan.metadata)
def test_example(self):
    root = eznx.makeFile('test.h5', creator='eznx', default='entry')
    nxentry = eznx.makeGroup(root, 'entry', 'NXentry', default='data')
    ds = eznx.write_dataset(nxentry, 'title', 'simple test data')
    nxdata = eznx.makeGroup(nxentry, 'data', 'NXdata',
                            signal='counts', axes='tth', tth_indices=0)
    ds = eznx.write_dataset(nxdata, 'tth', [10.0, 10.1, 10.2, 10.3], units='degrees')
    ds = eznx.write_dataset(nxdata, 'counts', [1, 50, 1000, 5], units='counts', axes="tth")
    root.close()

    """
    Test the data file for this structure::

        test.h5:NeXus data file
          @creator = eznx
          @default = 'entry'
          entry:NXentry
            @NX_class = NXentry
            @default = 'data'
            title:NX_CHAR = simple test data
            data:NXdata
              @NX_class = NXdata
              @signal = 'counts'
              @axes = 'tth'
              @tth_indices = 0
              counts:NX_INT64[4] = [1, 50, 1000, 5]
                @units = counts
                @axes = tth
              tth:NX_FLOAT64[4] = [10.0, 10.1, 10.199999999999999, 10.300000000000001]
                @units = degrees
    """
    self.assertTrue(os.path.exists("test.h5"))

    with h5py.File("test.h5", "r") as hp:
        root = hp["/"]
        self.assertEqual(root.attrs.get("creator"), "eznx")
        self.assertEqual(root.attrs.get("default"), "entry")

        nxentry = root["entry"]
        self.assertEqual(nxentry.attrs.get("NX_class"), "NXentry")
        self.assertEqual(nxentry.attrs.get("default"), "data")
        self.assertEqual(
            eznx.read_nexus_field(nxentry, "title").decode('utf8'),
            "simple test data")

        nxdata = nxentry["data"]
        self.assertEqual(nxdata.attrs.get("NX_class"), "NXdata")
        self.assertEqual(nxdata.attrs.get("signal"), "counts")
        self.assertEqual(nxdata.attrs.get("axes"), "tth")
        self.assertEqual(nxdata.attrs.get("tth_indices"), 0)

        # test the HDF5 structure
        counts = nxdata["counts"]
        self.assertEqual(counts.attrs.get("units"), "counts")
        self.assertEqual(counts.attrs.get("axes"), "tth")
        tth = nxdata["tth"]
        self.assertEqual(tth.attrs.get("units"), "degrees")

        # test the data
        fields = eznx.read_nexus_group_fields(nxentry, "data", "counts tth".split())
        counts = fields["counts"]
        self.assertEqual(len(counts), 4)
        self.assertEqual(counts[2], [1, 50, 1000, 5][2])
        tth = fields["tth"]
        self.assertEqual(len(tth), 4)
        self.assertEqual(tth[2], [10.0, 10.1, 10.2, 10.3][2])
17.92358   66206
17.92308   64129
17.92258   56795
17.92208   29315
17.92158    6622
17.92108    1321
"""

#---------------------------

tthData, countsData = zip(
    *[map(float, _.split()) for _ in I_v_TTH_DATA.strip().splitlines()])

f = eznx.makeFile(HDF5_FILE)    # create the HDF5 NeXus file
f.attrs['default'] = 'entry'

nxentry = eznx.makeGroup(f, 'entry', 'NXentry', default='data')

nxinstrument = eznx.makeGroup(nxentry, 'instrument', 'NXinstrument')
nxdetector = eznx.makeGroup(nxinstrument, 'detector', 'NXdetector')

tth = eznx.makeDataset(nxdetector, "two_theta", tthData, units='degrees')
counts = eznx.makeDataset(nxdetector, "counts", countsData, units='counts')

nxdata = eznx.makeGroup(nxentry, 'data', 'NXdata',
                        signal=1,
                        axes='two_theta',
                        two_theta_indices=0)

eznx.makeLink(nxdetector, tth, nxdata.name + '/two_theta')
eznx.makeLink(nxdetector, counts, nxdata.name + '/counts')
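# A short sketch (assuming the HDF5_FILE written above) that follows the NeXus
# default-plot chain: the root @default attribute names the NXentry, whose
# 'data' NXdata group holds links to the detector's two_theta and counts.
import h5py

with h5py.File(HDF5_FILE, "r") as h5:
    default_entry = h5.attrs["default"]    # 'entry'; may be bytes in some h5py versions
    if isinstance(default_entry, bytes):
        default_entry = default_entry.decode()
    nxdata = h5[default_entry]["data"]
    two_theta = nxdata["two_theta"][()]    # linked from /entry/instrument/detector
    counts = nxdata["counts"][()]
    print(len(two_theta), len(counts))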
frame_set, names, h5_files, monitor, sample_times = [], [], [], [], []
for fname in FILE_SET:
    h5 = h5py.File(fname, 'r')
    frame_set.append(h5['/entry/data/data'])
    monitor.append(h5['/entry/EPICS_PV_metadata/I0_cts_gated'][0])
    names.append(os.path.split(fname)[-1])
    sample_times.append(
        int(str(h5['/entry/sample/name'].value[0]).split('_')[-1][:-3]))
    # do not close the HDF5 files yet
    h5_files.append(h5)
h5_blank = h5py.File(BLANK_FILE, 'r')

# create file and group structure
root = eznx.makeFile(TARGET_FILE, default='entry')
nxentry = eznx.makeGroup(root, 'entry', 'NXentry', default='data')
nxdata = eznx.makeGroup(nxentry, 'data', 'NXdata', signal='frames', )
nxinstrument = eznx.makeGroup(nxentry, 'instrument', 'NXinstrument')
nxdetector = eznx.makeGroup(nxinstrument, 'detector', 'NXdetector')
nxsource = eznx.makeGroup(nxinstrument, 'source', 'NXsource')
nxmonochromator = eznx.makeGroup(nxinstrument, 'monochromator', 'NXmonochromator')
nxcollimator = eznx.makeGroup(nxinstrument, 'collimator', 'NXcollimator')
nxgeometry_slit = eznx.makeGroup(nxcollimator, 'geometry', 'NXgeometry')
nxshape_slit = eznx.makeGroup(nxgeometry_slit, 'shape', 'NXshape')
nxsample = eznx.makeGroup(nxinstrument, 'sample', 'NXsample')
nxmonitor = eznx.makeGroup(nxinstrument, 'control', 'NXmonitor')

# various metadata
eznx.addAttributes(root, creator=h5.attrs['creator'] + ' and spec2nexus.eznx')
eznx.write_dataset(nxentry, 'title', 'NeXus NXsas example')
eznx.write_dataset(
    nxentry, 'definition', 'NXsas',
    URL='http://download.nexusformat.org/doc/html/classes/applications/NXsas.html')