def process_aperture(self, xml_parent, nx_parent):
    '''
    process an aperture XML element

    note: canSAS aperture does not map well into NXcollimator;
    it could be NXpinhole, NXslit, or NXaperture --
    here ALL are treated as generic NXaperture
    '''
    for xml_group in xml_parent.findall('cs:aperture', self.ns):
        if not isinstance(xml_group.tag, str):
            continue        # skip XML Comments
        nm = xml_group.attrib.get('name', 'aperture')
        nxaperture = eznx.makeGroup(
            nx_parent,
            self.unique_name(nm, nx_parent),
            'NXaperture',
            canSAS_class='aperture',
            canSAS_name=nm)
        eznx.makeDataset(
            nxaperture,
            'shape',
            xml_group.attrib.get('type', 'not specified'))
        for xmlnode in xml_group:
            if not isinstance(xmlnode.tag, str):
                continue    # skip XML Comments
            if xmlnode.tag.endswith('}size'):
                self.axis_values(xmlnode, nxaperture, '%s_gap')
            elif xmlnode.tag.endswith('}distance'):
                self.field_float(xmlnode, nxaperture)
            else:
                self.process_unexpected_xml_element(xmlnode, nxaperture)
def preliminaryWriteFile(self): '''write all preliminary data to the file while fly scan is running''' for pv_spec in pv_registry.values(): if pv_spec.acquire_after_scan: continue if pv_spec.as_string: value = pv_spec.pv.get(as_string=True) else: value = pv_spec.pv.get() if value is [None]: value = 'no data' if not isinstance(value, numpy.ndarray): value = [value] else: if pv_spec.length_limit and pv_spec.length_limit in pv_registry: length_limit = pv_registry[pv_spec.length_limit].pv.get() if len(value) > length_limit: value = value[:length_limit] hdf5_parent = pv_spec.group_parent.hdf5_group try: ds = eznx.makeDataset(hdf5_parent, pv_spec.label, value) self._attachEpicsAttributes(ds, pv_spec.pv) eznx.addAttributes(ds, **pv_spec.attrib) except Exception as e: print "ERROR: ", pv_spec.label, value print "MESSAGE: ", e print "RESOLUTION: writing as error message string" eznx.makeDataset(hdf5_parent, pv_spec.label, [str(e)])
def process_aperture(self, xml_parent, nx_parent):
    '''
    process an aperture XML element

    note: canSAS aperture does not map well into NXcollimator,
    could be NXpinhole, NXslit, or NXaperture --
    treat ALL as generic NXaperture
    '''
    xml_node_list = xml_parent.findall('cs:aperture', self.ns)
    for xml_group in xml_node_list:
        if not isinstance(xml_group.tag, str):  # avoid XML Comments
            continue
        nm = xml_group.attrib.get('name', 'aperture')
        nm_clean = self.unique_name(nm, nx_parent)
        nxaperture = eznx.makeGroup(nx_parent, nm_clean, 'NXaperture',
                                    canSAS_class='aperture',
                                    canSAS_name=nm)
        shape = xml_group.attrib.get('type', 'not specified')
        eznx.makeDataset(nxaperture, 'shape', shape)
        for xmlnode in xml_group:
            if not isinstance(xmlnode.tag, str):  # avoid XML Comments
                continue
            tag = xmlnode.tag
            if tag.endswith('}size'):
                self.axis_values(xmlnode, nxaperture, '%s_gap')
            elif tag.endswith('}distance'):
                self.field_float(xmlnode, nxaperture)
            else:
                self.process_unexpected_xml_element(xmlnode, nxaperture)
def process_SASentry(self, xml_parent, nx_parent):
    '''
    process any SASentry groups

    :return: list of the NXentry groups created
    '''
    nx_node_list = []
    xml_node_list = xml_parent.findall('cs:SASentry', self.ns)
    multiple = len(xml_node_list) > 1
    for i, sasentry in enumerate(xml_node_list):
        default_nm = 'sasentry' + ('_' + str(i) if multiple else '')
        nm = sasentry.attrib.get('name', default_nm)
        nxentry = eznx.makeGroup(
            nx_parent,
            self.unique_name(nm, nx_parent),
            'NXentry',
            canSAS_class='SASentry',
            canSAS_name=nm)
        nx_node_list.append(nxentry)
        eznx.makeDataset(nxentry, 'definition', 'NXcanSAS')

        # process the groups that may appear more than once
        group_list = self.process_SASdata(sasentry, nxentry)
        if group_list:
            # point the NeXus default plot at the first SASdata group
            default = group_list[0].name.split('/')[-1]
            eznx.addAttributes(nxentry, default=default)
        self.process_Run(sasentry, nxentry)
        self.process_SAStransmission_spectrum(sasentry, nxentry)
        self.process_SASprocess(sasentry, nxentry)
        self.process_SASnote(sasentry, nxentry)

        # elements already handled by the calls above
        handled = ('}Run', '}SASdata', '}SASprocess',
                   '}SASnote', '}SAStransmission_spectrum')
        # process any other items
        for xmlnode in sasentry:
            tag = str(xmlnode.tag)
            if tag.endswith('}Title'):
                self.field_text(xmlnode, nxentry, node_name='title')
            elif tag.endswith(handled):
                pass    # handled above
            elif tag.endswith('}SASsample'):
                self.process_SASsample(xmlnode, nxentry)
            elif tag.endswith('}SASinstrument'):
                self.process_SASinstrument(xmlnode, nxentry)
            else:
                self.process_unexpected_xml_element(xmlnode, nxentry)
    return nx_node_list
def _prepare_to_acquire(self):
    '''connect to EPICS and create the HDF5 file and structure'''
    # connect to EPICS PVs
    for pv_spec in pv_registry.values():
        pv_spec.pv = epics.PV(pv_spec.pvname)

    # create the file and its internal group structure
    for key, xture in sorted(group_registry.items()):
        if key == '/':
            # the root entry: create the file itself
            root = eznx.makeFile(
                self.hdf5_file_name,
                # attributes on the root element of the HDF5 file
                file_name=self.hdf5_file_name,
                creator=__file__,
                creator_version=self.creator_version,
                creator_config_file=self.config_file,
                HDF5_Version=h5py.version.hdf5_version,
                h5py_version=h5py.version.version,
            )
            xture.hdf5_group = root
        else:
            parent_group = xture.group_parent.hdf5_group
            xture.hdf5_group = eznx.makeGroup(
                parent_group, xture.name, xture.nx_class)
        eznx.addAttributes(xture.hdf5_group, **xture.attrib)

    # create the constant-text fields
    for field in field_registry.values():
        ds = eznx.makeDataset(
            field.group_parent.hdf5_group, field.name, [field.text])
        eznx.addAttributes(ds, **field.attrib)
def process_SAStransmission_spectrum(self, xml_parent, nx_parent):
    '''
    process any SAStransmission_spectrum groups

    These are handled similar to SASdata but with different nouns

    Shouldn't this be located (in NeXus) at /NXentry/NXsample/transmission?

    :return: list of the NXdata groups created
    '''
    nx_node_list = []
    xml_node_list = xml_parent.findall('cs:SAStransmission_spectrum', self.ns)
    for i, sas_ts in enumerate(xml_node_list):
        nm = 'transmission_spectrum'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        nm_clean = self.unique_name(nm, nx_parent)
        nxdata = eznx.makeGroup(nxdata_parent(nx_parent) if False else nx_parent,
                                nm_clean,
                                'NXdata',
                                canSAS_class='SAStransmission_spectrum',
                                )
        nm = sas_ts.attrib.get('name')
        if nm is not None:
            eznx.addAttributes(nxdata, name=nm)
        nx_node_list.append(nxdata)

        # collect the data arrays
        data = {}
        units = {}
        for xmlnode in sas_ts:
            if isinstance(xmlnode.tag, str):  # avoid XML Comments
                if str(xmlnode.tag).endswith('}Tdata'):
                    for xmldata in xmlnode:
                        try:
                            tag = ns_strip(xmldata)
                        except AttributeError as _exc:
                            continue  # an XML comment triggered this
                        if tag not in data:
                            data[tag] = []
                            units[tag] = xmldata.get('unit', 'none')
                        data[tag].append(xmldata.text)
                else:
                    self.process_unexpected_xml_element(xmlnode, nxdata)

        # write the data arrays
        nx_obj = {}
        for nm, arr in data.items():
            try:
                # list() so this also works under Python 3,
                # where map() returns a lazy iterator rather than a list
                nx_obj[nm] = eznx.makeDataset(
                    nxdata, nm, list(map(float, arr)), units=units[nm])
            except TypeError as _exc:
                pass  # values could not be converted to float

        # set the NeXus plottable data attributes
        if 'T' in data:
            eznx.addAttributes(nxdata, signal='T')
        if 'Lambda' in data:
            eznx.addAttributes(nxdata, axes='Lambda')  # NeXus
        # guard with ``'T' in nx_obj``: the 'T' dataset may be missing
        # or may have failed to write above -- avoids a KeyError
        if 'Tdev' in data and 'T' in nx_obj:
            eznx.addAttributes(nx_obj['T'], uncertainties='Tdev')  # NeXus
    return nx_node_list
def field_text(self, xmlnode, nx_parent, node_name=None):
    '''
    get the text from xmlnode and write it to nx_parent

    :return: the new dataset, or None if the name already exists
    '''
    nm = node_name or ns_strip(xmlnode)
    if nm in nx_parent:
        return None     # do not overwrite an existing dataset
    text = (xmlnode.text or '').strip()
    return eznx.makeDataset(nx_parent, nm, text)
def process_SASdetector(self, xml_parent, nx_parent):
    '''process any SASdetector groups'''
    for sas_group in xml_parent.findall('cs:SASdetector', self.ns):
        if not isinstance(sas_group.tag, str):
            continue        # skip XML Comments
        nm = sas_group.attrib.get('name', 'sasdetector')
        nxdetector = eznx.makeGroup(
            nx_parent,
            self.unique_name(nm, nx_parent),
            'NXdetector',
            canSAS_class='SASdetector',
            canSAS_name=nm)
        for xmlnode in sas_group:
            if not isinstance(xmlnode.tag, str):
                continue    # skip XML Comments
            tag = xmlnode.tag
            if tag.endswith('}name'):
                eznx.makeDataset(
                    nxdetector, 'name', (xmlnode.text or '').strip())
            elif tag.endswith('}SDD'):
                ds = self.field_float(xmlnode, nxdetector)
                eznx.addAttributes(
                    ds, comment='Distance between sample and detector')
            elif tag.endswith('}offset'):
                self.axis_values(xmlnode, nxdetector, '%s_position')
            elif tag.endswith('}orientation'):
                self.axis_values(xmlnode, nxdetector)
            elif tag.endswith('}beam_center'):
                self.axis_values(xmlnode, nxdetector, 'beam_center_%s')
            elif tag.endswith('}pixel_size'):
                self.axis_values(xmlnode, nxdetector, '%s_pixel_size')
            elif tag.endswith('}slit_length'):
                ds = self.field_float(xmlnode, nxdetector)
                eznx.addAttributes(
                    ds,
                    comment=('Slit length of the instrument for this '
                             'detector, expressed in the same units as Q'))
            else:
                self.process_unexpected_xml_element(xmlnode, nxdetector)
def saveFile(self): '''write all desired data to the file and exit this code''' t = datetime.datetime.now() #timestamp = ' '.join((t.strftime("%Y-%m-%d"), t.strftime("%H:%M:%S"))) timestamp = str(t).split('.')[0] f = group_registry['/'].hdf5_group eznx.addAttributes(f, timestamp=timestamp) # TODO: will len(caget(array)) = NORD or NELM? (useful data or full array) for pv_spec in pv_registry.values(): if not pv_spec.acquire_after_scan: continue if pv_spec.as_string: value = pv_spec.pv.get(as_string=True) else: value = pv_spec.pv.get() if value is [None]: value = 'no data' if not isinstance(value, numpy.ndarray): value = [value] else: if pv_spec.length_limit and pv_spec.length_limit in pv_registry: length_limit = pv_registry[pv_spec.length_limit].pv.get() if len(value) > length_limit: value = value[:length_limit] hdf5_parent = pv_spec.group_parent.hdf5_group try: ds = eznx.makeDataset(hdf5_parent, pv_spec.label, value) self._attachEpicsAttributes(ds, pv_spec.pv) eznx.addAttributes(ds, **pv_spec.attrib) except Exception as e: print "ERROR: ", pv_spec.label, value print "MESSAGE: ", e print "RESOLUTION: writing as error message string" eznx.makeDataset(hdf5_parent, pv_spec.label, [str(e)]) #raise # as the final step, make all the links as directed for _k, v in link_registry.items(): v.make_link(f) f.close() # be CERTAIN to close the file
def process_Run(self, xml_parent, nx_parent):
    '''process any Run elements'''
    xml_node_list = xml_parent.findall('cs:Run', self.ns)
    for i, xmlnode in enumerate(xml_node_list):
        nm = 'run'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        ds = eznx.makeDataset(nx_parent, nm, xmlnode.text)
        # copy the attributes of this <Run> element onto its dataset;
        # the original copied from xml_parent, which attached the
        # enclosing element's attributes to every run dataset
        self.copy_attributes(xmlnode, ds)
def field_float(self, xmlnode, nx_parent, node_name=None,
                default_units='unknown'):
    '''
    get a float value from xmlnode and write it to nx_parent

    :return: the new dataset
    '''
    nm = node_name or ns_strip(xmlnode)
    units = xmlnode.attrib.get('unit', default_units)
    value = float(xmlnode.text)
    return eznx.makeDataset(nx_parent, nm, value, units=units)
def process_SASsample(self, xml_parent, nx_parent):
    '''process the SASsample group, should be ONLY one'''
    nm = xml_parent.attrib.get('name', 'sassample')
    nm_clean = self.unique_name(nm, nx_parent)
    nxsample = eznx.makeGroup(nx_parent, nm_clean, 'NXsample',
                              canSAS_class='SASsample',
                              canSAS_name=nm)
    # copy the SASsample element's attributes onto the new NXsample
    # group; the original copied them onto nx_parent instead
    self.copy_attributes(xml_parent, nxsample)

    details = []  # report all *details* in a single multi-line string
    for xmlnode in xml_parent:
        tag = str(xmlnode.tag)
        if tag.endswith('}ID'):
            if xmlnode.text is None:
                text = ''
            else:
                text = xmlnode.text.strip()
            eznx.makeDataset(nxsample, 'ID', text)
        elif tag.endswith('}thickness'):
            self.field_float(xmlnode, nxsample, default_units='none')
        elif tag.endswith('}transmission'):
            self.field_float(xmlnode, nxsample, default_units='dimensionless')
        elif tag.endswith('}temperature'):
            self.field_float(xmlnode, nxsample, default_units='unknown')
        elif tag.endswith('}position'):
            self.axis_values(xmlnode, nxsample, '%s_position')
        elif tag.endswith('}orientation'):
            self.axis_values(xmlnode, nxsample)
        elif tag.endswith('}details'):
            details.append(xmlnode.text)
        else:
            self.process_unexpected_xml_element(xmlnode, nxsample)

    if len(details) > 0:
        eznx.makeDataset(nxsample, 'details', '\n'.join(details))
def process_SASsample(self, xml_parent, nx_parent):
    '''process the SASsample group, should be ONLY one'''
    nm = xml_parent.attrib.get('name', 'sassample')
    nm_clean = self.unique_name(nm, nx_parent)
    nxsample = eznx.makeGroup(
        nx_parent,
        nm_clean,
        'NXsample',
        canSAS_class='SASsample',
        canSAS_name=nm,
    )
    # copy the SASsample element's attributes onto the new NXsample
    # group; the original copied them onto nx_parent instead
    self.copy_attributes(xml_parent, nxsample)

    details = []  # report all *details* in a single multi-line string
    for xmlnode in xml_parent:
        tag = str(xmlnode.tag)
        if tag.endswith('}ID'):
            text = (xmlnode.text or '').strip()
            eznx.makeDataset(nxsample, 'ID', text)
        elif tag.endswith('}thickness'):
            self.field_float(xmlnode, nxsample, default_units='none')
        elif tag.endswith('}transmission'):
            self.field_float(xmlnode, nxsample, default_units='dimensionless')
        elif tag.endswith('}temperature'):
            self.field_float(xmlnode, nxsample, default_units='unknown')
        elif tag.endswith('}position'):
            self.axis_values(xmlnode, nxsample, '%s_position')
        elif tag.endswith('}orientation'):
            self.axis_values(xmlnode, nxsample)
        elif tag.endswith('}details'):
            details.append(xmlnode.text)
        else:
            self.process_unexpected_xml_element(xmlnode, nxsample)

    if details:
        eznx.makeDataset(nxsample, 'details', '\n'.join(details))
def process_SASdetector(self, xml_parent, nx_parent):
    '''process any SASdetector groups'''
    xml_node_list = xml_parent.findall('cs:SASdetector', self.ns)
    for sas_group in xml_node_list:
        if not isinstance(sas_group.tag, str):
            continue    # avoid XML Comments
        nm = sas_group.attrib.get('name', 'sasdetector')
        nm_clean = self.unique_name(nm, nx_parent)
        nxdetector = eznx.makeGroup(nx_parent, nm_clean, 'NXdetector',
                                    canSAS_class='SASdetector',
                                    canSAS_name=nm)
        for xmlnode in sas_group:
            if not isinstance(xmlnode.tag, str):
                continue    # avoid XML Comments
            if xmlnode.tag.endswith('}name'):
                text = (xmlnode.text or '').strip()
                eznx.makeDataset(nxdetector, 'name', text)
            elif xmlnode.tag.endswith('}SDD'):
                ds = self.field_float(xmlnode, nxdetector)
                eznx.addAttributes(
                    ds, comment='Distance between sample and detector')
            elif xmlnode.tag.endswith('}offset'):
                self.axis_values(xmlnode, nxdetector, '%s_position')
            elif xmlnode.tag.endswith('}orientation'):
                self.axis_values(xmlnode, nxdetector)
            elif xmlnode.tag.endswith('}beam_center'):
                self.axis_values(xmlnode, nxdetector, 'beam_center_%s')
            elif xmlnode.tag.endswith('}pixel_size'):
                self.axis_values(xmlnode, nxdetector, '%s_pixel_size')
            elif xmlnode.tag.endswith('}slit_length'):
                ds = self.field_float(xmlnode, nxdetector)
                eznx.addAttributes(
                    ds,
                    comment=('Slit length of the instrument for this '
                             'detector, expressed in the same units as Q'))
            else:
                self.process_unexpected_xml_element(xmlnode, nxdetector)
def process_unexpected_xml_element(self, xml_parent, nx_parent):
    '''
    process any unexpected XML element

    TODO: is it a group or a field?  Assume field, at first.
    Expected coverage>90% of usage.  This will eventually fail.
    BUT, need examples to show usage that to be handled.
    If it is a group, it should be in an NXnote
    '''
    # XML comments carry no data: nothing to write
    if isinstance(xml_parent, lxml.etree._Comment):
        return
    # If text is None, then xml_parent MUST be a group?
    # But what about complexContent? (i.e. text content AND element content?)
    ns, nm = ns_split(xml_parent)
    ds = eznx.makeDataset(nx_parent, nm, xml_parent.text, xml_namespace=ns)
    # copy the element's attributes onto the dataset it produced;
    # the original copied them onto nx_parent, unlike the other handlers
    self.copy_attributes(xml_parent, ds)
def test_create_dataset_None(self):
    '''a None value becomes an empty string dataset with a NOTE attribute'''
    root = eznx.makeFile('test.h5', creator='eznx', default='entry')
    nxentry = eznx.makeGroup(root, 'entry', 'NXentry', default='data')
    eznx.makeDataset(nxentry, "data_is_None", None)
    # close the writer's handle before re-opening the file read-only;
    # the original leaked it, which can fail under strict HDF5 file locking
    root.close()

    with h5py.File("test.h5", "r") as hp:
        root = hp["/"]
        nxentry = root["entry"]
        self.assertTrue("data_is_None" in nxentry)
        ds = nxentry["data_is_None"]
        value = ds[()]  # ds.value deprecated in h5py
        self.assertEqual(len(value), 0)
        self.assertEqual(value, "")
        self.assertTrue("NOTE" in ds.attrs)
        note = "no data supplied, value set to empty string"
        self.assertEqual(ds.attrs["NOTE"], note)
def save_temporary_test_data(mpl_datasets):
    '''save temporary test data sets'''
    from spec2nexus import eznx

    hdf5_file = os.path.join(
        localConfig.LOCAL_WWW_LIVEDATA_DIR, 'testdata.h5')
    f = eznx.makeFile(hdf5_file)
    for i, ds in enumerate(mpl_datasets):
        nxentry = eznx.makeGroup(f, 'entry_' + str(i), 'NXentry')
        eznx.makeDataset(nxentry, "title", ds.label)
        nxdata = eznx.makeGroup(
            nxentry, 'data', 'NXdata', signal='R', axes='Q')
        eznx.makeDataset(nxdata, "Q", ds.Q, units='1/A')
        eznx.makeDataset(nxdata, "R", ds.I, units='a.u.')
    f.close()
def process_SAStransmission_spectrum(self, xml_parent, nx_parent):
    '''
    process any SAStransmission_spectrum groups

    These are handled similar to SASdata but with different nouns

    Shouldn't this be located (in NeXus) at /NXentry/NXsample/transmission?

    :return: list of the NXdata groups created
    '''
    nx_node_list = []
    xml_node_list = xml_parent.findall('cs:SAStransmission_spectrum', self.ns)
    for i, sas_ts in enumerate(xml_node_list):
        nm = 'transmission_spectrum'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        nm_clean = self.unique_name(nm, nx_parent)
        nxdata = eznx.makeGroup(
            nx_parent,
            nm_clean,
            'NXdata',
            canSAS_class='SAStransmission_spectrum',
        )
        nm = sas_ts.attrib.get('name')
        if nm is not None:
            eznx.addAttributes(nxdata, name=nm)
        nx_node_list.append(nxdata)

        # collect the data arrays
        data = {}
        units = {}
        for xmlnode in sas_ts:
            if isinstance(xmlnode.tag, str):  # avoid XML Comments
                if str(xmlnode.tag).endswith('}Tdata'):
                    for xmldata in xmlnode:
                        try:
                            tag = ns_strip(xmldata)
                        except AttributeError as _exc:
                            continue  # an XML comment triggered this
                        if tag not in data:
                            data[tag] = []
                            units[tag] = xmldata.get('unit', 'none')
                        data[tag].append(xmldata.text)
                else:
                    self.process_unexpected_xml_element(xmlnode, nxdata)

        # write the data arrays
        nx_obj = {}
        for nm, arr in data.items():
            try:
                # list() so this also works under Python 3,
                # where map() returns a lazy iterator rather than a list
                nx_obj[nm] = eznx.makeDataset(
                    nxdata, nm, list(map(float, arr)), units=units[nm])
            except TypeError as _exc:
                pass  # values could not be converted to float

        # set the NeXus plottable data attributes
        if 'T' in data:
            eznx.addAttributes(nxdata, signal='T')
        if 'Lambda' in data:
            eznx.addAttributes(nxdata, axes='Lambda')  # NeXus
        # guard with ``'T' in nx_obj``: the 'T' dataset may be missing
        # or may have failed to write above -- avoids a KeyError
        if 'Tdev' in data and 'T' in nx_obj:
            eznx.addAttributes(nx_obj['T'], uncertainties='Tdev')  # NeXus
    return nx_node_list
def process_SASdata(self, xml_parent, nx_parent):
    '''
    process any SASdata groups

    :return: list of the NXdata groups created
    '''
    nx_node_list = []
    xml_node_list = xml_parent.findall('cs:SASdata', self.ns)
    for i, sasdata in enumerate(xml_node_list):
        nm = 'sasdata'
        if len(xml_node_list) > 1:
            nm += '_' + str(i)
        nm = sasdata.attrib.get('name', nm)
        nm_clean = self.unique_name(nm, nx_parent)
        nxdata = eznx.makeGroup(nx_parent, nm_clean, 'NXdata',
                                canSAS_class='SASdata',
                                canSAS_name=nm)
        nx_node_list.append(nxdata)

        # collect the SAS data arrays
        data = {}
        units = {}
        for xmlnode in sasdata:
            if isinstance(xmlnode.tag, str):  # avoid XML Comments
                if str(xmlnode.tag).endswith('}Idata'):
                    for xmldata in xmlnode:
                        if isinstance(xmldata.tag, str):
                            tag = ns_strip(xmldata)
                            if tag not in data:
                                data[tag] = []
                                units[tag] = xmldata.get('unit', 'none')
                            data[tag].append(xmldata.text)
                else:
                    self.process_unexpected_xml_element(xmlnode, nxdata)

        # write the data arrays
        nx_obj = {}
        for nm, arr in data.items():
            try:
                # list() so this also works under Python 3,
                # where map() returns a lazy iterator rather than a list
                nx_obj[nm] = eznx.makeDataset(
                    nxdata, nm, list(map(float, arr)), units=units[nm])
            except TypeError as _exc:
                pass  # values could not be converted to float

        # set the NeXus plottable data attributes
        # guard with ``in nx_obj``: a dataset may have failed to write
        # above, in which case the key is absent -- avoids a KeyError
        if 'I' in data:
            eznx.addAttributes(nxdata, signal='I')
        if 'Q' in data:
            eznx.addAttributes(nxdata, axes='Q')  # NeXus
        if 'Idev' in data and 'I' in nx_obj:
            eznx.addAttributes(nx_obj['I'], uncertainties='Idev')  # NeXus
        if 'Qdev' in data and 'Q' in nx_obj:
            eznx.addAttributes(nx_obj['Q'], resolutions='Qdev')  # NeXus
        if 'dQw' in data and 'dQl' in data and 'Q' in nx_obj:
            # not a common occurrence
            # consider: Qdev or dQw & dQl
            # http://cansas-org.github.io/canSAS2012/notation.html?highlight=uncertainty
            if 'Qdev' not in data:
                # canSAS1d rules say either Qdev OR (dQw, dQl), not both
                eznx.addAttributes(nx_obj['Q'], resolutions=['dQw', 'dQl'])
    return nx_node_list
def __init__(self, xml_node):
    '''
    :param xml_node: XML element supplying the ``name`` attribute
    '''
    self.name = xml_node.get('name')
    # side effect retained from the original; the returned dataset was
    # never used (the unused binding and trailing ``pass`` were dead code)
    eznx.makeDataset(None, self.name, 'test')
17.92258 56795 17.92208 29315 17.92158 6622 17.92108 1321 """ #--------------------------- tthData, countsData = zip( *[map(float, _.split()) for _ in I_v_TTH_DATA.strip().splitlines()]) f = eznx.makeFile(HDF5_FILE) # create the HDF5 NeXus file f.attrs['default'] = 'entry' nxentry = eznx.makeGroup(f, 'entry', 'NXentry', default='data') nxinstrument = eznx.makeGroup(nxentry, 'instrument', 'NXinstrument') nxdetector = eznx.makeGroup(nxinstrument, 'detector', 'NXdetector') tth = eznx.makeDataset(nxdetector, "two_theta", tthData, units='degrees') counts = eznx.makeDataset(nxdetector, "counts", countsData, units='counts') nxdata = eznx.makeGroup(nxentry, 'data', 'NXdata', signal=1, axes='two_theta', two_theta_indices=0) eznx.makeLink(nxdetector, tth, nxdata.name + '/two_theta') eznx.makeLink(nxdetector, counts, nxdata.name + '/counts') f.close() # be CERTAIN to close the file
def __init__(self, xml_node):
    '''
    :param xml_node: XML element supplying the ``name`` attribute
    '''
    self.name = xml_node.get("name")
    # side effect retained from the original; the returned dataset was
    # never used (the unused binding and trailing ``pass`` were dead code)
    eznx.makeDataset(None, self.name, "test")