def build_attribute_xml(self):
    """Build the <Attributes> XML document for the extra attributes.

    One <Attribute> element is emitted per configured source:
    PV sources become EPICS_PV entries carrying a DBR type, and
    parameter sources become PARAM entries carrying the raw data type.

    Returns:
        str: The serialised XML document.
    """
    attrs = self.extra_attributes.value
    # Translate generic data types to their EPICS DBR equivalents;
    # anything else falls through to the enum's own value string.
    dbr_lookup = {
        DataType.INT: "DBR_LONG",
        DataType.DOUBLE: "DBR_DOUBLE",
        DataType.STRING: "DBR_STRING",
    }
    root_el = ET.Element("Attributes")
    for index, s_type in enumerate(attrs.sourceType):
        if s_type == SourceType.PV:
            data_type = attrs.dataType[index]
            dbr_type = dbr_lookup.get(data_type, data_type.value)
            ET.SubElement(
                root_el,
                "Attribute",
                name=attrs.name[index],
                type="EPICS_PV",
                dbrtype=dbr_type,
                description=attrs.description[index],
                source=attrs.sourceId[index],
            )
        elif s_type == SourceType.PARAM:
            ET.SubElement(
                root_el,
                "Attribute",
                name=attrs.name[index],
                type="PARAM",
                datatype=attrs.dataType[index].value,
                description=attrs.description[index],
                source=attrs.sourceId[index],
            )
    return et_to_string(root_el)
def _make_attributes_xml(self):
    """Build the NDAttributes XML describing this statistic parameter.

    Returns:
        str: Serialised <Attributes> document with a single PARAM entry
            for the configured statistic.
    """
    root_el = ET.Element("Attributes")
    # e.g. "Sum of the array" for the SUM statistic
    stat_description = "%s of the array" % self.statistic.name.title()
    ET.SubElement(
        root_el,
        "Attribute",
        addr="0",
        datatype="DOUBLE",
        type="PARAM",
        description=stat_description,
        name=self.statistic_attr(),
        source=self.statistic.value,
    )
    return et_to_string(root_el)
def _make_attributes_xml(self):
    """Build the NDAttributes XML exposing the array total statistic.

    Returns:
        str: Serialised <Attributes> document with a single StatsTotal
            PARAM entry sourced from TOTAL.
    """
    root_el = ET.Element("Attributes")
    attribute_el = ET.SubElement(root_el, "Attribute")
    # Single PARAM attribute describing the plugin's TOTAL output
    attribute_el.set("addr", "0")
    attribute_el.set("datatype", "DOUBLE")
    attribute_el.set("type", "PARAM")
    attribute_el.set("description", "Sum of the array")
    attribute_el.set("name", "StatsTotal")
    attribute_el.set("source", "TOTAL")
    return et_to_string(root_el)
def _set_lut_icon(self, block_name):
    """Update a LUT block's icon to reflect its current function.

    Reads the LUT function number from FUNC.RAW, determines which SVG
    elements should be hidden for that function, removes them from the
    template icon, and publishes the resulting SVG on the icon attribute.

    Args:
        block_name (str): Name of the LUT block to update.
    """
    icon_attr = self._blocks_parts[block_name]["icon"].attr
    with open(os.path.join(SVG_DIR, "LUT.svg")) as f:
        svg_text = f.read()
    fnum = int(self.client.get_field(block_name, "FUNC.RAW"))
    invis = self._get_lut_icon_elements(fnum)
    # Register the SVG default namespace so serialisation does not emit
    # ns0:-prefixed tags: https://stackoverflow.com/a/8998773
    ET.register_namespace('', "http://www.w3.org/2000/svg")
    root = ET.fromstring(svg_text)
    for i in invis:
        # Find the first parent which has a child with id i
        parent = root.find('.//*[@id=%r]/..' % i)
        # Find the child and remove it
        child = parent.find('./*[@id=%r]' % i)
        parent.remove(child)
    svg_text = et_to_string(root)
    icon_attr.set_value(svg_text)
def _set_lut_icon(self, block_name):
    """Update a LUT block's icon to reflect its current function.

    Reads the LUT function number from FUNC.RAW (any base, e.g. hex),
    strips the SVG elements that should be hidden for that function from
    the template icon, and publishes the result on the icon attribute.

    Args:
        block_name (str): Name of the LUT block to update.
    """
    icon_attr = self._blocks_parts[block_name]["icon"].attr
    with open(os.path.join(SVG_DIR, "LUT.svg")) as svg_file:
        svg_text = svg_file.read()
    # Base 0 lets FUNC.RAW be decimal, hex (0x..) or octal (0o..)
    fnum = int(self.client.get_field(block_name, "FUNC.RAW"), 0)
    hidden_ids = self._get_lut_icon_elements(fnum)
    # Register the SVG default namespace so serialisation keeps plain
    # tag names: https://stackoverflow.com/a/8998773
    ET.register_namespace('', "http://www.w3.org/2000/svg")
    root = ET.fromstring(svg_text)
    for element_id in hidden_ids:
        # Locate the parent owning the element, then detach the element
        parent = root.find('.//*[@id=%r]/..' % element_id)
        child = parent.find('./*[@id=%r]' % element_id)
        parent.remove(child)
    icon_attr.set_value(et_to_string(root))
def _make_xml(self, start_index):
    # type: (int) -> Tuple[str, int]
    """Build one pos_layout XML chunk of scan positions.

    Args:
        start_index: Index of the first point to include.

    Returns:
        Tuple of (serialised XML, index one past the last point written).
    """
    root_el = ET.Element("pos_layout")
    dims_el = ET.SubElement(root_el, "dimensions")
    # One dimension entry per hdf index
    for i in range(len(self.generator.dimensions)):
        ET.SubElement(dims_el, "dimension", name="d%d" % i)
    # Extra dimension used to signal file close to the HDF writer
    ET.SubElement(dims_el, "dimension", name="FilePluginClose")
    positions_el = ET.SubElement(root_el, "positions")
    # Clamp the chunk to the number of steps we are configured to do
    end_index = min(start_index + POSITIONS_PER_XML, self.steps_up_to)
    last_point = self.generator.size - 1
    for i in range(start_index, end_index):
        point = self.generator.get_point(i)
        # Only the very last point of the scan asks the file to close
        do_close = i == last_point
        positions = {"FilePluginClose": "%d" % do_close}
        for j, value in enumerate(point.indexes):
            positions["d%d" % j] = str(value)
        positions_el.append(ET.Element("position", **positions))
    xml = et_to_string(root_el)
    xml_length = len(xml)
    assert xml_length < XML_MAX_SIZE, "XML size %d too big" % xml_length
    return xml, end_index
def _make_layout_xml(self, generator, part_info):
    """Build the hdf5_layout XML describing the NeXus file structure.

    Places the primary detector data at /entry/detector/detector and
    links in any secondary/attribute datasets, plus an NDAttributes
    collection group.

    Returns:
        str: The serialised layout XML.
    """
    # Check that there is only one primary source of detector data;
    # fall back to rank 1 so something still goes in the file
    primary_infos = self._get_dataset_infos(part_info, primary=True)
    primary_rank = primary_infos[0].rank if primary_infos else 1
    # Detector data always lives at /entry/detector/detector
    primary_info = DatasetSourceInfo(
        name="detector", type="primary", rank=primary_rank)
    root_el = ET.Element("hdf5_layout")
    entry_el = ET.SubElement(root_el, "group", name="entry")
    ET.SubElement(entry_el, "attribute", name="NX_class",
                  source="constant", value="NXentry", type="string")
    # NXdata element holding the detector data
    data_el = self._make_nxdata(primary_info, entry_el, generator)
    det_el = ET.SubElement(data_el, "dataset", name=primary_info.name,
                           source="detector", det_default="true")
    ET.SubElement(det_el, "attribute", name="NX_class",
                  source="constant", value="SDS", type="string")
    # Link in the additional (non-primary) sources of data
    for dataset_info in self._get_dataset_infos(part_info, primary=False):
        # Secondary sources mirror the detector's rank
        if dataset_info.type == "secondary":
            dataset_info.rank = primary_rank
        attr_el = self._make_nxdata(
            dataset_info, entry_el, generator, link=True)
        ET.SubElement(attr_el, "dataset", name=dataset_info.name,
                      source="ndattribute", ndattribute=dataset_info.attr)
    # Collection group for NDAttributes
    ndattributes_el = ET.SubElement(
        entry_el, "group", name="NDAttributes", ndattr_default="true")
    ET.SubElement(ndattributes_el, "attribute", name="NX_class",
                  source="constant", value="NXcollection", type="string")
    return et_to_string(root_el)
def _make_xml(self, start_index):
    """Build one pos_layout XML chunk of scan positions.

    Args:
        start_index (int): Index of the first point to include.

    Returns:
        tuple: (serialised XML, index one past the last point written).
    """
    root_el = ET.Element("pos_layout")
    dims_el = ET.SubElement(root_el, "dimensions")
    # One dimension entry per hdf index, in sorted name order
    for index_name in sorted(self.generator.index_names):
        ET.SubElement(dims_el, "dimension", name=index_name)
    # Extra dimension used to signal file close to the HDF writer
    ET.SubElement(dims_el, "dimension", name="FilePluginClose")
    positions_el = ET.SubElement(root_el, "positions")
    # Clamp the chunk to the number of steps we are configured to do
    end_index = min(start_index + POSITIONS_PER_XML, self.steps_up_to)
    last_point = self.generator.num - 1
    for i in range(start_index, end_index):
        point = self.generator.get_point(i)
        # Only the very last point of the scan asks the file to close
        do_close = i == last_point
        positions = {"FilePluginClose": "%d" % do_close}
        for name, value in zip(self.generator.index_names, point.indexes):
            positions[name] = str(value)
        positions_el.append(ET.Element("position", **positions))
    xml = et_to_string(root_el)
    xml_length = len(xml)
    assert xml_length < XML_MAX_SIZE, "XML size %d too big" % xml_length
    return xml, end_index
def _make_layout_xml(self, generator, part_info):
    """Build the hdf5_layout XML describing the NeXus file structure.

    Places the primary detector data at /entry/detector/detector, links
    in calculated and plain NDAttribute datasets, and adds an
    NDAttributes collection group.

    Returns:
        str: The serialised layout XML.
    """
    # Root element containing the NXentry group
    root_el = ET.Element("hdf5_layout")
    entry_el = ET.SubElement(root_el, "group", name="entry")
    ET.SubElement(entry_el, "attribute", name="NX_class",
                  source="constant", value="NXentry", type="string")
    # Check that there is only one primary source of detector data;
    # fall back to rank 1 so something still goes in the file
    ndarray_infos = NDArrayDatasetInfo.filter_values(part_info)
    primary_rank = ndarray_infos[0].rank if ndarray_infos else 1
    # NXdata element with the detector data at /entry/detector/detector
    data_el = self._make_nxdata(
        "detector", primary_rank, entry_el, generator)
    det_el = ET.SubElement(data_el, "dataset", name="detector",
                           source="detector", det_default="true")
    ET.SubElement(det_el, "attribute", name="NX_class",
                  source="constant", value="SDS", type="string")
    # Calculated sources of data take the detector's rank
    calculated = CalculatedNDAttributeDatasetInfo.filter_values(part_info)
    for dataset_info in calculated:
        attr_el = self._make_nxdata(
            dataset_info.name, primary_rank, entry_el, generator,
            link=True)
        ET.SubElement(attr_el, "dataset", name=dataset_info.name,
                      source="ndattribute", ndattribute=dataset_info.attr)
    # Other attribute sources of data keep their own rank
    for dataset_info in NDAttributeDatasetInfo.filter_values(part_info):
        attr_el = self._make_nxdata(
            dataset_info.name, dataset_info.rank, entry_el, generator,
            link=True)
        ET.SubElement(attr_el, "dataset", name=dataset_info.name,
                      source="ndattribute", ndattribute=dataset_info.attr)
    # Collection group for NDAttributes
    ndattributes_el = ET.SubElement(
        entry_el, "group", name="NDAttributes", ndattr_default="true")
    ET.SubElement(ndattributes_el, "attribute", name="NX_class",
                  source="constant", value="NXcollection", type="string")
    return et_to_string(root_el)
def make_layout_xml(
    generator: CompoundGenerator,
    part_info: scanning.hooks.APartInfo,
    write_all_nd_attributes: bool = False,
) -> str:
    """Build the hdf5_layout XML describing the NeXus file structure.

    Places the primary detector data at /entry/detector/detector, links
    in calculated and plain NDAttribute datasets, and adds an
    NDAttributes collection group carrying the unique id and timestamp.

    Args:
        generator: Scan point generator for the acquisition.
        part_info: Dataset infos reported by parts.
        write_all_nd_attributes: If True, ask the writer to record every
            NDAttribute in the NDAttributes group.

    Returns:
        The serialised layout XML.
    """
    # Root element with automatic ndattr defaulting disabled
    root_el = ET.Element("hdf5_layout", auto_ndattr_default="false")
    entry_el = ET.SubElement(root_el, "group", name="entry")
    ET.SubElement(
        entry_el,
        "attribute",
        name="NX_class",
        source="constant",
        value="NXentry",
        type="string",
    )
    # Check that there is only one primary source of detector data;
    # fall back to rank 2 so something still goes in the file
    ndarray_infos: List[NDArrayDatasetInfo] = NDArrayDatasetInfo.filter_values(
        part_info)
    primary_rank = ndarray_infos[0].rank if ndarray_infos else 2
    # NXdata element with the detector data at /entry/detector/detector
    data_el = make_nxdata("detector", primary_rank, entry_el, generator)
    det_el = ET.SubElement(data_el, "dataset", name="detector",
                           source="detector", det_default="true")
    ET.SubElement(
        det_el,
        "attribute",
        name="NX_class",
        source="constant",
        value="SDS",
        type="string",
    )
    # Calculated sources of data take the detector's rank
    calc_dataset_infos: List[
        CalculatedNDAttributeDatasetInfo] = CalculatedNDAttributeDatasetInfo.filter_values(
        part_info)
    for calc_dataset_info in calc_dataset_infos:
        attr_el = make_nxdata(calc_dataset_info.name, primary_rank,
                              entry_el, generator, link=True)
        ET.SubElement(
            attr_el,
            "dataset",
            name=calc_dataset_info.name,
            source="ndattribute",
            ndattribute=calc_dataset_info.attr,
        )
    # Other attribute sources of data also use the detector's rank
    dataset_infos: List[
        NDAttributeDatasetInfo] = NDAttributeDatasetInfo.filter_values(
        part_info)
    for dataset_info in dataset_infos:
        attr_el = make_nxdata(dataset_info.name, primary_rank,
                              entry_el, generator, link=True)
        ET.SubElement(
            attr_el,
            "dataset",
            name=dataset_info.name,
            source="ndattribute",
            ndattribute=dataset_info.attr,
        )
    # Collection group for NDAttributes; optionally record all of them
    ndattr_default = "true" if write_all_nd_attributes else "false"
    nd_attributes_el = ET.SubElement(entry_el, "group", name="NDAttributes",
                                     ndattr_default=ndattr_default)
    ET.SubElement(
        nd_attributes_el,
        "attribute",
        name="NX_class",
        source="constant",
        value="NXcollection",
        type="string",
    )
    # Always expose the unique id and timestamp of each frame
    ET.SubElement(
        nd_attributes_el,
        "dataset",
        name="NDArrayUniqueId",
        source="ndattribute",
        ndattribute="NDArrayUniqueId",
    )
    ET.SubElement(
        nd_attributes_el,
        "dataset",
        name="NDArrayTimeStamp",
        source="ndattribute",
        ndattribute="NDArrayTimeStamp",
    )
    return et_to_string(root_el)
def __str__(self):
    """Serialise the root element to its XML string form."""
    xml_text = et_to_string(self.root)
    return xml_text