def add_incremental_group(self, group_type, where, title='', settings_as_xml='', metadata=None):
    """Add a group node of the given type under *where*, auto-incrementing its name.

    The new node is named ``<Group_type>NNN`` where ``NNN`` is a three-digit
    index, one past the highest index found among existing siblings whose
    name contains the capitalized group type.

    Parameters
    ----------
    group_type: (str) one of the possible values of **group_types**
    where: (str or node) parent node where to create the new group
    title: (str) title of the new group node
    settings_as_xml: (str) XML string containing Parameters representation (see custom_Tree)
    metadata: (dict or None) extra metadata to be saved with this new group node

    Returns
    -------
    (node): newly created group node

    Raises
    ------
    Exception: if *group_type* is not one of **group_types**
    """
    if group_type not in group_types:
        raise Exception('Invalid group type')
    if metadata is None:  # avoid the mutable-default-argument pitfall
        metadata = {}

    prefix = utils.capitalize(group_type)
    # Siblings of the same type, sorted so the last one carries the highest index
    siblings = sorted(node for node in self.h5_file.get_node(where)._v_children
                      if prefix in node)
    if not siblings:
        ind_group = -1
    else:
        # sibling names end with a 3-digit index, e.g. 'Detector002'
        ind_group = int(siblings[-1][-3:])

    group = self.get_set_group(where, prefix + '{:03d}'.format(ind_group + 1), title)
    group._v_attrs['settings'] = settings_as_xml
    # channel ('ch') groups historically store an empty 'type' attribute
    if group_type.lower() != 'ch':
        group._v_attrs['type'] = group_type.lower()
    else:
        group._v_attrs['type'] = ''
    for key in metadata:
        group._v_attrs[key] = metadata[key]
    return group
def add_group(self, group_name, group_type, where, title='', settings_as_xml='', metadata=None):
    """Add (or reuse) a named group node of the given type in the h5 file tree.

    If a node with the capitalized *group_name* already exists under *where*
    it is reused; otherwise it is created. Its attributes are (re)written in
    both cases.

    Parameters
    ----------
    group_name: (str) a custom name for this group
    group_type: (str) one of the possible values of **group_types**
    where: (str or node) parent node where to create the new group
    title: (str) title of the new group node
    settings_as_xml: (str) XML string containing Parameters representation (see custom_Tree)
    metadata: (dict or None) extra metadata to be saved with this new group node

    Returns
    -------
    (node): newly created (or existing) group node

    Raises
    ------
    Exception: if *group_type* is not one of **group_types**
    """
    if group_type not in group_types:
        raise Exception('Invalid group type')
    if metadata is None:  # avoid the mutable-default-argument pitfall
        metadata = {}

    # EAFP: fetch the node if it exists, create it otherwise
    try:
        node = self.h5_file.get_node(where, utils.capitalize(group_name))
    except tables.NoSuchNodeError:
        node = self.get_set_group(where, utils.capitalize(group_name), title)

    node._v_attrs['settings'] = settings_as_xml
    node._v_attrs['type'] = group_type.lower()
    for key in metadata:
        node._v_attrs[key] = metadata[key]
    return node
def add_array(self, where, name, data_type, data_shape=(1, ), data_dimension='0D', scan_type='',
              scan_shape=None, title='', array_to_save=None, array_type=float, enlargeable=False,
              metadata=None, init=False, add_scan_dim=False):
    """Create an array node (enlargeable EArray or fixed CArray) in the h5 file.

    Parameters
    ----------
    where: (str or node) parent node where to create the new array
    name: (str) name of the array node (capitalized before creation)
    data_type: (str) one of the possible values of **data_types**
    data_shape: (tuple) shape of one data element
    data_dimension: (str) one of the possible values of **data_dimensions**
    scan_type: (str) one of the possible values of **scan_types** (may be '')
    scan_shape: (list or None) shape of the scan, prepended to *data_shape*
        when *add_scan_dim* is True
    title: (str) title of the array node
    array_to_save: (ndarray or None) data to store in a CArray
    array_type: dtype-like used for the EArray atom
        (NOTE: the previous default was ``np.float``, an alias removed in
        NumPy 1.24; ``float`` maps to the same ``float64`` dtype)
    enlargeable: (bool) if True create an appendable EArray, else a CArray
    metadata: (dict or None) extra metadata to be saved with this node
    init: (bool) if True (with *add_scan_dim*) initialize the array to zeros
    add_scan_dim: (bool) if True prepend *scan_shape* to *data_shape*

    Returns
    -------
    (node): newly created array node

    Raises
    ------
    Exception: on invalid *data_dimension*, *data_type* or *scan_type*
    """
    if metadata is None:  # avoid mutable default arguments
        metadata = {}
    if scan_shape is None:
        scan_shape = []

    if data_dimension not in data_dimensions:
        raise Exception('Invalid data dimension')
    if data_type not in data_types:
        raise Exception('Invalid data type')
    if scan_type != '':
        scan_type = utils.uncapitalize(scan_type)
    if scan_type not in scan_types:
        raise Exception('Invalid scan type')

    if enlargeable:
        # first dimension is the appendable one and starts empty
        shape = [0]
        if data_shape != (1, ):
            shape.extend(data_shape)
        shape = tuple(shape)
        array = self.h5_file.create_earray(where, utils.capitalize(name),
                                           tables.Atom.from_dtype(np.dtype(array_type)),
                                           shape=shape, title=title, filters=self.filters)
        array._v_attrs['shape'] = shape
    else:
        if add_scan_dim:  # array initialization with scan dimensions prepended
            shape = list(scan_shape)
            shape.extend(data_shape)
            if init or array_to_save is None:
                array_to_save = np.zeros(shape)

        array = self.h5_file.create_carray(where, utils.capitalize(name), obj=array_to_save,
                                           title=title, filters=self.filters)
        array._v_attrs['shape'] = array_to_save.shape

    array._v_attrs['type'] = data_type
    array._v_attrs['data_dimension'] = data_dimension
    array._v_attrs['scan_type'] = scan_type
    for key in metadata:
        array._v_attrs[key] = metadata[key]
    return array
def test_hierarchy(self, get_h5saver_scan, tmp_path):
    """Check the node paths produced while building a scan/detector/move hierarchy."""
    h5saver = get_h5saver_scan
    h5saver.settings.child(('base_path')).setValue(tmp_path)
    h5saver.init_file(update_h5=True)

    scan_group = h5saver.add_scan_group()
    # two detector groups created before the move group, so the next one is Detector002
    h5saver.add_det_group(scan_group)
    h5saver.add_det_group(scan_group)

    move_group = h5saver.add_move_group(scan_group)
    assert h5saver.get_node_path(move_group) == '/Raw_datas/Scan000/Move000'

    det_group = h5saver.add_det_group(scan_group)
    for data_type in group_data_types:
        data_group = h5saver.add_data_group(det_group, data_type)
        assert h5saver.get_node_name(data_group) == utils.capitalize(data_type)
        base = f'/Raw_datas/Scan000/Detector002/{utils.capitalize(data_type)}'
        # channel groups are auto-incremented: Ch000 then Ch001
        for ind_ch in range(2):
            ch_group = h5saver.add_CH_group(data_group)
            assert h5saver.get_node_path(ch_group) == f'{base}/Ch{ind_ch:03d}'

    live_group = h5saver.add_live_scan_group(scan_group, '0D')
    assert h5saver.get_node_path(live_group) == '/Raw_datas/Scan000/Live_scan_0D'
def do_save_continuous(self, datas):
    """ method used to perform continuous saving of data, for instance for logging. Will save datas
    as a function of time in a h5 file set when *continuous_saving* parameter as been set.

    Parameters
    ----------
    datas: list of OrderedDict as exported by detector plugins
    """
    try:
        det_name = datas['name']
        if self.logger_type == 'h5saver':
            # locate this detector's group and append the acquisition time to its time axis
            det_group = self.data_logger.get_group_by_title(self.data_logger.raw_group, det_name)
            time_array = self.data_logger.get_node(det_group, 'Logger_time_axis')
            time_array.append(np.array([datas['acq_time_s']]))

            # 2D/ND data are only logged when the 'save_2D' setting is checked
            data_types = ['data0D', 'data1D']
            if self.data_logger.settings.child(('save_2D')).value():
                data_types.extend(['data2D', 'dataND'])

            for data_type in data_types:
                if data_type in datas.keys() and len(datas[data_type]) != 0:
                    # get (or lazily create) the group holding this data dimension
                    if not self.data_logger.is_node_in_group(det_group, data_type):
                        data_group = self.data_logger.add_data_group(det_group, data_type,
                                                                     metadata=dict(type='scan'))
                    else:
                        data_group = self.data_logger.get_node(det_group, utils.capitalize(data_type))

                    for ind_channel, channel in enumerate(datas[data_type]):
                        # first time this channel shows up: create its group and
                        # an enlargeable array; afterwards just fetch 'Data'
                        channel_group = self.data_logger.get_group_by_title(data_group, channel)
                        if channel_group is None:
                            channel_group = self.data_logger.add_CH_group(data_group, title=channel)
                            data_array = self.data_logger.add_data(channel_group, datas[data_type][channel],
                                                                   scan_type='scan1D', enlargeable=True)
                        else:
                            data_array = self.data_logger.get_node(channel_group, 'Data')
                        # 0D values are wrapped into a 1-element array before appending
                        if data_type == 'data0D':
                            data_array.append(np.array([datas[data_type][channel]['data']]))
                        else:
                            data_array.append(datas[data_type][channel]['data'])
            self.data_logger.h5_file.flush()

        elif self.logger_type == 'dblogger':
            # database logger handles the whole datas dict itself
            self.data_logger.add_datas(datas)

        # increment the saved-shots counter shown in the settings tree
        self.data_logger.settings.child(('N_saved')).setValue(
            self.data_logger.settings.child(('N_saved')).value() + 1)

    except Exception as e:
        # logging must never crash the acquisition: log and carry on
        logger.exception(str(e))
def test_capitalize(self):
    """utils.capitalize upper-cases the first character by default, or the first N when asked."""
    sample = 'abcdef'
    assert utils.capitalize(sample) == 'Abcdef'
    assert utils.capitalize(sample, 3) == 'ABCdef'
def settings_changed(self, param, changes):
    """
    Check for changes in the given (parameter,change,information) tuple list. In case of value changed,
    update the DAQscan_settings tree consequently.

    =============== ============================================ ==============================
    **Parameters**    **Type**                                     **Description**
    *param*           instance of pyqtgraph parameter              the parameter to be checked
    *changes*         (parameter,change,information) tuple list    the current changes state
    =============== ============================================ ==============================
    """
    for param, change, data in changes:
        path = self.settings.childPath(param)
        if path is not None:
            childName = '.'.join(path)
        else:
            childName = param.name()
        if change == 'childAdded':
            pass
        elif change == 'value':
            if param.parent().name() == 'scans':
                if data['checked']:
                    try:
                        # Gather the candidate h5 nodes: a whole scan subtree, or a single image node
                        if 'Scan' in param.name():
                            h5module = self.h5module
                            nodes = [node for node in h5module.walk_nodes(data['path'])]
                        else:
                            h5module = self.h5module_image
                            nodes = [h5module.get_node(data['path'])]
                        ind = 0
                        for node in nodes:
                            flag = False
                            # Only overlay nodes that are either a 2D scan of 0D data
                            # or a plain 2D image with no scan
                            if 'type' in node.attrs.attrs_name and 'data_dimension' in node.attrs.attrs_name:
                                if 'scan_type' in node.attrs.attrs_name:
                                    if node.attrs['scan_type'] == 'scan2D' and \
                                            node.attrs['data_dimension'] == '0D':  # 2d scan of 0D data
                                        flag = True
                                    elif node.attrs['scan_type'] == '' and \
                                            node.attrs['data_dimension'] == '2D':  # image data (2D) with no scan
                                        flag = True
                            if flag:
                                # adaptive scans carry scattered points -> triangulated item
                                isadaptive = 'adaptive' in node.attrs['scan_subtype'].lower()
                                if isadaptive:
                                    im = TriangulationItem()
                                else:
                                    im = ImageItem()
                                im.setOpacity(1)
                                # im.setOpts(axisOrder='row-major')
                                self.viewer.image_widget.plotitem.addItem(im)
                                # additive composition so several overlays blend together
                                im.setCompositionMode(QtGui.QPainter.CompositionMode_Plus)
                                # Fetch the relevant axes; scan nodes store 'Scan_x_axis'/'Scan_y_axis',
                                # plain images store 'X_axis'/'Y_axis'
                                if 'Scan' in param.name():
                                    if isadaptive:
                                        x_axis = h5module.get_node(
                                            h5module.get_node(data['path']).parent_node,
                                            utils.capitalize('scan_x_axis')).read()
                                        y_axis = h5module.get_node(
                                            h5module.get_node(data['path']).parent_node,
                                            utils.capitalize('scan_y_axis')).read()
                                    else:
                                        # np.unique collapses repeated positions of a regular scan
                                        x_axis = np.unique(h5module.get_node(
                                            h5module.get_node(data['path']).parent_node,
                                            utils.capitalize('scan_x_axis')).read())
                                        y_axis = np.unique(h5module.get_node(
                                            h5module.get_node(data['path']).parent_node,
                                            utils.capitalize('scan_y_axis')).read())
                                else:
                                    x_axis = np.unique(h5module.get_node(
                                        h5module.get_node(data['path']).parent_node,
                                        utils.capitalize('x_axis')).read())
                                    y_axis = np.unique(h5module.get_node(
                                        h5module.get_node(data['path']).parent_node,
                                        utils.capitalize('y_axis')).read())
                                if not isadaptive:
                                    # rescale the image to the physical axes extents
                                    rect = QtCore.QRectF(np.min(x_axis), np.min(y_axis),
                                                         (np.max(x_axis) - np.min(x_axis)),
                                                         (np.max(y_axis) - np.min(y_axis)))
                                    im.setOpts(rescale=rect)
                                    im.setImage(node.read())
                                else:
                                    # adaptive: pass (x, y, value) triplets
                                    im.setImage(np.vstack((x_axis, y_axis, node.read())).T)
                                # route the first three overlays to the red/green/blue histograms
                                if ind == 0:
                                    # im.setLookupTable(colors_red)
                                    self.viewer.histogram_red.setImageItem(im)
                                    if not self.viewer.histogram_red.isVisible():
                                        self.viewer.histogram_red.setVisible(True)
                                elif ind == 1:
                                    # im.setLookupTable(colors_green)
                                    self.viewer.histogram_green.setImageItem(im)
                                    if not self.viewer.histogram_green.isVisible():
                                        self.viewer.histogram_green.setVisible(True)
                                else:
                                    # im.setLookupTable(colors_blue)
                                    self.viewer.histogram_blue.setImageItem(im)
                                    if not self.viewer.histogram_blue.isVisible():
                                        self.viewer.histogram_blue.setVisible(True)
                                # remember the overlay so it can be removed on uncheck/reparent
                                self.overlays.append(dict(name='{:s}_{:03d}'.format(param.name(), ind),
                                                          image=im))
                                ind += 1
                        # self.viewer.image_widget.view.autoRange()
                    except Exception as e:
                        logger.exception(str(e))
                else:
                    # unchecked: drop every overlay belonging to this parameter
                    for overlay in self.overlays[:]:
                        if param.name() in overlay['name']:
                            ind = self.overlays.index(overlay)
                            self.viewer.image_widget.plotitem.removeItem(overlay['image'])
                            self.overlays.pop(ind)
        elif change == 'parent':
            # parameter moved in the tree: remove its overlays as well
            for overlay in self.overlays[:]:
                if param.name() in overlay['name']:
                    ind = self.overlays.index(overlay)
                    self.viewer.image_widget.plotitem.removeItem(overlay['image'])
                    self.overlays.pop(ind)
def show_h5_data(self, item, col):
    """Display the h5 node selected in the tree widget: arrays go to the hyperviewer
    (with any axes/navigation axes found next to them), lists of strings go to the text list.
    Errors are reported through *status_signal*, never raised.
    """
    try:
        # item.text(2) holds the node path within the h5 file
        self.current_node_path = item.text(2)
        self.show_h5_attributes(item, col)
        node = self.h5file.get_node(item.text(2))
        self.data_node_signal.emit(node._v_pathname)
        if 'ARRAY' in node._v_attrs['CLASS']:
            data = node.read()
            nav_axes = []
            axes = dict([])
            x_axis = None
            y_axis = None
            nav_x_axis = None
            nav_y_axis = None
            if isinstance(data, np.ndarray):
                data = np.squeeze(data)
                if 'type' in node._v_attrs:  # was the case for older version of pymodaq files
                    if 'data' in node._v_attrs['type'] or 'channel' in node._v_attrs['type'].lower():
                        parent_path = node._v_parent._v_pathname
                        children = list(node._v_parent._v_children)

                        if 'data_dimension' not in node._v_attrs:  # for backcompatibility
                            data_dim = node._v_attrs['data_type']
                        else:
                            data_dim = node._v_attrs['data_dimension']

                        # signal axes ('X_axis'/'Y_axis') stored as siblings of the data node
                        tmp_axes = ['x_axis', 'y_axis']
                        for ax in tmp_axes:
                            if capitalize(ax) in children:
                                axis_node = self.h5file.get_node(parent_path + '/{:s}'.format(capitalize(ax)))
                                axes[ax] = dict(data=axis_node.read())
                                if 'units' in axis_node._v_attrs:
                                    axes[ax]['units'] = axis_node._v_attrs['units']
                                if 'label' in axis_node._v_attrs:
                                    axes[ax]['label'] = axis_node._v_attrs['label']

                        if 'scan_type' in node._v_attrs:
                            scan_type = node._v_attrs['scan_type'].lower()
                            if scan_type == 'scan1d' or scan_type == 'scan2d':
                                # navigation axes live four levels up, at the scan group level
                                scan_path = node._v_parent._v_parent._v_parent._v_parent._v_pathname
                                children = list(node._v_parent._v_parent._v_parent._v_parent._v_children)
                                tmp_nav_axes = ['x_axis', 'y_axis']
                                if scan_type == 'scan1d' or scan_type == 'scan2d':
                                    nav_axes = []
                                    for ind_ax, ax in enumerate(tmp_nav_axes):
                                        if 'Scan_{:s}'.format(ax) in children:
                                            nav_axes.append(ind_ax)
                                            axis_node = self.h5file.get_node(
                                                scan_path + '/Scan_{:s}'.format(ax))
                                            # scan positions may repeat; keep unique values
                                            axes['nav_{:s}'.format(ax)] = dict(
                                                data=np.unique(axis_node.read()))
                                            if axes['nav_{:s}'.format(ax)]['data'].shape[0] != \
                                                    data.shape[ind_ax]:
                                                # could happen in case of linear back to start type of scan:
                                                # duplicate each position to match the data length
                                                tmp_ax = []
                                                for ix in axes['nav_{:s}'.format(ax)]['data']:
                                                    tmp_ax.extend([ix, ix])
                                                    axes['nav_{:s}'.format(ax)] = dict(
                                                        data=np.array(tmp_ax))
                                            if 'units' in axis_node._v_attrs:
                                                axes['nav_{:s}'.format(ax)]['units'] = \
                                                    axis_node._v_attrs['units']
                                            if 'label' in axis_node._v_attrs:
                                                axes['nav_{:s}'.format(ax)]['label'] = \
                                                    axis_node._v_attrs['label']
                self.hyperviewer.show_data(data, nav_axes=nav_axes, **axes)
            elif isinstance(data, list):
                # only lists of strings are displayable, in the text list widget
                if isinstance(data[0], str):
                    self.ui.text_list.clear()
                    for txt in node.read():
                        self.ui.text_list.addItem(txt)
    except Exception as e:
        self.status_signal.emit(getLineInfo() + str(e))