示例#1
0
class PluginLoader(object):
    """Load a single Python source file as a plugin module.

    The path is validated at construction time; the actual import happens
    in :meth:`run`.
    """

    # Plugins must be Python source files
    extension = ".py"

    def __init__(self, filepath):
        """
        :param filepath: Path to an existing file ending in ``.py``
        :raises ValueError: if the path is not an existing file or does not
            have the required extension
        """
        if not _os.path.isfile(filepath):
            raise ValueError(
                "PluginLoader expects a single filename. '%s' does not point to an existing file"
                % filepath)
        if not filepath.endswith(self.extension):
            raise ValueError(
                "PluginLoader expects a filename ending with .py. '%s' does not have a .py extension"
                % filepath)
        self._filepath = filepath
        self._logger = Logger("PluginLoader")

    def run(self):
        """
            Try and load the module we are pointing at and return
            the module object.

            Any ImportErrors raised are not caught and are passed
            on to the caller
        """
        # Local imports: the deprecated module-level `imp` alias (if any)
        # is left untouched for other users of this file.
        import importlib.machinery
        import importlib.util
        import sys

        pathname = self._filepath
        # Module name is the file name without directory or extension
        name = _os.path.splitext(_os.path.basename(pathname))[0]
        self._logger.debug("Loading python plugin %s" % pathname)
        # Official importlib recipe replacing imp.load_source (imp was
        # deprecated since Python 3.3 and removed in Python 3.12).
        # Registering in sys.modules matches imp.load_source's behaviour.
        loader = importlib.machinery.SourceFileLoader(name, pathname)
        spec = importlib.util.spec_from_file_location(name, pathname, loader=loader)
        module = importlib.util.module_from_spec(spec)
        sys.modules[name] = module
        loader.exec_module(module)
        return module
示例#2
0
File: plugins.py  Project: liyulun/mantid
class PluginLoader(object):
    """Loads a single ``.py`` file as a Python module.

    The constructor only validates the path; :meth:`run` performs the
    actual import.
    """

    # required filename extension for plugin files
    extension = ".py"

    def __init__(self, filepath):
        """
        :param filepath: path to an existing ``.py`` file
        :raises ValueError: for a missing file or a wrong extension
        """
        if not _os.path.isfile(filepath):
            raise ValueError("PluginLoader expects a single filename. '%s' does not point to an existing file" % filepath)
        if not filepath.endswith(self.extension):
            raise ValueError("PluginLoader expects a filename ending with .py. '%s' does not have a .py extension" % filepath)
        self._filepath = filepath
        self._logger = Logger("PluginLoader")

    def run(self):
        """Import the plugin file and return the resulting module object.

        Any ImportErrors raised during the import are not caught here and
        propagate to the caller.
        """
        source_path = self._filepath
        # module name = file name stripped of directory and extension
        module_name = _os.path.splitext(_os.path.basename(source_path))[0]
        self._logger.debug("Loading python plugin %s" % source_path)
        return _imp.load_source(module_name, source_path)
示例#3
0
    def test_unicode_logger(self):
        """Check a Logger can be created and exposes the standard log methods."""
        logger = Logger("LoggerTest")
        self.assertTrue(isinstance(logger, Logger))
        required_attrs = ['fatal', 'error', 'warning', 'notice', 'information', 'debug']
        for att in required_attrs:
            if not hasattr(logger, att):
                self.fail("Logger object does not have the required attribute '%s'" % att)

        # exercise every log level with a simple message
        for log_call in (logger.fatal, logger.error, logger.warning,
                         logger.notice, logger.information, logger.debug):
            log_call('This is a test')
示例#4
0
    def test_unicode_logger(self):
        """Verify Logger construction and the presence of all logging methods."""
        logger = Logger("LoggerTest")
        self.assertTrue(isinstance(logger, Logger))
        for att in ('fatal', 'error', 'warning', 'notice', 'information', 'debug'):
            if not hasattr(logger, att):
                self.fail("Logger object does not have the required attribute '%s'" % att)

        # call each level once to make sure none of them raises
        message = 'This is a test'
        logger.fatal(message)
        logger.error(message)
        logger.warning(message)
        logger.notice(message)
        logger.information(message)
        logger.debug(message)
示例#5
0
    def test_unicode_logger(self):
        """Check the Logger API: log levels plus the accumulate/flush/purge methods."""
        logger = Logger("LoggerTest")
        self.assertTrue(isinstance(logger, Logger))
        expected_api = (
            'fatal', 'error', 'warning', 'notice', 'information', 'debug',
            'flush', 'purge', 'accumulate', 'flushDebug', 'flushInformation',
            'flushNotice', 'flushWarning', 'flushError', 'flushFatal')
        for att in expected_api:
            if not hasattr(logger, att):
                self.fail(
                    "Logger object does not have the required attribute '%s'" %
                    att)

        # one message through every simple log level
        for level in (logger.fatal, logger.error, logger.warning,
                      logger.notice, logger.information, logger.debug):
            level('This is a test')

        # exercise the accumulate buffer and every flush variant
        logger.purge()
        logger.accumulate('one')
        logger.accumulate('two')
        logger.flush()
        logger.accumulate('three')
        logger.flushDebug()
        logger.accumulate('four')
        logger.flushInformation()
        logger.accumulate('five')
        logger.flushNotice()
        logger.accumulate('six')
        logger.flushWarning()
        logger.accumulate('seven')
        logger.flushError()
        logger.accumulate('eight')
        logger.flushFatal()
        logger.purge()
示例#6
0
class Splitter:
    r"""
    Time splitter

    Splits a run into sub-runs based on its 'scan_index' sample log: each
    sub-run covers the interval during which 'scan_index' holds one positive
    value. Produces interleaved start/stop times (``times``), the sub-run
    numbers (``subruns``) and per-sub-run time filters (``propertyFilters``).

    Parameters
    ----------
    runObj: ~mantid.
        Run object holding the sample logs; must contain a non-empty
        'scan_index' time-series log.
    """
    def __init__(self, runObj):
        self._log = Logger(__name__)

        # verify the scan index exists
        try:
            if runObj['scan_index'].size() == 0:
                raise RuntimeError('"scan_index" is empty')
        except KeyError as e:
            raise RuntimeError('"scan_index" does not exist') from e

        # Get the time and value from the run object
        scan_index_times = runObj['scan_index'].times  # absolute times
        scan_index_value = runObj['scan_index'].value
        # TODO add final time from pcharge logs + 1s with scan_index=0

        if np.unique(scan_index_value).size == 1:
            raise RuntimeError('WARNING: only one scan_index value'
                               )  # TODO should be something else

        # interleaved (start, stop) boundary times and sub-run numbers;
        # filled in by __generate_sub_run_splitter below
        self.times = None
        self.subruns = None
        self.propertyFilters = list()

        self.__generate_sub_run_splitter(scan_index_times, scan_index_value)
        self.__correct_starting_scan_index_time(runObj)
        self._createPropertyFilters()

    def __generate_sub_run_splitter(self, scan_index_times,
                                    scan_index_value) -> None:
        """Generate event splitters according to sub runs

        Builds ``self.times`` (interleaved start/stop boundary times) and
        ``self.subruns`` (sub-run number for each start/stop pair) from the
        scan_index log. A scan_index value of 0 means "no sub-run active".

        Parameters
        ----------
        scan_index_times
            absolute times of each scan_index log entry
        scan_index_value
            scan_index value at each log entry
        """
        # Init
        sub_run_time_list = list()
        sub_run_value_list = list()
        num_scan_index = scan_index_times.shape[0]

        # Loop through all scan indexes to get the correct splitters
        curr_sub_run = 0
        for i_scan in range(num_scan_index):
            if scan_index_value[i_scan] != curr_sub_run:
                #  New run no same as old one: There will be some change!
                if curr_sub_run > 0:
                    # previous run shall be saved: it is ending: record the ending time/current time
                    sub_run_time_list.append(scan_index_times[i_scan])

                if scan_index_value[i_scan] > 0:
                    # new scan index is valid: a new run shall start: record the starting time and sub run value
                    sub_run_time_list.append(scan_index_times[i_scan])
                    sub_run_value_list.append(scan_index_value[i_scan])

                # Update the curr_sub_run
                curr_sub_run = scan_index_value[i_scan]

                # Note: there is one scenario to append 2 and same time stamp: scan index change from i to j, where
                # both i and j are larger than 0
            # END-IF
        # END-FOR

        # Check the ending
        if curr_sub_run > 0:
            # In case the stop (scan_index = 0) is not recorded - add end time one day into the future
            sub_run_time_list.append(sub_run_time_list[-1] +
                                     np.timedelta64(1, 'D'))

        # Convert from list to array
        self.times = np.array(sub_run_time_list)
        self.subruns = np.array(sub_run_value_list)

        # Sanity check: times must pair up as (start, stop) per sub-run,
        # i.e. a non-empty array with an even number of entries
        if self.times.shape[0] % 2 == 1 or self.times.shape[0] == 0:
            raise RuntimeError(
                'Algorithm error: Failed to parse\nTime: {}\nValue: {}.\n'
                'Current resulted time ({}) is incorrect as odd/even'
                ''.format(scan_index_times, scan_index_value, self.times))

        if self.times.shape[0] != self.subruns.shape[0] * 2:
            raise RuntimeError(
                'Sub run number {} and sub run times {} do not match (as twice)'
                ''.format(self.subruns, self.times))

    def __correct_starting_scan_index_time(self,
                                           runObj,
                                           abs_tolerance: float = 0.05
                                           ) -> None:
        """Correct the DAS-issue for mis-record the first scan_index/sub run before the motor is in position

        This goes through a subset of logs and compares when they actually
        get to their specified setpoint, updating the start time for
        event filtering. When this is done ``self.times[0]`` will have been
        updated to the latest time any monitored motor reached its setpoint.

        Parameters
        ----------
        runObj
            Run object holding the motor logs and their ``*Setpoint`` logs
        abs_tolerance: float
            When then log is within this absolute tolerance of the setpoint, it is correct
        """
        start_time = self.times[0]
        # loop through the 'special' logs
        for log_name in ['sx', 'sy', 'sz', '2theta', 'omega', 'chi', 'phi']:
            if log_name not in runObj:
                continue  # log doesn't exist - not a good one to look at
            if log_name + 'Setpoint' not in runObj:
                continue  # log doesn't have a setpoint - not a good one to look at
            if runObj[log_name].size() == 1:
                continue  # there is only one value

            # get the observed values of the log
            observed = runObj[log_name].value
            if observed.std() <= .5 * abs_tolerance:
                continue  # don't bother if the log is constant within half of the tolerance

            # look for the setpoint and find when the log first got there
            # only look at first setpoint
            set_point = runObj[log_name + 'Setpoint'].value[0]
            for log_time, value in zip(runObj[log_name].times, observed):
                if abs(value - set_point) < abs_tolerance:
                    # pick the larger of what was found and the previous largest value
                    if log_time > start_time:
                        start_time = log_time
                    break

        self._log.debug('Shift from start_time {} to {}'.format(
            np.datetime_as_string(self.times[0]),
            np.datetime_as_string(start_time)))
        self.times[0] = start_time

    @property
    def durations(self):
        """Duration of each sub-run in seconds (stop minus start per pair)"""
        return (self.times[1::2] - self.times[::2]) / np.timedelta64(1, 's')

    @property
    def size(self) -> int:
        """Number of sub-runs found in the scan_index log"""
        return self.subruns.size

    def _createPropertyFilters(self) -> None:
        """Create one boolean time-series filter per sub-run.

        With a single sub-run no filtering is needed, so ``None`` is stored
        instead of a filter.
        """
        self.propertyFilters = list()
        if self.subruns.size == 1:
            self.propertyFilters.append(None)
        else:
            for subrun_index in range(self.subruns.size):
                subrun_start_time = self.times[2 * subrun_index]
                subrun_stop_time = self.times[2 * subrun_index + 1]

                # create a Boolean time series property as the filter
                time_filter = BoolTimeSeriesProperty('filter')
                time_filter.addValue(subrun_start_time, True)
                time_filter.addValue(subrun_stop_time, False)

                self.propertyFilters.append(time_filter)
示例#7
0
class SliceViewer(ObservingPresenter, SliceViewerBasePresenter):
    """
    Presenter for the Slice Viewer: connects a SliceViewerModel (slicing and
    cutting operations on a workspace) to a SliceViewerView, and reacts both
    to user interaction and to ADS events (workspace replaced/renamed/
    deleted/cleared).
    """
    # Lifetime (ms) of temporary messages shown in the view's status area
    TEMPORARY_STATUS_TIMEOUT = 2000

    def __init__(self, ws, parent=None, window_flags=Qt.Window, model=None, view=None, conf=None):
        """
        Create a presenter for controlling the slice display for a workspace
        :param ws: Workspace containing data to display and slice
        :param parent: An optional parent widget
        :param window_flags: An optional set of window flags
        :param model: A model to define slicing operations. If None uses SliceViewerModel
        :param view: A view to display the operations. If None uses SliceViewerView
        :param conf: An optional configuration object forwarded to the view
        """
        model: SliceViewerModel = model if model else SliceViewerModel(ws)
        self.view = view if view else SliceViewerView(self,
                                                      Dimensions.get_dimensions_info(ws),
                                                      model.can_normalize_workspace(), parent,
                                                      window_flags, conf)
        super().__init__(ws, self.view.data_view, model)

        self._logger = Logger("SliceViewer")
        self._peaks_presenter: PeaksViewerCollectionPresenter = None
        self._cutviewer_presenter = None
        self.conf = conf

        # Acts as a 'time capsule' to the properties of the model at this
        # point in the execution. By the time the ADS observer calls self.replace_workspace,
        # the workspace associated with self.model has already been changed.
        self.initial_model_properties = model.get_properties()
        self._new_plot_method, self.update_plot_data = self._decide_plot_update_methods()

        self.view.setWindowTitle(self.model.get_title())
        self.view.data_view.create_axes_orthogonal(
            redraw_on_zoom=not WorkspaceInfo.can_support_dynamic_rebinning(self.model.ws))

        if self.model.can_normalize_workspace():
            self.view.data_view.set_normalization(ws)
            self.view.data_view.norm_opts.currentTextChanged.connect(self.normalization_changed)
        if not self.model.can_support_peaks_overlays():
            self.view.data_view.disable_tool_button(ToolItemText.OVERLAY_PEAKS)
        # check whether to enable non-orthog view
        # don't know whether can always assume init with display indices (0,1) - so get sliceinfo
        sliceinfo = self.get_sliceinfo()
        if not sliceinfo.can_support_nonorthogonal_axes():
            self.view.data_view.disable_tool_button(ToolItemText.NONORTHOGONAL_AXES)
        if not self.model.can_support_non_axis_cuts():
            self.view.data_view.disable_tool_button(ToolItemText.NONAXISALIGNEDCUTS)

        self.view.data_view.help_button.clicked.connect(self.action_open_help_window)

        self.refresh_view()

        # Start the GUI with zoom selected.
        self.view.data_view.activate_tool(ToolItemText.ZOOM)

        self.ads_observer = SliceViewerADSObserver(self.replace_workspace, self.rename_workspace,
                                                   self.ADS_cleared, self.delete_workspace)

        # simulate clicking on the home button, which will force all signal and slot connections
        # properly set.
        # NOTE: Some part of the connections are not set in the correct, resulting in a strange behavior
        #       where the colorbar and view is not updated with switch between different scales.
        #       This is a ducktape fix and should be revisited once we have a better way to do this.
        # NOTE: This workaround solve the problem, but it leads to a failure in
        #       projectroot.qt.python.mantidqt_qt5.test_sliceviewer_presenter.test_sliceviewer_presenter
        #       Given that this issue is not of high priority, we are leaving it as is for now.
        # self.show_all_data_clicked()

    def new_plot(self, *args, **kwargs):
        """Dispatch to the new-plot implementation chosen for the workspace type"""
        self._new_plot_method(*args, **kwargs)

    def new_plot_MDH(self, dimensions_transposing=False, dimensions_changing=False):
        """
        Tell the view to display a new plot of an MDHistoWorkspace
        """
        data_view = self.view.data_view
        limits = data_view.get_axes_limits()

        if limits is None or not WorkspaceInfo.can_support_dynamic_rebinning(self.model.ws):
            data_view.plot_MDH(self.model.get_ws(), slicepoint=self.get_slicepoint())
            self._call_peaks_presenter_if_created("notify", PeaksViewerPresenter.Event.OverlayPeaks)
        else:
            self.new_plot_MDE(dimensions_transposing, dimensions_changing)

    def new_plot_MDE(self, dimensions_transposing=False, dimensions_changing=False):
        """
        Tell the view to display a new plot of an MDEventWorkspace
        """
        data_view = self.view.data_view
        limits = data_view.get_axes_limits()

        # The value at the i'th index of this tells us that the axis with that value (0 or 1) will display dimension i
        dimension_indices = self.view.dimensions.get_states()

        if dimensions_transposing:
            # Since the dimensions are transposing, the limits we have from the view are the wrong way around
            # with respect to the axes the dimensions are about to be displayed, so get the previous dimension states.
            dimension_indices = self.view.dimensions.get_previous_states()
        elif dimensions_changing:
            # If we are changing which dimensions are to be displayed, the limits we got from the view are stale
            # as they refer to the previous two dimensions that were displayed.
            limits = None

        data_view.plot_MDH(
            self.model.get_ws_MDE(slicepoint=self.get_slicepoint(),
                                  bin_params=data_view.dimensions.get_bin_params(),
                                  limits=limits,
                                  dimension_indices=dimension_indices))
        self._call_peaks_presenter_if_created("notify", PeaksViewerPresenter.Event.OverlayPeaks)

    def update_plot_data_MDH(self):
        """
        Update the view to display an updated MDHistoWorkspace slice/cut
        """
        self.view.data_view.update_plot_data(
            self.model.get_data(self.get_slicepoint(),
                                transpose=self.view.data_view.dimensions.transpose))

    def update_plot_data_MDE(self):
        """
        Update the view to display an updated MDEventWorkspace slice/cut
        """
        data_view = self.view.data_view
        data_view.update_plot_data(
            self.model.get_data(self.get_slicepoint(),
                                bin_params=data_view.dimensions.get_bin_params(),
                                dimension_indices=data_view.dimensions.get_states(),
                                limits=data_view.get_axes_limits(),
                                transpose=self.view.data_view.dimensions.transpose))

    def update_plot_data_matrix(self):
        # should never be called, since this workspace type is only 2D the plot dimensions never change
        pass

    def get_frame(self) -> SpecialCoordinateSystem:
        """Returns frame of workspace - require access for adding a peak in peaksviewer"""
        return self.model.get_frame()

    def get_sliceinfo(self, force_nonortho_mode: bool = False):
        """
        :param force_nonortho_mode: if True then don't use orthogonal angles even if non_ortho mode == False - this
            is necessary because when non-ortho view is toggled the data_view is not updated at the point a new
            SliceInfo is created
        :return: a SliceInfo object describing the current slice and transform (which by default will be orthogonal
                 if non-ortho mode is False)
        """
        dimensions = self.view.data_view.dimensions
        non_ortho_mode = True if force_nonortho_mode else self.view.data_view.nonorthogonal_mode
        axes_angles = self.model.get_axes_angles(force_orthogonal=not non_ortho_mode)  # None if can't support transform
        return SliceInfo(point=dimensions.get_slicepoint(),
                         transpose=dimensions.transpose,
                         range=dimensions.get_slicerange(),
                         qflags=dimensions.qflags,
                         axes_angles=axes_angles)

    def get_proj_matrix(self):
        """Return the projection matrix from the model"""
        return self.model.get_proj_matrix()

    def get_axes_limits(self):
        """Return the current axes limits from the view's data display"""
        return self.view.data_view.get_axes_limits()

    def dimensions_changed(self):
        """Indicates that the dimensions have changed"""
        data_view = self._data_view
        sliceinfo = self.get_sliceinfo()
        if data_view.nonorthogonal_mode:
            if sliceinfo.can_support_nonorthogonal_axes():
                # axes need to be recreated to have the correct transform associated
                data_view.create_axes_nonorthogonal(sliceinfo.get_northogonal_transform())
            else:
                data_view.disable_tool_button(ToolItemText.NONORTHOGONAL_AXES)
                data_view.create_axes_orthogonal()
        else:
            if sliceinfo.can_support_nonorthogonal_axes():
                data_view.enable_tool_button(ToolItemText.NONORTHOGONAL_AXES)
            else:
                data_view.disable_tool_button(ToolItemText.NONORTHOGONAL_AXES)

        ws_type = WorkspaceInfo.get_ws_type(self.model.ws)
        if ws_type == WS_TYPE.MDH or ws_type == WS_TYPE.MDE:
            if self.model.get_number_dimensions() > 2 and \
                    sliceinfo.slicepoint[data_view.dimensions.get_previous_states().index(None)] is None:
                # The dimension of the slicepoint has changed
                self.new_plot(dimensions_changing=True)
            else:
                self.new_plot(dimensions_transposing=True)
        else:
            self.new_plot()
        self._call_cutviewer_presenter_if_created("on_dimension_changed")

    def slicepoint_changed(self):
        """Indicates the slicepoint has been updated"""
        self._call_peaks_presenter_if_created("notify",
                                              PeaksViewerPresenter.Event.SlicePointChanged)
        self._call_cutviewer_presenter_if_created("on_slicepoint_changed")
        self.update_plot_data()

    def export_roi(self, limits):
        """Notify that an roi has been selected for export to a workspace
        :param limits: 2-tuple of ((left, right), (bottom, top)). These are in display order
        """
        data_view = self.view.data_view

        try:
            self._show_status_message(
                self.model.export_roi_to_workspace(self.get_slicepoint(),
                                                   bin_params=data_view.dimensions.get_bin_params(),
                                                   limits=limits,
                                                   transpose=data_view.dimensions.transpose,
                                                   dimension_indices=data_view.dimensions.get_states()))
        except Exception as exc:
            self._logger.error(str(exc))
            self._show_status_message("Error exporting ROI")

    def export_cut(self, limits, cut_type):
        """Notify that an roi has been selected for export to a workspace
        :param limits: 2-tuple of ((left, right), (bottom, top)). These are in display order
        and could be transposed w.r.t to the data
        :param cut: A string indicating the required cut type
        """
        data_view = self.view.data_view

        try:
            self._show_status_message(
                self.model.export_cuts_to_workspace(
                    self.get_slicepoint(),
                    bin_params=data_view.dimensions.get_bin_params(),
                    limits=limits,
                    transpose=data_view.dimensions.transpose,
                    dimension_indices=data_view.dimensions.get_states(),
                    cut=cut_type))
        except Exception as exc:
            self._logger.error(str(exc))
            self._show_status_message("Error exporting roi cut")

    def export_pixel_cut(self, pos, axis):
        """Notify a single pixel line plot has been requested from the
        given position in data coordinates.
        :param pos: Position on the image
        :param axis: String indicating the axis the position relates to: 'x' or 'y'
        """
        data_view = self.view.data_view

        try:
            self._show_status_message(
                self.model.export_pixel_cut_to_workspace(
                    self.get_slicepoint(),
                    bin_params=data_view.dimensions.get_bin_params(),
                    pos=pos,
                    transpose=data_view.dimensions.transpose,
                    axis=axis))
        except Exception as exc:
            self._logger.error(str(exc))
            self._show_status_message("Error exporting single-pixel cut")

    def perform_non_axis_aligned_cut(self, vectors, extents, nbins):
        """Perform a non-axis-aligned cut and notify the cut viewer with the result"""
        try:
            wscut_name = self.model.perform_non_axis_aligned_cut_to_workspace(vectors, extents, nbins)
            self._call_cutviewer_presenter_if_created('on_cut_done', wscut_name)
        except Exception as exc:
            self._logger.error(str(exc))
            self._show_status_message("Error exporting single-pixel cut")

    def nonorthogonal_axes(self, state: bool):
        """
        Toggle non-orthogonal axes on current view
        :param state: If true a request is being made to turn them on, else they should be turned off
        """
        data_view = self.view.data_view
        if state:
            data_view.deactivate_and_disable_tool(ToolItemText.REGIONSELECTION)
            data_view.disable_tool_button(ToolItemText.NONAXISALIGNEDCUTS)
            data_view.disable_tool_button(ToolItemText.LINEPLOTS)
            # set transform from sliceinfo but ignore view as non-ortho state not set yet
            data_view.create_axes_nonorthogonal(self.get_sliceinfo(force_nonortho_mode=True).get_northogonal_transform())
            self.show_all_data_clicked()
        else:
            data_view.create_axes_orthogonal()
            data_view.enable_tool_button(ToolItemText.LINEPLOTS)
            data_view.enable_tool_button(ToolItemText.REGIONSELECTION)
            data_view.enable_tool_button(ToolItemText.NONAXISALIGNEDCUTS)

        self.new_plot()

    def normalization_changed(self, norm_type):
        """
        Notify the presenter that the type of normalization has changed.
        :param norm_type: "By bin width" = volume normalization else no normalization
        """
        self.normalization = norm_type == "By bin width"
        self.new_plot()

    def overlay_peaks_workspaces(self):
        """
        Request activation of peak overlay tools.
          - Asks user to select peaks workspace(s), taking into account any current selection
          - Attaches peaks table viewer/tools if new workspaces requested. Removes any unselected
          - Displays peaks on data display (if any left to display)
        """
        names_overlayed = self._overlayed_peaks_workspaces()
        names_to_overlay = self.view.query_peaks_to_overlay(names_overlayed)
        if names_to_overlay is None:
            # cancelled
            return
        if names_to_overlay or names_overlayed:
            self._create_peaks_presenter_if_necessary().overlay_peaksworkspaces(names_to_overlay)
        else:
            self.view.peaks_view.hide()

    def non_axis_aligned_cut(self, state):
        """
        Toggle the interactive non-axis-aligned cutting tool
        :param state: If true show the cut viewer and disable conflicting tools, else restore them
        """
        data_view = self._data_view
        if state:
            if self._cutviewer_presenter is None:
                self._cutviewer_presenter = CutViewerPresenter(self, data_view.canvas)
                self.view.add_widget_to_splitter(self._cutviewer_presenter.get_view())
            self._cutviewer_presenter.show_view()
            data_view.deactivate_tool(ToolItemText.ZOOM)
            for tool in [ToolItemText.REGIONSELECTION, ToolItemText.LINEPLOTS, ToolItemText.NONORTHOGONAL_AXES]:
                data_view.deactivate_and_disable_tool(tool)
            # turn off cursor tracking as this causes plot to resize interfering with interactive cutting tool
            data_view.track_cursor.setChecked(False)  # on_track_cursor_state_change(False)
        else:
            self._cutviewer_presenter.hide_view()
            for tool in [ToolItemText.REGIONSELECTION, ToolItemText.LINEPLOTS]:
                data_view.enable_tool_button(tool)
            if self.get_sliceinfo().can_support_nonorthogonal_axes():
                data_view.enable_tool_button(ToolItemText.NONORTHOGONAL_AXES)

    def replace_workspace(self, workspace_name, workspace):
        """
        Called when the SliceViewerADSObserver has detected that a workspace has changed
        @param workspace_name: the name of the workspace that has changed
        @param workspace: the workspace that has changed
        """
        if not self.model.workspace_equals(workspace_name):
            # TODO this is a dead branch, since the ADS observer will call this if the
            # names are the same, but the model "workspace_equals" simply checks for the same name
            return
        try:
            candidate_model = SliceViewerModel(workspace)
            candidate_model_properties = candidate_model.get_properties()
            for (prop_name, value) in candidate_model_properties.items():
                if self.initial_model_properties[prop_name] != value:
                    raise ValueError(f"The property {prop_name} is different on the new workspace.")

            # New model is OK, proceed with updating Slice Viewer
            self.model = candidate_model
            # store in _new_plot_method (not new_plot) to match __init__ and
            # keep the new_plot() dispatcher intact rather than shadowing it
            self._new_plot_method, self.update_plot_data = self._decide_plot_update_methods()
            self.refresh_view()
        except ValueError as err:
            self._close_view_with_message(
                f"Closing Sliceviewer as the underlying workspace was changed: {str(err)}")
            return

    def refresh_view(self):
        """
        Updates the view to enable/disable certain options depending on the model.
        """
        if not self.view:
            return

        # we don't want to use model.get_ws for the image info widget as this needs
        # extra arguments depending on workspace type.
        ws = self.model.ws
        ws.readLock()
        try:
            self.view.data_view.image_info_widget.setWorkspace(ws)
            self.new_plot()
        finally:
            ws.unlock()

    def rename_workspace(self, old_name, new_name):
        """Update the model and view title when the underlying workspace is renamed"""
        if self.model.workspace_equals(old_name):
            self.model.set_ws_name(new_name)
            self.view.emit_rename(self.model.get_title(new_name))

    def delete_workspace(self, ws_name):
        """Close the view if the workspace being displayed is deleted"""
        if self.model.workspace_equals(ws_name):
            self.view.emit_close()

    def ADS_cleared(self):
        """Close the view when the Analysis Data Service is cleared"""
        if self.view:
            self.view.emit_close()

    def clear_observer(self):
        """Called by ObservingView on close event"""
        self.ads_observer = None
        if self._peaks_presenter is not None:
            self._peaks_presenter.clear_observer()

    def canvas_clicked(self, event):
        """Forward a canvas click to the peaks presenter (add/delete peak at that position)"""
        if self._peaks_presenter is not None:
            if event.inaxes:
                sliceinfo = self.get_sliceinfo()
                self._logger.debug(f"Coordinates selected x={event.xdata} y={event.ydata} z={sliceinfo.z_value}")
                pos = sliceinfo.inverse_transform([event.xdata, event.ydata, sliceinfo.z_value])
                self._logger.debug(f"Coordinates transformed into {self.get_frame()} frame, pos={pos}")
                self._peaks_presenter.add_delete_peak(pos)
                self.view.data_view.canvas.draw_idle()

    def deactivate_zoom_pan(self):
        """Deactivate the zoom/pan tool on the data view"""
        self.view.data_view.deactivate_zoom_pan()

    def zoom_pan_clicked(self, active):
        """Disable interactive peak add/delete while zoom/pan is active"""
        if active and self._peaks_presenter is not None:
            self._peaks_presenter.deactivate_peak_add_delete()

    # private api
    def _create_peaks_presenter_if_necessary(self):
        """Lazily create and return the peaks collection presenter"""
        if self._peaks_presenter is None:
            self._peaks_presenter = PeaksViewerCollectionPresenter(self.view.peaks_view)
        return self._peaks_presenter

    def _call_peaks_presenter_if_created(self, attr, *args, **kwargs):
        """
        Call a method on the peaks presenter if it has been created
        :param attr: The attribute to call
        :param *args: Positional-arguments to pass to call
        :param **kwargs Keyword-arguments to pass to call
        """
        if self._peaks_presenter is not None:
            getattr(self._peaks_presenter, attr)(*args, **kwargs)

    def _call_cutviewer_presenter_if_created(self, attr, *args, **kwargs):
        """
        Call a method on the cut viewer presenter if it has been created
        :param attr: The attribute to call
        :param *args: Positional-arguments to pass to call
        :param **kwargs Keyword-arguments to pass to call
        """
        if self._cutviewer_presenter is not None:
            getattr(self._cutviewer_presenter, attr)(*args, **kwargs)

    def _show_status_message(self, message: str):
        """
        Show a temporary message in the status of the view
        """
        self.view.data_view.show_temporary_status_message(message, self.TEMPORARY_STATUS_TIMEOUT)

    def _overlayed_peaks_workspaces(self):
        """
        :return: A list of names of the current PeaksWorkspaces overlayed
        """
        current_workspaces = []
        if self._peaks_presenter is not None:
            current_workspaces = self._peaks_presenter.workspace_names()

        return current_workspaces

    def _decide_plot_update_methods(self) -> Tuple[Callable, Callable]:
        """
        Checks the type of workspace in self.model and decides which of the
        new_plot and update_plot_data methods to use
        :return: a 2-tuple of (new_plot method, update_plot_data method)
        """
        # TODO get rid of private access here
        ws_type = WorkspaceInfo.get_ws_type(self.model.ws)
        if ws_type == WS_TYPE.MDH:
            return self.new_plot_MDH, self.update_plot_data_MDH
        elif ws_type == WS_TYPE.MDE:
            return self.new_plot_MDE, self.update_plot_data_MDE
        else:
            return self.new_plot_matrix, self.update_plot_data_matrix

    def _close_view_with_message(self, message: str):
        """Close the view and log the reason as a warning"""
        self.view.emit_close()  # inherited from ObservingView
        self._logger.warning(message)

    def notify_close(self):
        """Drop the reference to the view when it closes"""
        self.view = None

    def action_open_help_window(self):
        """Open the Slice Viewer help page"""
        InterfaceManager().showHelpPage('qthelp://org.mantidproject/doc/workbench/sliceviewer.html')
示例#8
0
class HidraProjectFile(object):
    '''Read and/or write an HB2B project to an HDF5 with entries for detector counts, sample logs, reduced data,
    fitted peaks and etc.
    All the import/export information will be buffered in order to avoid exception during operation

    File structure:
    - experiment
        - scans (raw counts)
        - logs
    - instrument
        - calibration
    - reduced diffraction data
        - main
          - sub-run
          - ...
        - mask_A
          - sub-run
          - ...
        - mask_B
          - sub-run
          - ...
    '''
    def __init__(self, project_file_name, mode=HidraProjectFileMode.READONLY):
        """
        Initialization
        :param project_file_name: project file name (any non-empty path-like; coerced to str)
        :param mode: I/O mode (anything HidraProjectFileMode.getMode accepts)
        :raises RuntimeError: if no filename is supplied
        """
        # configure logging for this class
        self._log = Logger(__name__)

        # convert the mode to the enum
        self._io_mode = HidraProjectFileMode.getMode(mode)

        # check the file
        if not project_file_name:
            raise RuntimeError('Must supply a filename')
        self._file_name = str(project_file_name)  # force it to be a string
        # verifies permissions, absolutizes the path and sets self._is_writable
        self._checkFileAccess()

        # open the file using h5py
        self._project_h5 = h5py.File(self._file_name, mode=str(self._io_mode))
        if self._io_mode == HidraProjectFileMode.OVERWRITE:
            # brand-new file: lay out the expected group structure
            self._init_project()

    def _checkFileAccess(self):
        '''Verify the file has the correct access permissions and set the
        value of ``self._is_writable``
        '''
        # the file must already exist when reading or appending
        check_exist = self._io_mode in (HidraProjectFileMode.READONLY,
                                        HidraProjectFileMode.READWRITE)
        self._is_writable = self._io_mode != HidraProjectFileMode.READONLY

        # create a custom message based on requested access mode
        if self._io_mode == HidraProjectFileMode.READONLY:
            description = 'Read-only project file'
        elif self._io_mode == HidraProjectFileMode.OVERWRITE:
            description = 'Write-only project file'
        elif self._io_mode == HidraProjectFileMode.READWRITE:
            description = 'Append-mode project file'
        else:  # this should never happen
            # report the actual offending mode value (previously this formatted
            # the HidraProjectFileMode class itself, hiding the bad value)
            raise RuntimeError(
                'Hidra project file I/O mode {} is not supported'.format(
                    self._io_mode))

        # convert the filename to an absolute path so error messages are clearer
        self._file_name = os.path.abspath(self._file_name)

        # do the check
        checkdatatypes.check_file_name(self._file_name,
                                       check_exist,
                                       self._is_writable,
                                       is_dir=False,
                                       description=description)

    def _init_project(self):
        """
        Create the standard group layout for a freshly opened (overwrite-mode)
        project file.
        """
        assert self._project_h5 is not None, 'cannot be None'
        assert self._is_writable, 'must be writable'

        h5root = self._project_h5

        # raw experiment data: counts per sub run plus sample logs
        raw_entry = h5root.create_group(HidraConstants.RAW_DATA)
        raw_entry.create_group(HidraConstants.SUB_RUNS)
        raw_entry.create_group(HidraConstants.SAMPLE_LOGS)

        # instrument: calibration, geometry and detector (pixel) efficiency
        instrument = h5root.create_group(HidraConstants.INSTRUMENT)
        instrument.create_group(HidraConstants.CALIBRATION)
        geometry_group = instrument.create_group('geometry setup')
        geometry_group.create_group('detector')
        geometry_group.create_group('wave length')
        instrument.create_group(HidraConstants.DETECTOR_EFF)

        # masks: detector (pixel) masks and solid-angle masks
        mask_entry = h5root.create_group(HidraConstants.MASK)
        mask_entry.create_group(HidraConstants.DETECTOR_MASK)
        mask_entry.create_group(HidraConstants.SOLID_ANGLE_MASK)

        # fitted peaks
        h5root.create_group('peaks')

        # reduced diffraction data
        h5root.create_group(HidraConstants.REDUCED_DATA)

    def __del__(self):
        """Release the HDF5 handle when the object is garbage collected."""
        # If __init__ raised before _project_h5 was assigned, calling close()
        # unconditionally would raise AttributeError during finalization.
        if getattr(self, '_project_h5', None) is not None:
            self.close()

    @property
    def name(self):
        """
        File name on HDD

        NOTE(review): ``h5py.File.name`` is the name of the root HDF5 group
        (normally ``'/'``), not the path on disk; ``self._project_h5.filename``
        would give the file path. Confirm which one callers expect.
        """
        return self._project_h5.name

    def append_raw_counts(self, sub_run_number, counts_array):
        """Add raw detector counts collected in a single scan/Pt

        Parameters
        ----------
        sub_run_number : int
            sub run number (must be non-negative)
        counts_array : ~numpy.ndarray
            detector counts; stored flattened
        """
        # sanity checks: file open, writable, valid sub-run index
        assert self._project_h5 is not None, 'cannot be None'
        assert self._is_writable, 'must be writable'
        checkdatatypes.check_int_variable('Sub-run index', sub_run_number,
                                          (0, None))

        # each sub run gets a zero-padded 4-digit group holding its counts
        sub_runs_entry = self._project_h5[HidraConstants.RAW_DATA][
            HidraConstants.SUB_RUNS]
        scan_i_group = sub_runs_entry.create_group('{:04}'.format(sub_run_number))
        scan_i_group.create_dataset('counts', data=counts_array.reshape(-1))

    def append_experiment_log(self, log_name, log_value_array):
        """Add a sample log (scan indexes, 2theta, etc.) to the raw-data entry.

        :param log_name: name of the sample log
        :param log_value_array: array of log values
        :raises RuntimeError: when the dataset cannot be created
        """
        # sanity checks: file open, writable, valid log name
        assert self._project_h5 is not None, 'cannot be None'
        assert self._is_writable, 'must be writable'
        checkdatatypes.check_string_variable('Log name', log_name)

        try:
            self._log.debug('Add sample log: {}'.format(log_name))
            logs_entry = self._project_h5[HidraConstants.RAW_DATA][
                HidraConstants.SAMPLE_LOGS]
            logs_entry.create_dataset(log_name, data=log_value_array)
        except RuntimeError as run_err:
            raise RuntimeError('Unable to add log {} due to {}'.format(
                log_name, run_err))
        except TypeError as type_err:
            raise RuntimeError(
                'Failed to add log {} with value {} of type {}: {}'
                ''.format(log_name, log_value_array, type(log_value_array),
                          type_err))

    def read_default_masks(self):
        """Read the default mask, i.e., for pixels at the detector edges.

        Returns
        -------
        numpy.ndarray
            mask array, or None when no default mask is stored
        """
        try:
            return self.read_mask_detector_array(HidraConstants.DEFAULT_MASK)
        except RuntimeError:
            # no default mask stored in this file
            return None

    def read_user_masks(self, mask_dict):
        """Read every user-specified detector mask into *mask_dict*.

        The default mask is skipped; it is handled by read_default_masks.

        Parameters
        ----------
        mask_dict : dict
            dictionary to be filled with mask name -> mask array

        Returns
        -------
        None

        """
        try:
            mask_group = self._project_h5[HidraConstants.MASK][
                HidraConstants.DETECTOR_MASK]
        except KeyError:
            # old-format file without a mask entry: nothing to read
            return

        for mask_name in sorted(mask_group.keys()):
            if mask_name == HidraConstants.DEFAULT_MASK:
                continue  # default mask is read separately
            mask_dict[mask_name] = self.read_mask_detector_array(mask_name)

    def read_mask_detector_array(self, mask_name):
        """Get the mask from hidra project file (.h5) in the form of numpy array

        Location
          root
            - mask
                - detector
                     - mask_name

        Parameters
        ----------
        mask_name : str
            name of mask

        Returns
        -------
        numpy.ndarray
            mask array

        Raises
        ------
        RuntimeError
            if the file pre-dates the mask layout or the named mask is absent
        """
        try:
            # NOTE: '.value' is the legacy (pre-3.0) h5py dataset accessor
            mask_array = self._project_h5[HidraConstants.MASK][
                HidraConstants.DETECTOR_MASK][mask_name].value
        except KeyError as key_err:
            # distinguish the three possible failure layers to give a precise message:
            # missing 'mask' group, missing 'detector' sub-group, or missing mask name
            if HidraConstants.MASK not in self._project_h5.keys():
                err_msg = 'Project file {} does not have "{}" entry.  Its format is not up-to-date.' \
                          ''.format(self._file_name, HidraConstants.MASK)
            elif HidraConstants.DETECTOR_MASK not in self._project_h5[
                    HidraConstants.MASK]:
                err_msg = 'Project file {} does not have "{}" entry under {}. ' \
                          'Its format is not up-to-date.' \
                          ''.format(self._file_name, HidraConstants.DETECTOR_MASK, HidraConstants.MASK)
            else:
                err_msg = 'Detector mask {} does not exist.  Available masks are {}.' \
                          ''.format(mask_name, self._project_h5[HidraConstants.MASK].keys())
            raise RuntimeError('{}\nFYI: {}'.format(err_msg, key_err))

        return mask_array

    def write_mask_detector_array(self, mask_name, mask_array):
        """Write a detector mask, replacing any existing mask of the same name.

        Structure:
          root
            - mask
                - detector
                     - default/universal
                     - mask_name


        Parameters
        ----------
        mask_name : str or None
            mask name.  None for default/universal detector mask
        mask_array : numpy.ndarray
            (N, ), masks, 0 for masking, 1 for ROI

        Returns
        -------
        None

        """
        # fall back to the default/universal mask name
        if mask_name is None:
            mask_name = HidraConstants.DEFAULT_MASK

        detector_mask_group = self._project_h5[HidraConstants.MASK][
            HidraConstants.DETECTOR_MASK]

        # h5py cannot overwrite a dataset in place: drop any stale entry first
        if mask_name in detector_mask_group:
            del detector_mask_group[mask_name]

        detector_mask_group.create_dataset(mask_name, data=mask_array)

    def write_mask_solid_angle(self, mask_name, solid_angle_bin_edges):
        """
        Add mask in the form of solid angle
        Location: ..../main entry/mask/solid angle/
        data will be a range of solid angles and number of patterns to generate.
        example solid angle range = -8, 8, number of pattern = 3

        :param mask_name: name of the solid-angle mask
        :param solid_angle_bin_edges: numpy 1D array as s0, s1, s2, ...
        """
        solid_angle_entry = self._project_h5[HidraConstants.MASK][
            HidraConstants.SOLID_ANGLE_MASK]

        # h5py cannot overwrite a dataset in place: drop any stale entry first
        if mask_name in solid_angle_entry:
            del solid_angle_entry[mask_name]

        solid_angle_entry.create_dataset(mask_name, data=solid_angle_bin_edges)

    def read_mask_solid_angle(self, mask_name):
        """Get a mask in the form of solid angle bin edges.

        :param mask_name: name of the solid-angle mask
        :raises RuntimeError: if no mask of that name is stored
        """
        try:
            mask_array = self._project_h5[HidraConstants.MASK][
                HidraConstants.SOLID_ANGLE_MASK][mask_name]
        except KeyError as key_err:
            available = self._project_h5[HidraConstants.MASK][
                HidraConstants.SOLID_ANGLE_MASK].keys()
            raise RuntimeError(
                'Detector mask {} does not exist.  Available masks are {}. FYI: {}'
                ''.format(mask_name, available, key_err))

        return mask_array

    def close(self):
        '''
        Close the file without checking whether the file can be written or not. This can
        be called multiple times without issue.
        '''
        if self._project_h5 is None:
            return  # already closed: no-op

        self._project_h5.close()
        self._project_h5 = None  # mark as closed so repeat calls do nothing
        self._log.information('File {} is closed'.format(self._file_name))

    def save(self, verbose=False):
        """
        Persist all project information to the HDF file.

        The data has already been written to the h5py.File instance, so saving
        amounts to validating write access and closing the file.

        :param verbose: when True, log where the data was saved
        """
        self._validate_write_operation()

        if verbose:
            self._log.information(
                'Changes are saved to {0}. File is now closed.'.format(
                    self._project_h5.filename))

        self.close()

    def read_diffraction_2theta_array(self):
        """Get the (reduced) diffraction data's 2-theta vector

        Returns
        -------
        numpy.ndarray
            1D vector for unified 2theta vector for all sub runs
            2D array for possibly various 2theta vector for each

        """
        reduced_group = self._project_h5[HidraConstants.REDUCED_DATA]

        if HidraConstants.TWO_THETA in reduced_group:
            tth_key = HidraConstants.TWO_THETA
        else:
            # FIXME - '2Theta' is a patch for 'legacy' data.  It will be
            # removed after codes are stable
            tth_key = '2Theta'

        # legacy h5py '.value' dataset accessor
        return reduced_group[tth_key].value

    def read_diffraction_intensity_vector(self, mask_id, sub_run):
        """Get the (reduced) diffraction data's intensity.

        :param mask_id: mask name; None for the main (unmasked) reduced data
        :param sub_run: sub run number; None returns data for all sub runs
        :return: 1D array (single sub run) or 2D array (all sub runs)
        """
        # Default to the main (unmasked) reduced data
        if mask_id is None:
            mask_id = HidraConstants.REDUCED_MAIN

        checkdatatypes.check_string_variable(
            'Mask ID', mask_id,
            list(self._project_h5[HidraConstants.REDUCED_DATA].keys()))

        # Get data to return
        if sub_run is None:
            # all the sub runs
            reduced_diff_hist = self._project_h5[
                HidraConstants.REDUCED_DATA][mask_id].value
        else:
            # specific sub run: map the sub run number to its row index
            # (removed a dead re-defaulting of mask_id: it cannot be None here)
            sub_run_list = self.read_sub_runs()
            sub_run_index = sub_run_list.index(sub_run)

            reduced_diff_hist = self._project_h5[
                HidraConstants.REDUCED_DATA][mask_id].value[sub_run_index]

        return reduced_diff_hist

    def read_diffraction_variance_vector(self, mask_id, sub_run):
        """Get the (reduced) diffraction data's variances.

        :param mask_id: mask name; None for the main (unmasked) reduced data.
            '_var' is appended automatically to address the variance data set
        :param sub_run: sub run number; None returns data for all sub runs
        :return: 1D array (single sub run), 2D array (all sub runs), or None
            when no variance data set exists for this mask
        """
        # Default to the main (unmasked) reduced data
        if mask_id is None:
            mask_id = HidraConstants.REDUCED_MAIN

        # variance data sets are stored under '<mask_id>_var'
        if '_var' not in mask_id:
            mask_id += '_var'

        try:
            checkdatatypes.check_string_variable(
                'Mask ID', mask_id,
                list(self._project_h5[HidraConstants.REDUCED_DATA].keys()))

            # Get data to return
            if sub_run is None:
                # all the sub runs
                reduced_variance_hist = self._project_h5[
                    HidraConstants.REDUCED_DATA][mask_id].value
            else:
                # specific sub run: map the sub run number to its row index
                # (removed dead re-defaulting/re-suffixing of mask_id: both
                # were already applied above and cannot trigger here)
                sub_run_list = self.read_sub_runs()
                sub_run_index = sub_run_list.index(sub_run)

                reduced_variance_hist = self._project_h5[
                    HidraConstants.REDUCED_DATA][mask_id].value[sub_run_index]
        except ValueError:
            # mask_id not among the reduced data sets (check_string_variable)
            # or sub_run not in the sub-run list (list.index)
            reduced_variance_hist = None

        return reduced_variance_hist

    def read_diffraction_masks(self):
        """
        Get the list of masks stored with the reduced diffraction data.
        """
        masks = list(self._project_h5[HidraConstants.REDUCED_DATA].keys())

        # the 2theta vector lives alongside the masks but is not a mask itself
        # FIXME - '2Theta' variant can be removed when Hidra-16_Log.h5 is fixed
        # with the correct entry name '2theta' (aka HidraConstants.TWO_THETA)
        for tth_key in (HidraConstants.TWO_THETA, '2Theta'):
            if tth_key in masks:
                masks.remove(tth_key)

        return masks

    def read_instrument_geometry(self):
        """
        Get instrument geometry parameters
        :return: an instance of instrument_geometry.InstrumentSetup
        """
        # navigate to the detector geometry entry
        detector_group = self._project_h5[HidraConstants.INSTRUMENT][
            HidraConstants.GEOMETRY_SETUP][HidraConstants.DETECTOR_PARAMS]

        # pull the stored values (legacy h5py '.value' accessor)
        num_rows, num_cols = detector_group['detector size'].value
        pixel_size_x, pixel_size_y = detector_group['pixel dimension'].value
        arm_length = detector_group['L2'].value

        return AnglerCameraDetectorGeometry(num_rows=num_rows,
                                            num_columns=num_cols,
                                            pixel_size_x=pixel_size_x,
                                            pixel_size_y=pixel_size_y,
                                            arm_length=arm_length,
                                            calibrated=False)

    def read_sample_logs(self):
        """Get sample logs

        Retrieve all the (sample) logs from Hidra project file.
        Raw information retrieved from rs project file is numpy arrays

        Returns
        -------
        ndarray, dict
            ndarray : 1D array for sub runs
            dict : dict[sample log name] for sample logs in ndarray
        """
        logs_group = self._project_h5[HidraConstants.RAW_DATA][
            HidraConstants.SAMPLE_LOGS]

        # the sub-run list is mandatory: every other log is keyed against it
        if HidraConstants.SUB_RUNS not in logs_group.keys():
            raise RuntimeError(
                'Failed to find {} in {} group of the file'.format(
                    HidraConstants.SUB_RUNS, HidraConstants.SAMPLE_LOGS))

        samplelogs = SampleLogs()
        # sub runs must be registered first
        samplelogs[HidraConstants.SUB_RUNS] = logs_group[
            HidraConstants.SUB_RUNS].value
        # copy over every log (this re-assigns SUB_RUNS with the same data)
        for log_name in logs_group.keys():
            samplelogs[log_name] = logs_group[log_name].value

        return samplelogs

    def read_log_value(self, log_name):
        """Get a single sample log entry.

        Parameters
        ----------
        log_name : str
            name of the log to fetch

        Returns
        -------
        h5py dataset (ndarray-like) or single value
        """
        assert self._project_h5 is not None, 'Project HDF5 is not loaded yet'

        logs_group = self._project_h5[HidraConstants.RAW_DATA][
            HidraConstants.SAMPLE_LOGS]
        return logs_group[log_name]

    def read_raw_counts(self, sub_run):
        """Get the raw detector counts of a single sub run.

        :param sub_run: non-negative sub run number
        :return: array of detector counts
        :raises KeyError: when the sub run is not present in the file
        """
        # was: assert message 'blabla' and check label 'sun run' (typos)
        assert self._project_h5 is not None, 'Project HDF5 file is not opened'
        checkdatatypes.check_int_variable('sub run', sub_run, (0, None))

        # sub-run groups are stored with zero-padded 4-digit names
        sub_run_str = '{:04}'.format(sub_run)
        try:
            counts = self._project_h5[HidraConstants.RAW_DATA][
                HidraConstants.SUB_RUNS][sub_run_str]['counts'].value
        except KeyError as key_error:
            err_msg = 'Unable to access sub run {} with key {}: {}\nAvailable runs are: {}' \
                      ''.format(sub_run, sub_run_str, key_error,
                                self._project_h5[HidraConstants.RAW_DATA][HidraConstants.SUB_RUNS].keys())
            raise KeyError(err_msg)

        return counts

    def read_sub_runs(self):
        """
        Get the list of sub run numbers recorded in the sample logs.
        :return: list of int
        """
        self._log.debug(str(self._project_h5.keys()))
        self._log.debug(self._file_name)

        logs_group = self._project_h5[HidraConstants.RAW_DATA][
            HidraConstants.SAMPLE_LOGS]
        if HidraConstants.SUB_RUNS in logs_group:
            sub_runs_str_list = logs_group[HidraConstants.SUB_RUNS].value
        else:
            # old/partial file without a sub-run log
            sub_runs_str_list = []
        self._log.debug('.... Sub runs: {}'.format(sub_runs_str_list))

        # stored values may be strings: coerce each entry to int
        sub_run_list = [int(sub_run_str) for sub_run_str in sub_runs_str_list]
        self._log.debug('.... Sub runs: {}'.format(sub_run_list))

        return sub_run_list

    def write_instrument_geometry(self, instrument_setup):
        """
        Add instrument geometry and wave length information to project file

        :param instrument_setup: HidraSetup holding the raw geometry and,
            optionally, a calibrated wave length
        """
        # check inputs
        self._validate_write_operation()
        checkdatatypes.check_type('Instrument geometry setup',
                                  instrument_setup, HidraSetup)

        # write value to instrument
        instrument_group = self._project_h5[HidraConstants.INSTRUMENT]

        # write attributes
        instrument_group.attrs['name'] = instrument_setup.name

        # get the entry for raw instrument setup
        detector_group = instrument_group['geometry setup']['detector']
        raw_geometry = instrument_setup.get_instrument_geometry(False)
        detector_group.create_dataset('L2',
                                      data=numpy.array(
                                          raw_geometry.arm_length))
        det_size = numpy.array(
            instrument_setup.get_instrument_geometry(False).detector_size)
        detector_group.create_dataset('detector size', data=det_size)
        pixel_dimension = list(
            instrument_setup.get_instrument_geometry(False).pixel_dimension)
        detector_group.create_dataset('pixel dimension',
                                      data=numpy.array(pixel_dimension))

        # wave length
        wavelength_group = instrument_group[HidraConstants.GEOMETRY_SETUP][
            HidraConstants.WAVELENGTH]
        try:
            wl = instrument_setup.get_wavelength(None)
        except (NotImplementedError, RuntimeError) as run_err:
            # No wave length from workspace: log the error and continue
            self._log.error(str(run_err))
            wl = None

        # Set wave length only if one was obtained above
        if wl is not None:
            wavelength_group.create_dataset('Calibrated',
                                            data=numpy.array([wl]))

    def read_peak_tags(self):
        """Get all the tags of peaks with parameters stored in HiDRA project

        Returns
        -------
        ~list
            list of string for all the peak tags

        """
        # every child of the peaks group is one fitted-peak entry
        return self._project_h5[HidraConstants.PEAKS].keys()

    def read_peak_parameters(self, peak_tag):
        """Get the parameters related to a peak

        The parameters including
        (1) peak profile (2) sub runs (3) chi2 (4) parameter names (5) parameter values

        Parameters
        ----------
        peak_tag : str
            tag/name of the peak entry to read

        Returns
        -------
        ~pyrs.peaks.peak_collection.PeakCollection
            All of the information from fitting a peak across subruns

        Raises
        ------
        RuntimeError
            if the peak tag is absent, or the value/error arrays disagree in shape
        """
        # Get main group
        peak_main_group = self._project_h5[HidraConstants.PEAKS]

        # Get peak entry
        if peak_tag not in peak_main_group.keys():
            raise RuntimeError('Peak tag {} cannot be found'.format(peak_tag))
        peak_entry = peak_main_group[peak_tag]

        # Get all the attribute and data (legacy h5py '.value' accessor)
        profile = peak_entry.attrs[HidraConstants.PEAK_PROFILE]
        background = peak_entry.attrs[HidraConstants.BACKGROUND_TYPE]
        sub_run_array = peak_entry[HidraConstants.SUB_RUNS].value
        chi2_array = peak_entry[HidraConstants.PEAK_FIT_CHI2].value
        param_values = peak_entry[HidraConstants.PEAK_PARAMS].value
        error_values = peak_entry[HidraConstants.PEAK_PARAMS_ERROR].value

        # validate the information makes sense
        if param_values.shape != error_values.shape:
            raise RuntimeError(
                'Parameters[{}] and Errors[{}] have different shape'.format(
                    param_values.shape, error_values.shape))
        peak_collection = PeakCollection(peak_tag, profile, background)
        peak_collection.set_peak_fitting_values(subruns=sub_run_array,
                                                parameter_values=param_values,
                                                parameter_errors=error_values,
                                                fit_costs=chi2_array)

        # Optionally for strain: reference peak center in dSpacing: (strain)
        if HidraConstants.D_REFERENCE in list(peak_entry.keys()):
            # If reference position D is ever written to this project
            ref_d_array = peak_entry[HidraConstants.D_REFERENCE].value
            # set to PeakCollection
            peak_collection.set_d_reference(ref_d_array)

        return peak_collection

    def write_peak_parameters(self, fitted_peaks):
        """Set the peak fitting results to project file.

         The tree structure for fitted peak in all sub runs is defined as
        - peaks
            - [peak-tag]
                - attr/'peak profile'
                - sub runs
                - parameter values
                - parameter fitting error

        Parameters
        ----------
        fitted_peaks : pyrs.core.peak_collection.PeakCollection

        Returns
        -------
        None

        """
        # Check inputs and file status
        self._validate_write_operation()

        # Get value from peak collection
        peak_tag = fitted_peaks.peak_tag
        peak_profile = str(fitted_peaks.peak_profile)
        background_type = str(fitted_peaks.background_type)

        checkdatatypes.check_string_variable('Peak tag', peak_tag)
        checkdatatypes.check_string_variable('Peak profile', peak_profile)
        checkdatatypes.check_string_variable('Background type',
                                             background_type)

        # access or create node for peak with given tag
        peak_main_group = self._project_h5[HidraConstants.PEAKS]

        if peak_tag not in peak_main_group:
            # create peak-tag entry if it does not exist
            single_peak_entry = peak_main_group.create_group(peak_tag)
        else:
            # if peak-tag entry, get the reference to the entry
            single_peak_entry = peak_main_group[peak_tag]

        # Attributes
        self.set_attributes(single_peak_entry, HidraConstants.PEAK_PROFILE,
                            peak_profile)
        self.set_attributes(single_peak_entry, HidraConstants.BACKGROUND_TYPE,
                            background_type)

        # NOTE(review): h5py's create_dataset raises if a dataset of the same
        # name already exists in a re-used peak-tag entry — confirm an existing
        # peak entry is never re-written (cf. write_mask_detector_array, which
        # deletes stale datasets first).
        single_peak_entry.create_dataset(HidraConstants.SUB_RUNS,
                                         data=fitted_peaks.sub_runs)
        single_peak_entry.create_dataset(HidraConstants.PEAK_FIT_CHI2,
                                         data=fitted_peaks.fitting_costs)
        peak_values, peak_errors = fitted_peaks.get_native_params()
        single_peak_entry.create_dataset(HidraConstants.PEAK_PARAMS,
                                         data=peak_values)
        single_peak_entry.create_dataset(HidraConstants.PEAK_PARAMS_ERROR,
                                         data=peak_errors)

        # Reference peak center in dSpacing: (strain)
        ref_d_array = fitted_peaks.get_d_reference()
        if isinstance(ref_d_array, numpy.ndarray):
            # if reference peak position in D is set
            single_peak_entry.create_dataset(HidraConstants.D_REFERENCE,
                                             data=ref_d_array)
        elif not numpy.isnan(ref_d_array):
            # single non-NaN value: broadcast it to one entry per sub run
            num_subruns = len(fitted_peaks.sub_runs)
            single_peak_entry.create_dataset(HidraConstants.D_REFERENCE,
                                             data=numpy.array([ref_d_array] *
                                                              num_subruns))

    def read_wavelengths(self):
        """Get calibrated wave length

        Returns
        -------
        Float
            Calibrated wave length.  NaN for wave length is not ever set
        """
        # Init wave length
        wl = numpy.nan

        # Get the node
        try:
            mono_node = self._project_h5[HidraConstants.INSTRUMENT][
                HidraConstants.MONO]
            if HidraConstants.WAVELENGTH in mono_node:
                # read into a temporary so the empty case keeps wl as NaN
                # (was a bug: the empty array itself was returned even though
                # the comment and docstring promised NaN)
                wl_array = mono_node[HidraConstants.WAVELENGTH].value
                if wl_array.shape[0] == 0:
                    # empty numpy array: no data. keep as nan
                    pass
                elif wl_array.shape[0] == 1:
                    # 1 calibrated wave length
                    wl = wl_array[0]
                else:
                    # not supported
                    raise RuntimeError(
                        'There are more than 1 wave length registered')
        except KeyError:
            # monochromator node does not exist
            self._log.error('Node {} does not exist in HiDRA project file {}'
                            ''.format(HidraConstants.MONO, self._file_name))

        return wl

    def write_wavelength(self, wave_length):
        """ Set the calibrated wave length
        Location:
          .../instrument/monochromator setting/ ... .../
        Note:
        - same wave length to all sub runs
        - only calibrated wave length in project file
        - raw wave length comes from a table with setting
        :param wave_length: wave length in A
        :return: None
        """
        checkdatatypes.check_float_variable('Wave length', wave_length,
                                            (0, 1000))

        instrument_group = self._project_h5[HidraConstants.INSTRUMENT]

        # create the 'monochromator setting' node on first use
        if HidraConstants.MONO not in list(instrument_group.keys()):
            instrument_group.create_group(HidraConstants.MONO)
        wl_entry = instrument_group[HidraConstants.MONO]

        # h5py cannot overwrite a dataset in place: drop a stale one first
        if HidraConstants.WAVELENGTH in list(wl_entry.keys()):
            del wl_entry[HidraConstants.WAVELENGTH]
        wl_entry.create_dataset(HidraConstants.WAVELENGTH,
                                data=numpy.array([wave_length]))

    def read_efficiency_correction(self):
        """
        Read the detector efficiency correction measured from vanadium.

        Returns
        -------
        numpy ndarray
            Efficiency array
        """
        eff_group = self._project_h5[HidraConstants.INSTRUMENT][
            HidraConstants.DETECTOR_EFF]

        # the run number the calibration came from names the dataset
        calib_run_number = eff_group.attrs[HidraConstants.RUN]
        return eff_group['{}'.format(calib_run_number)]

    def write_efficiency_correction(self, calib_run_number, eff_array):
        """ Set detector efficiency correction measured from vanadium (efficiency correction)
        Location: ... /main entry/calibration/efficiency:
        Data: numpy array with 1024**2...
        Attribute: add the run number created from to the attribute
        Parameters
        ----------
        calib_run_number : integer
            Run number where the efficiency calibration comes from
        eff_array : numpy ndarray (1D)
            Detector (pixel) efficiency
        """
        eff_group = self._project_h5[HidraConstants.INSTRUMENT][
            HidraConstants.DETECTOR_EFF]

        # record the originating run number as an attribute
        eff_group.attrs[HidraConstants.RUN] = calib_run_number

        # store the efficiency array under a dataset named after the run number
        eff_group.create_dataset('{}'.format(calib_run_number), data=eff_array)

    def write_information(self, info_dict):
        """Write general project information as attributes of the root entry.

        Parameters
        ----------
        info_dict : dict
            Mapping of attribute name to attribute value
        """
        # Validate input and ensure the file is open for writing
        checkdatatypes.check_dict('Project file general information',
                                  info_dict)
        self._validate_write_operation()

        # Each entry becomes one HDF5 attribute on the root node
        for name, value in info_dict.items():
            self._project_h5.attrs[name] = value

    def write_reduced_diffraction_data_set(self, two_theta_array,
                                           diff_data_set, var_data_set):
        """Set the reduced diffraction data (set)

        Parameters
        ----------
        two_theta_array : numpy.ndarray
            2D array for 2-theta vector, which could be various to each other among sub runs
        diff_data_set : dict
            dictionary of 2D arrays for reduced diffraction patterns' intensities
        var_data_set : dict or None
            dictionary of 2D arrays for reduced diffraction patterns' variances.
            If None, variances default to sqrt(intensity) (Poisson estimate).

        Raises
        ------
        RuntimeError
            If an intensity/variance matrix's shape disagrees with the 2-theta array
        """
        # Check input
        checkdatatypes.check_numpy_arrays('Two theta vector',
                                          [two_theta_array], 2, False)
        checkdatatypes.check_dict('Diffraction data set', diff_data_set)

        # Retrieve diffraction group
        diff_group = self._project_h5[HidraConstants.REDUCED_DATA]

        def _write_matrix(data_name, matrix):
            """Create or overwrite one dataset in the diffraction group."""
            if data_name in diff_group.keys():
                try:
                    # overwrite in place
                    diff_group[data_name][...] = matrix
                except TypeError:
                    # usually the array size changed: recreate the dataset
                    del diff_group[data_name]
                    diff_group.create_dataset(data_name, data=matrix)
            else:
                # new dataset
                diff_group.create_dataset(data_name, data=matrix)

        def _check_matrix(matrix):
            """Validate one reduced-data matrix against the 2-theta array."""
            checkdatatypes.check_numpy_arrays('Diffraction data (matrix)',
                                              [matrix], None, False)
            if two_theta_array.shape != matrix.shape:
                raise RuntimeError(
                    'Length of 2theta vector ({}) is different from intensities ({})'
                    ''.format(two_theta_array.shape, matrix.shape))

        # Add 2theta vector
        _write_matrix(HidraConstants.TWO_THETA, two_theta_array)

        # Add diffraction intensities: one dataset per mask
        for mask_id, diff_data_matrix_i in diff_data_set.items():
            self._log.information('Mask {} data set shape: {}'.format(
                mask_id, diff_data_matrix_i.shape))
            _check_matrix(diff_data_matrix_i)
            # The default (None) mask is stored under the main entry name
            if mask_id is None:
                data_name = HidraConstants.REDUCED_MAIN
            else:
                data_name = mask_id
            _write_matrix(data_name, diff_data_matrix_i)

        # Default variances: Poisson estimate sqrt(I).
        # BUG FIX: build a NEW dict here; the original code aliased the
        # caller's diff_data_set and overwrote its values in place with
        # the square roots, silently mutating the caller's intensity data.
        if var_data_set is None:
            var_data_set = {mask_id: numpy.sqrt(matrix)
                            for mask_id, matrix in diff_data_set.items()}

        # Add variances: one dataset per mask, name suffixed with '_var'
        for mask_id, var_data_matrix_i in var_data_set.items():
            self._log.information('Mask {} data set shape: {}'.format(
                mask_id, var_data_matrix_i.shape))
            _check_matrix(var_data_matrix_i)
            if mask_id is None:
                data_name = HidraConstants.REDUCED_MAIN + '_var'
            else:
                data_name = mask_id + '_var'
            _write_matrix(data_name, var_data_matrix_i)

    def write_sub_runs(self, sub_runs):
        """Write the sub run numbers into the sample log entry.

        Parameters
        ----------
        sub_runs : list or numpy.ndarray
            1D collection of sub run numbers
        """
        # Accept a plain list by converting it; otherwise require a 1D array
        if isinstance(sub_runs, list):
            sub_runs = numpy.array(sub_runs)
        else:
            checkdatatypes.check_numpy_arrays('Sub run numbers', [sub_runs], 1,
                                              False)

        # Store under raw-data/sample-logs
        logs_entry = \
            self._project_h5[HidraConstants.RAW_DATA][HidraConstants.SAMPLE_LOGS]
        logs_entry.create_dataset(HidraConstants.SUB_RUNS, data=sub_runs)

    def _create_diffraction_node(self, sub_run_number):
        """Create (or fetch) the node recording diffraction data of one sub run.

        It will check if such node already exists.

        Parameters
        ----------
        sub_run_number : int
            Sub run number; the group is named by it, zero-padded to 4 digits

        Returns
        -------
        h5py group for the sub run, containing one child group per
        diffraction unit (2theta and d-spacing)

        :exception: RuntimeError is raised if such 'sub run' node exists but not correct
        """
        # create a new node if it does not exist
        sub_run_group_name = '{0:04}'.format(sub_run_number)

        self._log.debug(
            'sub group entry name in hdf: {}'.format(sub_run_group_name))

        # check existing node or create a new node
        self._log.debug(
            'Diffraction node sub group/entries: {}'
            ''.format(self._project_h5[HidraConstants.REDUCED_DATA].keys()))
        if sub_run_group_name in self._project_h5[HidraConstants.REDUCED_DATA]:
            # sub-run node exists: verify it has both unit sub-groups.
            # BUG FIX: the original called self._log(...) directly; the
            # Logger object is not callable -- use .debug() as elsewhere.
            self._log.debug('sub-group: {}'.format(sub_run_group_name))
            diff_group = self._project_h5[
                HidraConstants.REDUCED_DATA][sub_run_group_name]
            # The sub-groups are created below with str(...), so test the
            # string names for consistency (the original tested the enum
            # objects themselves against the h5py group).
            if not (str(DiffractionUnit.TwoTheta) in diff_group
                    and str(DiffractionUnit.DSpacing) in diff_group):
                raise RuntimeError(
                    'Diffraction node for sub run {} exists but is not complete'
                    .format(sub_run_number))
        else:
            # create new node: parent, child-2theta, child-dspacing
            diff_group = self._project_h5[
                HidraConstants.REDUCED_DATA].create_group(sub_run_group_name)
            diff_group.create_group(str(DiffractionUnit.TwoTheta))
            diff_group.create_group(str(DiffractionUnit.DSpacing))

        return diff_group

    def _validate_write_operation(self):
        """Validate whether a writing operation is allowed for this file.

        :exception: RuntimeError if the file was opened read-only
        """
        # Guard clause: nothing to do when the file is writable
        if self._is_writable:
            return
        raise RuntimeError(
            'Project file {} is set to read-only by user'.format(
                self._project_h5.name))

    @staticmethod
    def set_attributes(h5_group, attribute_name, attribute_value):
        """Attach a single attribute to an HDF5 group.

        Parameters
        ----------
        h5_group : h5py group
            Group to annotate
        attribute_name : str
            Attribute key
        attribute_value
            Attribute value (any type accepted by h5py attrs)
        """
        # Only the attribute name is validated here
        checkdatatypes.check_string_variable('Attribute name', attribute_name)
        h5_group.attrs[attribute_name] = attribute_value