Example #1
def create_fake_dns_workspace(wsname, angle=-7.53, flipper='ON', dataY=None, loadinstrument=False):
    """
    creates DNS workspace with fake data
        @param angle   Angle of detector bank rotation
        @param flipper Flipper state (ON/OFF)
        @param dataY   Data array to set as DataY of the created workspace, will be set to np.ones if None
        @param loadinstrument  If True api.LoadInstrument will be executed, needed for DNSMergeRuns
    """
    ndet = 24
    dataX = np.zeros(2*ndet)
    dataX.fill(4.2 + 0.00001)
    dataX[::2] -= 0.000002
    if dataY is None:
        dataY = np.ones(ndet)
    dataE = np.sqrt(dataY)
    # create workspace
    api.CreateWorkspace(OutputWorkspace=wsname, DataX=dataX, DataY=dataY,
                        DataE=dataE, NSpec=ndet, UnitX="Wavelength")
    outws = api.mtd[wsname]
    p_names = 'deterota,wavelength,slit_i_left_blade_position,slit_i_right_blade_position,normalized,\
            slit_i_lower_blade_position,slit_i_upper_blade_position,polarisation,polarisation_comment,flipper'
    p_values = str(angle) + ',4.2,10,10,duration,5,20,x,7a,' + flipper
    api.AddSampleLogMultiple(Workspace=outws, LogNames=p_names, LogValues=p_values, ParseType=True)
    # rotate instrument component
    if loadinstrument:
        api.LoadInstrument(outws, InstrumentName='DNS', RewriteSpectraMap=True)
        api.RotateInstrumentComponent(outws, "bank0", X=0, Y=1, Z=0, Angle=angle)

    return outws
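
A minimal usage sketch for the helper above (assuming numpy as np and mantid.simpleapi as api are imported, as the function body requires; the workspace names are illustrative):

# Build two fake DNS workspaces at different bank rotations, with the
# instrument loaded so they can be passed to DNSMergeRuns.
ws_sf = create_fake_dns_workspace('fake_sf', angle=-7.53, flipper='ON', loadinstrument=True)
ws_nsf = create_fake_dns_workspace('fake_nsf', angle=-8.02, flipper='OFF', loadinstrument=True)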
Example #2
    def _calibData(self, sam_ws, mon_ws):
        api.LoadInstrument(Workspace=sam_ws,
                           Filename=os.path.join(DEFAULT_CONFIG_DIR, 'BASIS_Definition_311.xml'))
        api.MaskDetectors(Workspace=sam_ws,
                          DetectorList=self._dMask)
                          # MaskedWorkspace='BASIS_MASK')
        api.ModeratorTzeroLinear(InputWorkspace=sam_ws,
                                 OutputWorkspace=sam_ws)
        api.LoadParameterFile(Workspace=sam_ws,
                              Filename=os.path.join(DEFAULT_CONFIG_DIR, 'BASIS_silicon_311_Parameters.xml'))
        api.ConvertUnits(InputWorkspace=sam_ws,
                         OutputWorkspace=sam_ws,
                         Target='Wavelength', EMode='Indirect')

        if not self._noMonNorm:
            # normalize the sample data by the monitor spectrum
            api.ModeratorTzeroLinear(InputWorkspace=mon_ws,
                                     OutputWorkspace=mon_ws)
            api.Rebin(InputWorkspace=mon_ws,
                      OutputWorkspace=mon_ws, Params='10')
            api.ConvertUnits(InputWorkspace=mon_ws,
                             OutputWorkspace=mon_ws,
                             Target='Wavelength')
            api.OneMinusExponentialCor(InputWorkspace=mon_ws,
                                       OutputWorkspace=mon_ws,
                                       C='0.20749999999999999',
                                       C1='0.001276')
            api.Scale(InputWorkspace=mon_ws,
                      OutputWorkspace=mon_ws,
                      Factor='9.9999999999999995e-07')
            api.RebinToWorkspace(WorkspaceToRebin=sam_ws,
                                 WorkspaceToMatch=mon_ws,
                                 OutputWorkspace=sam_ws)
            api.Divide(LHSWorkspace=sam_ws,
                       RHSWorkspace=mon_ws,
                       OutputWorkspace=sam_ws)
Example #3
def load(filename="",
         load_pulse_times=True,
         instrument_filename=None,
         error_connection=None,
         **kwargs):
    """
    Wrapper function to provide a load method for a Nexus file, hiding mantid
    specific code from the scipp interface. All other keyword arguments not
    specified in the parameters below are passed on to the mantid.Load
    function.

    Example of use:

      from scipp.neutron import load
      d = sc.Dataset()
      d["sample"] = load(filename='PG3_4844_event.nxs', \
                         BankName='bank184', load_pulse_times=True)

    See also the neutron-data tutorial.

    Note that this function requires mantid to be installed and available in
    the same Python environment as scipp.

    :param str filename: The name of the Nexus/HDF file to be loaded.
    :param bool load_pulse_times: Read the pulse times if True.
    :param str instrument_filename: If specified, over-write the instrument
                                    definition in the final Dataset with the
                                    geometry contained in the file.
    :raises: If the Mantid workspace type returned by the Mantid loader is not
             EventWorkspace, Workspace2D, or TableWorkspace.
    :return: A Dataset containing the neutron event/histogram data and the
             instrument geometry.
    :rtype: Dataset
    """

    try:
        import mantid.simpleapi as mantid
        from mantid.api import EventType
    except ImportError as e:
        raise ImportError(
            "Mantid Python API was not found, please install Mantid framework "
            "as detailed in the installation instructions (https://scipp."
            "readthedocs.io/en/latest/getting-started/installation.html)"
        ) from e

    ws = mantid.Load(filename, **kwargs)
    if instrument_filename is not None:
        mantid.LoadInstrument(ws,
                              FileName=instrument_filename,
                              RewriteSpectraMap=True)
    if ws.id() == 'Workspace2D':
        return convert_Workspace2D_to_dataset(ws)
    if ws.id() == 'EventWorkspace':
        return convert_EventWorkspace_to_dataset(ws, load_pulse_times,
                                                 EventType)
    if ws.id() == 'TableWorkspace':
        return convert_TableWorkspace_to_dataset(ws, error_connection)
    raise RuntimeError('Unsupported workspace type')
Example #4
def get_nominal_difc(nxspath, idfpath, outpath=None):
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0, FilterByTimeStop=1)
    msa.LoadInstrument(ws, Filename=idfpath, RewriteSpectraMap=False)
    difc = msa.CalculateDIFC(InputWorkspace=ws)
    difc = difc.extractY().flatten().copy()
    msa.DeleteWorkspace('difc')
    if outpath:
        np.save(outpath, difc)
    return difc
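
A hypothetical call of this helper (both paths are placeholders, not taken from the original source):

# Compute the nominal DIFC array and also cache it on disk.
difc = get_nominal_difc('/path/to/run.nxs.h5', '/path/to/instrument.xml',
                        outpath='difc-nominal.npy')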
Example #5
def to_workspace_2d(x, y, e, coord_dim, instrument_file=None):
    """
    Use the values provided to create a Mantid workspace.

    The Mantid layout expects the spectra to be the outermost dimension,
    i.e. y.shape[0]. If that is not the case, you may have to transpose
    your data, otherwise it will not be aligned correctly in the Mantid
    workspace.

    :param x: Data to be used as X for the Mantid workspace.
    :param y: Data to be used as Y for the Mantid workspace.
    :param e: Data to be used as error for the Mantid workspace.
              If `None` the np.sqrt of y will be used.
    :param coord_dim: Dim of the coordinate, to be set as the equivalent
                      UnitX on the Mantid workspace.
    :param instrument_file: Instrument file that will be
                            loaded into the workspace
    :returns: Workspace2D containing the data for X, Y and E
    """
    try:
        import mantid.simpleapi as mantid
    except ImportError:
        raise ImportError(
            "Mantid Python API was not found, please install Mantid framework "
            "as detailed in the installation instructions (https://scipp."
            "readthedocs.io/en/latest/getting-started/installation.html)")

    assert len(y.shape) == 2, "Currently can only handle 2D data."

    e = e if e is not None else np.sqrt(y)

    unitX = validate_dim_and_get_mantid_string(coord_dim)

    nspec = y.shape[0]
    nbins = x.shape[1]
    nitems = y.shape[1]

    ws = mantid.WorkspaceFactory.create("Workspace2D",
                                        NVectors=nspec,
                                        XLength=nbins,
                                        YLength=nitems)

    for i in range(nspec):
        ws.setX(i, x[i])
        ws.setY(i, y[i])
        ws.setE(i, e[i])

    # Set X-Axis unit
    ws.getAxis(0).setUnit(unitX)

    if instrument_file is not None:
        mantid.LoadInstrument(ws,
                              FileName=instrument_file,
                              RewriteSpectraMap=True)

    return ws
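
A small usage sketch (assuming numpy as np is imported; 'tof' stands in for whatever coordinate dimension validate_dim_and_get_mantid_string accepts in this code base):

# Four spectra of ten histogram bins each; X holds the eleven bin edges
# per spectrum, and E is left as None so it defaults to sqrt(Y).
x = np.broadcast_to(np.linspace(0.5, 5.0, 11), (4, 11)).copy()
y = np.random.rand(4, 10)
ws = to_workspace_2d(x, y, None, 'tof')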
Example #6
    def load_instrument(self):
        """
            Runs LoadInstrument to get the parameters for the instrument
            @return the instrument parameter data
        """
        wrksp = '__'+self._NAME+'instrument_definition'
        if not AnalysisDataService.doesExist(wrksp):
            api.CreateWorkspace(OutputWorkspace=wrksp, DataX="1", DataY="1", DataE="1")
            # read the information about the instrument stored in its xml definition
            api.LoadInstrument(Workspace=wrksp, InstrumentName=self._NAME, RewriteSpectraMap=True)

        return AnalysisDataService.retrieve(wrksp).getInstrument()
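
A brief usage note: because the workspace is cached in the AnalysisDataService under a fixed name, repeated calls skip the CreateWorkspace/LoadInstrument round trip. A hypothetical call ('reducer' is an illustrative owner object for this method):

instrument = reducer.load_instrument()
print(instrument.getName())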
Example #7
def get_nominal_difc(nxspath, init_IDF, outdir):
    if not os.path.exists(outdir): os.makedirs(outdir)
    # ## Compute nominal difc
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0,
                            FilterByTimeStop=1)  # load just one second
    #
    msa.LoadInstrument(ws, Filename=init_IDF, RewriteSpectraMap=False)
    import shutil
    shutil.copyfile(init_IDF, os.path.join(outdir, 'init_IDF.xml'))
    #
    difc = msa.CalculateDIFC(InputWorkspace=ws)
    difc = difc.extractY().flatten().copy()
    msa.DeleteWorkspace('difc')
    np.save(os.path.join(outdir, 'difc-nominal.npy'), difc)
    return
Example #8
    def test_DNSTwoTheta(self):
        outputWorkspaceName = "DNSFlippingRatioCorrTest_Test5"

        # rotate detector bank to different angles
        dataws_sf = self.__sf_nicrws - self.__sf_bkgrws
        dataws_nsf = self.__nsf_nicrws - self.__nsf_bkgrws
        wslist = [dataws_sf, dataws_nsf, self.__sf_nicrws, self.__nsf_nicrws, self.__sf_bkgrws, self.__nsf_bkgrws]
        for wks in wslist:
            api.LoadInstrument(wks, InstrumentName='DNS', RewriteSpectraMap=True)
        api.RotateInstrumentComponent(dataws_sf, "bank0", X=0, Y=1, Z=0, Angle=-7.53)
        api.RotateInstrumentComponent(dataws_nsf, "bank0", X=0, Y=1, Z=0, Angle=-7.53)
        api.RotateInstrumentComponent(self.__sf_nicrws, "bank0", X=0, Y=1, Z=0, Angle=-8.02)
        api.RotateInstrumentComponent(self.__nsf_nicrws, "bank0", X=0, Y=1, Z=0, Angle=-8.02)
        api.RotateInstrumentComponent(self.__sf_bkgrws, "bank0", X=0, Y=1, Z=0, Angle=-8.54)
        api.RotateInstrumentComponent(self.__nsf_bkgrws, "bank0", X=0, Y=1, Z=0, Angle=-8.54)
        # apply correction
        alg_test = run_algorithm("DNSFlippingRatioCorr", SFDataWorkspace=dataws_sf,
                                 NSFDataWorkspace=dataws_nsf, SFNiCrWorkspace=self.__sf_nicrws.getName(),
                                 NSFNiCrWorkspace=self.__nsf_nicrws.getName(), SFBkgrWorkspace=self.__sf_bkgrws.getName(),
                                 NSFBkgrWorkspace=self.__nsf_bkgrws.getName(), SFOutputWorkspace=outputWorkspaceName+'SF',
                                 NSFOutputWorkspace=outputWorkspaceName+'NSF')

        self.assertTrue(alg_test.isExecuted())
        ws_sf = AnalysisDataService.retrieve(outputWorkspaceName + 'SF')
        ws_nsf = AnalysisDataService.retrieve(outputWorkspaceName + 'NSF')
        # dimensions
        self.assertEqual(24, ws_sf.getNumberHistograms())
        self.assertEqual(24, ws_nsf.getNumberHistograms())
        self.assertEqual(2,  ws_sf.getNumDims())
        self.assertEqual(2,  ws_nsf.getNumDims())
        # 2theta angles must not change after correction has been applied
        tthetas = np.array([7.53 + i*5 for i in range(24)])
        for i in range(24):
            det = ws_sf.getDetector(i)
            self.assertAlmostEqual(tthetas[i], np.degrees(ws_sf.detectorSignedTwoTheta(det)))
            det = ws_nsf.getDetector(i)
            self.assertAlmostEqual(tthetas[i], np.degrees(ws_nsf.detectorSignedTwoTheta(det)))

        run_algorithm("DeleteWorkspace", Workspace=outputWorkspaceName + 'SF')
        run_algorithm("DeleteWorkspace", Workspace=outputWorkspaceName + 'NSF')
        run_algorithm("DeleteWorkspace", Workspace=dataws_sf)
        run_algorithm("DeleteWorkspace", Workspace=dataws_nsf)

        return
Example #9
def load(filename="",
         load_pulse_times=True,
         instrument_filename=None,
         error_connection=None,
         mantid_args=None):
    """
    Wrapper function to provide a load method for a Nexus file, hiding mantid
    specific code from the scipp interface. All other keyword arguments not
    specified in the parameters below are passed on to the mantid.Load
    function.

    Example of use:

    .. highlight:: python
    .. code-block:: python

        from scipp.neutron import load
        d = sc.Dataset()
        d["sample"] = load(filename='PG3_4844_event.nxs',
                           load_pulse_times=False,
                           mantid_args={'BankName': 'bank184',
                                        'LoadMonitors': True})

    See also the neutron-data tutorial.

    Note that this function requires mantid to be installed and available in
    the same Python environment as scipp.

    :param str filename: The name of the Nexus/HDF file to be loaded.
    :param bool load_pulse_times: Read the pulse times if True.
    :param str instrument_filename: If specified, over-write the instrument
                                    definition in the final Dataset with the
                                    geometry contained in the file.
    :param dict mantid_args: Dict of keyword arguments to forward to Mantid.
    :raises: If the Mantid workspace type returned by the Mantid loader is not
             either EventWorkspace or Workspace2D.
    :return: A Dataset containing the neutron event/histogram data and the
             instrument geometry.
    :rtype: Dataset
    """

    if mantid_args is None:
        mantid_args = {}

    with run_mantid_alg('Load', filename, **mantid_args) as loaded:
        # Determine what Load has provided us
        from mantid.api import Workspace
        if isinstance(loaded, Workspace):
            # A single workspace
            data_ws = loaded
        else:
            # Separate data and monitor workspaces
            data_ws = loaded.OutputWorkspace

        if instrument_filename is not None:
            import mantid.simpleapi as mantid
            mantid.LoadInstrument(data_ws,
                                  FileName=instrument_filename,
                                  RewriteSpectraMap=True)

        return from_mantid(data_ws,
                           load_pulse_times=load_pulse_times,
                           error_connection=error_connection)
Example #10
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        outws_name = self.getPropertyValue("OutputWorkspace")
        norm = self.getPropertyValue("Normalization")

        # load data array from the given file
        data_array = np.loadtxt(filename)
        if not data_array.size:
            message = "File " + filename + " does not contain any data!"
            self.log().error(message)
            raise RuntimeError(message)
        # sample logs
        logs = {"names": [], "values": [], "units": []}

        # load run information
        metadata = DNSdata()
        try:
            metadata.read_legacy(filename)
        except RuntimeError as err:
            message = "Error of loading of file " + filename + ": " + str(err)
            self.log().error(message)
            raise RuntimeError(message)

        tmp = api.LoadEmptyInstrument(InstrumentName='DNS')
        self.instrument = tmp.getInstrument()
        api.DeleteWorkspace(tmp)

        # load polarisation table and determine polarisation
        poltable = self.get_polarisation_table()
        pol = self.get_polarisation(metadata, poltable)
        if not pol:
            pol = ['0', 'undefined']
            self.log().warning("Failed to determine polarisation for " +
                               filename +
                               ". Values have been set to undefined.")
        ndet = 24
        unitX = "Wavelength"
        if metadata.tof_channel_number < 2:
            dataX = np.zeros(2 * ndet)
            dataX.fill(metadata.wavelength + 0.00001)
            dataX[::2] -= 0.000002
        else:
            unitX = "TOF"

            # get instrument parameters
            l1 = np.linalg.norm(self.instrument.getSample().getPos() -
                                self.instrument.getSource().getPos())
            self.log().notice("L1 = {} m".format(l1))
            dt_factor = float(
                self.instrument.getStringParameter("channel_width_factor")[0])

            # channel width
            dt = metadata.tof_channel_width * dt_factor
            # calculate tof1
            velocity = h / (m_n * metadata.wavelength * 1e-10)  # m/s
            tof1 = 1e+06 * l1 / velocity  # microseconds
            self.log().debug("TOF1 = {} microseconds".format(tof1))
            self.log().debug("Delay time = {} microsecond".format(
                metadata.tof_delay_time))
            # create dataX array
            x0 = tof1 + metadata.tof_delay_time
            self.log().debug("TOF1 = {} microseconds".format(tof1))
            dataX = np.linspace(x0, x0 + metadata.tof_channel_number * dt,
                                metadata.tof_channel_number + 1)

            # sample logs
            logs["names"].extend(
                ["channel_width", "TOF1", "delay_time", "tof_channels"])
            logs["values"].extend([
                dt, tof1, metadata.tof_delay_time, metadata.tof_channel_number
            ])
            logs["units"].extend(
                ["microseconds", "microseconds", "microseconds", ""])
            if metadata.tof_elastic_channel:
                logs["names"].append("EPP")
                logs["values"].append(metadata.tof_elastic_channel)
                logs["units"].append("")
            if metadata.chopper_rotation_speed:
                logs["names"].append("chopper_speed")
                logs["values"].append(metadata.chopper_rotation_speed)
                logs["units"].append("Hz")
            if metadata.chopper_slits:
                logs["names"].append("chopper_slits")
                logs["values"].append(metadata.chopper_slits)
                logs["units"].append("")

        # data normalization
        factor = 1.0
        yunit = "Counts"
        ylabel = "Intensity"
        if norm == 'duration':
            factor = metadata.duration
            yunit = "Counts/s"
            ylabel = "Intensity normalized to duration"
            if factor <= 0:
                raise RuntimeError("Duration is invalid for file " + filename +
                                   ". Cannot normalize.")
        if norm == 'monitor':
            factor = metadata.monitor_counts
            yunit = "Counts/monitor"
            ylabel = "Intensity normalized to monitor"
            if factor <= 0:
                raise RuntimeError("Monitor counts are invalid for file " +
                                   filename + ". Cannot normalize.")
        # set values for dataY and dataE
        dataY = data_array[0:ndet, 1:] / factor
        dataE = np.sqrt(data_array[0:ndet, 1:]) / factor
        # create workspace
        api.CreateWorkspace(OutputWorkspace=outws_name,
                            DataX=dataX,
                            DataY=dataY,
                            DataE=dataE,
                            NSpec=ndet,
                            UnitX=unitX)
        outws = api.AnalysisDataService.retrieve(outws_name)
        api.LoadInstrument(outws, InstrumentName='DNS', RewriteSpectraMap=True)

        run = outws.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time),
                                   DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(outws,
                                      "bank0",
                                      X=0,
                                      Y=1,
                                      Z=0,
                                      Angle=metadata.deterota)
        # add sample log Ei and wavelength
        logs["names"].extend(["Ei", "wavelength"])
        logs["values"].extend([metadata.incident_energy, metadata.wavelength])
        logs["units"].extend(["meV", "Angstrom"])

        # add other sample logs
        logs["names"].extend([
            "deterota", "mon_sum", "duration", "huber", "omega", "T1", "T2",
            "Tsp"
        ])
        logs["values"].extend([
            metadata.deterota, metadata.monitor_counts, metadata.duration,
            metadata.huber, metadata.huber - metadata.deterota, metadata.temp1,
            metadata.temp2, metadata.tsp
        ])
        logs["units"].extend([
            "Degrees", "Counts", "Seconds", "Degrees", "Degrees", "K", "K", "K"
        ])

        # flipper, coil currents and polarisation
        flipper_status = 'OFF'  # flipper OFF
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'  # flipper ON
        logs["names"].extend([
            "flipper_precession", "flipper_z_compensation", "flipper", "C_a",
            "C_b", "C_c", "C_z", "polarisation", "polarisation_comment"
        ])
        logs["values"].extend([
            metadata.flipper_precession_current,
            metadata.flipper_z_compensation_current, flipper_status,
            metadata.a_coil_current, metadata.b_coil_current,
            metadata.c_coil_current, metadata.z_coil_current,
            str(pol[0]),
            str(pol[1])
        ])
        logs["units"].extend(["A", "A", "", "A", "A", "A", "A", "", ""])

        # slits
        logs["names"].extend([
            "slit_i_upper_blade_position", "slit_i_lower_blade_position",
            "slit_i_left_blade_position", "slit_i_right_blade_position"
        ])
        logs["values"].extend([
            metadata.slit_i_upper_blade_position,
            metadata.slit_i_lower_blade_position,
            metadata.slit_i_left_blade_position,
            metadata.slit_i_right_blade_position
        ])
        logs["units"].extend(["mm", "mm", "mm", "mm"])

        # add information whether the data are normalized (duration/monitor/no):
        api.AddSampleLog(outws,
                         LogName='normalized',
                         LogText=norm,
                         LogType='String')
        api.AddSampleLogMultiple(outws,
                                 LogNames=logs["names"],
                                 LogValues=logs["values"],
                                 LogUnits=logs["units"])

        outws.setYUnit(yunit)
        outws.setYUnitLabel(ylabel)

        self.setProperty("OutputWorkspace", outws)
        self.log().debug('LoadDNSLegacy: data are loaded to the workspace ' +
                         outws_name)

        return
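
A quick numeric cross-check of the TOF arithmetic above (a back-of-the-envelope sketch; it assumes h and m_n come from scipy.constants, as the snippet's use of those symbols suggests, and the L1 value is a placeholder):

from scipy.constants import h, m_n
wavelength = 4.2e-10                  # 4.2 Angstrom in metres
velocity = h / (m_n * wavelength)     # ~942 m/s for 4.2 Angstrom neutrons
l1 = 2.0                              # hypothetical source-sample distance in metres
tof1 = 1e+06 * l1 / velocity          # ~2120 microseconds
print(tof1)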
Example #11
def load(filename="",
         load_pulse_times=True,
         instrument_filename=None,
         error_connection=None,
         mantid_args=None):
    """
    Wrapper function to provide a load method for a Nexus file, hiding mantid
    specific code from the scipp interface. All other keyword arguments not
    specified in the parameters below are passed on to the mantid.Load
    function.

    Example of use:

      from scipp.neutron import load
      d = sc.Dataset()
      d["sample"] = load(filename='PG3_4844_event.nxs', \
                         load_pulse_times=True, \
                         mantid_args={'BankName': 'bank184'})

    See also the neutron-data tutorial.

    Note that this function requires mantid to be installed and available in
    the same Python environment as scipp.

    :param str filename: The name of the Nexus/HDF file to be loaded.
    :param bool load_pulse_times: Read the pulse times if True.
    :param str instrument_filename: If specified, over-write the instrument
                                    definition in the final Dataset with the
                                    geometry contained in the file.
    :param dict mantid_args: Dict of keyword arguments to forward to Mantid.
    :raises: If the Mantid workspace type returned by the Mantid loader is not
             either EventWorkspace or Workspace2D.
    :return: A Dataset containing the neutron event/histogram data and the
             instrument geometry.
    :rtype: Dataset
    """

    try:
        import mantid.simpleapi as mantid
        from mantid.api import Workspace
    except ImportError:
        raise ImportError(
            "Mantid Python API was not found, please install Mantid framework "
            "as detailed in the installation instructions (https://scipp."
            "readthedocs.io/en/latest/getting-started/installation.html)")

    if mantid_args is None:
        mantid_args = {}

    loaded = mantid.Load(filename, StoreInADS=False, **mantid_args)

    # Determine what Load has provided us
    if isinstance(loaded, Workspace):
        # A single workspace
        data_ws = loaded
        monitor_ws = None
    else:
        # Separate data and monitor workspaces
        data_ws = loaded.OutputWorkspace
        monitor_ws = loaded.MonitorWorkspace

    if instrument_filename is not None:
        mantid.LoadInstrument(data_ws,
                              FileName=instrument_filename,
                              RewriteSpectraMap=True)

    dataset = None
    if data_ws.id() == 'Workspace2D':
        has_monitors = False
        for spec in data_ws.spectrumInfo():
            has_monitors |= spec.isMonitor
            if has_monitors:
                break
        if has_monitors:
            data_ws, monitor_ws = mantid.ExtractMonitors(data_ws,
                                                         StoreInADS=False)
        dataset = convert_Workspace2D_to_dataarray(data_ws)
    elif data_ws.id() == 'EventWorkspace':
        dataset = convertEventWorkspace_to_dataarray(data_ws, load_pulse_times)
    elif data_ws.id() == 'TableWorkspace':
        dataset = convert_TableWorkspace_to_dataset(data_ws, error_connection)

    if dataset is None:
        raise RuntimeError('Unsupported workspace type')
    elif monitor_ws is not None:
        if monitor_ws.id() == 'Workspace2D':
            dataset.attrs["monitors"] = sc.Variable(
                value=convert_Workspace2D_to_dataarray(monitor_ws))
        elif monitor_ws.id() == 'EventWorkspace':
            dataset.attrs["monitors"] = sc.Variable(
                value=convertEventWorkspace_to_dataarray(
                    monitor_ws, load_pulse_times))

    return dataset
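
A usage sketch mirroring the docstring example, showing where monitors end up (hypothetical file and bank names):

d = load(filename='PG3_4844_event.nxs',
         mantid_args={'BankName': 'bank184', 'LoadMonitors': True})
# When Load returns a monitor workspace, it is attached as
# d.attrs["monitors"].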
Example #12
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        outws_name = self.getPropertyValue("OutputWorkspace")
        norm = self.getPropertyValue("Normalization")

        # load data array from the given file
        data_array = np.loadtxt(filename)
        if not data_array.size:
            message = "File " + filename + " does not contain any data!"
            self.log().error(message)
            raise RuntimeError(message)

        # load run information
        metadata = DNSdata()
        try:
            metadata.read_legacy(filename)
        except RuntimeError as err:
            message = "Error of loading of file " + filename + ": " + str(err)
            self.log().error(message)
            raise RuntimeError(message)

        # load polarisation table and determine polarisation
        poltable = self.get_polarisation_table()
        pol = self.get_polarisation(metadata, poltable)
        if not pol:
            pol = ['0', 'undefined']
            self.log().warning("Failed to determine polarisation for " + filename +
                               ". Values have been set to undefined.")
        ndet = 24
        # this needed to be able to use ConvertToMD
        dataX = np.zeros(2*ndet)
        dataX.fill(metadata.wavelength + 0.00001)
        dataX[::2] -= 0.000002
        # data normalization
        factor = 1.0
        yunit = "Counts"
        ylabel = "Intensity"
        if norm == 'duration':
            factor = metadata.duration
            yunit = "Counts/s"
            ylabel = "Intensity normalized to duration"
            if factor <= 0:
                raise RuntimeError("Duration is invalid for file " + filename + ". Cannot normalize.")
        if norm == 'monitor':
            factor = metadata.monitor_counts
            yunit = "Counts/monitor"
            ylabel = "Intensity normalized to monitor"
            if factor <= 0:
                raise RuntimeError("Monitor counts are invalid for file " + filename + ". Cannot normalize.")
        # set values for dataY and dataE
        dataY = data_array[0:ndet, 1:]/factor
        dataE = np.sqrt(data_array[0:ndet, 1:])/factor
        # create workspace
        api.CreateWorkspace(OutputWorkspace=outws_name, DataX=dataX, DataY=dataY,
                            DataE=dataE, NSpec=ndet, UnitX="Wavelength")
        outws = api.AnalysisDataService.retrieve(outws_name)
        api.LoadInstrument(outws, InstrumentName='DNS', RewriteSpectraMap=True)

        run = outws.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time),
                                   DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(outws, "bank0", X=0, Y=1, Z=0, Angle=metadata.deterota)
        # add sample log Ei and wavelength
        api.AddSampleLog(outws, LogName='Ei', LogText=str(metadata.incident_energy),
                         LogType='Number', LogUnit='meV')
        api.AddSampleLog(outws, LogName='wavelength', LogText=str(metadata.wavelength),
                         LogType='Number', LogUnit='Angstrom')
        # add other sample logs
        api.AddSampleLog(outws, LogName='deterota', LogText=str(metadata.deterota),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, 'mon_sum',
                         LogText=str(float(metadata.monitor_counts)), LogType='Number')
        api.AddSampleLog(outws, LogName='duration', LogText=str(metadata.duration),
                         LogType='Number', LogUnit='Seconds')
        api.AddSampleLog(outws, LogName='huber', LogText=str(metadata.huber),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, LogName='omega', LogText=str(metadata.huber - metadata.deterota),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, LogName='T1', LogText=str(metadata.temp1),
                         LogType='Number', LogUnit='K')
        api.AddSampleLog(outws, LogName='T2', LogText=str(metadata.temp2),
                         LogType='Number', LogUnit='K')
        api.AddSampleLog(outws, LogName='Tsp', LogText=str(metadata.tsp),
                         LogType='Number', LogUnit='K')
        # flipper
        api.AddSampleLog(outws, LogName='flipper_precession',
                         LogText=str(metadata.flipper_precession_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='flipper_z_compensation',
                         LogText=str(metadata.flipper_z_compensation_current),
                         LogType='Number', LogUnit='A')
        flipper_status = 'OFF'    # flipper OFF
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'    # flipper ON
        api.AddSampleLog(outws, LogName='flipper',
                         LogText=flipper_status, LogType='String')
        # coil currents
        api.AddSampleLog(outws, LogName='C_a', LogText=str(metadata.a_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_b', LogText=str(metadata.b_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_c', LogText=str(metadata.c_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_z', LogText=str(metadata.z_coil_current),
                         LogType='Number', LogUnit='A')
        # type of polarisation
        api.AddSampleLog(outws, 'polarisation', LogText=pol[0], LogType='String')
        api.AddSampleLog(outws, 'polarisation_comment', LogText=str(pol[1]), LogType='String')
        # slits
        api.AddSampleLog(outws, LogName='slit_i_upper_blade_position',
                         LogText=str(metadata.slit_i_upper_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, LogName='slit_i_lower_blade_position',
                         LogText=str(metadata.slit_i_lower_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, LogName='slit_i_left_blade_position',
                         LogText=str(metadata.slit_i_left_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, 'slit_i_right_blade_position',
                         LogText=str(metadata.slit_i_right_blade_position),
                         LogType='Number', LogUnit='mm')
        # data normalization

        # add information whether the data are normalized (duration/monitor/no):
        api.AddSampleLog(outws, LogName='normalized', LogText=norm, LogType='String')

        outws.setYUnit(yunit)
        outws.setYUnitLabel(ylabel)

        self.setProperty("OutputWorkspace", outws)
        self.log().debug('LoadDNSLegacy: data are loaded to the workspace ' + outws_name)

        return
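
The long run of per-log AddSampleLog calls above can be collapsed with AddSampleLogMultiple, which is how Example #10 handles the same metadata; a minimal sketch:

api.AddSampleLogMultiple(outws,
                         LogNames=['T1', 'T2', 'Tsp'],
                         LogValues=[metadata.temp1, metadata.temp2, metadata.tsp],
                         LogUnits=['K', 'K', 'K'])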
Example #13
def to_mantid(data, dim, instrument_file=None):
    """
    Convert data to a Mantid workspace.

    The Mantid layout expects the spectra to be the outermost dimension,
    i.e. y.shape[0]. If that is not the case, you may have to transpose
    your data, otherwise it will not be aligned correctly in the Mantid
    workspace.

    :param data: Data to be converted.
    :param dim: Coord to use for Mantid's first axis (X).
    :param instrument_file: Instrument file that will be
                            loaded into the workspace
    :returns: Workspace containing converted data. The concrete workspace type
              may differ depending on the content of `data`.
    """
    if not is_data_array(data):
        raise RuntimeError(
            "Currently only data arrays can be converted to a Mantid workspace"
        )
    if data.data is None or contains_events(data):
        raise RuntimeError(
            "Currently only histogrammed data can be converted.")
    try:
        import mantid.simpleapi as mantid
    except ImportError:
        raise ImportError(
            "Mantid Python API was not found, please install Mantid framework "
            "as detailed in the installation instructions (https://scipp."
            "github.io/getting-started/installation.html)")
    x = data.coords[dim].values
    y = data.values
    e = data.variances

    assert (len(y.shape) == 2 or len(y.shape) == 1), \
        "Currently can only handle 2D data."

    e = np.sqrt(e) if e is not None else np.sqrt(y)

    # Convert a single array (e.g. single spectra) into 2d format
    if len(y.shape) == 1:
        y = np.array([y])

    if len(e.shape) == 1:
        e = np.array([e])

    unitX = validate_dim_and_get_mantid_string(dim)

    nspec = y.shape[0]
    if len(x.shape) == 1:
        # scipp uses a 1:n spectra-to-coord mapping, but Mantid needs
        # a 1:1 mapping, so expand this out
        x = np.broadcast_to(x, shape=(nspec, len(x)))

    nbins = x.shape[1]
    nitems = y.shape[1]

    ws = mantid.WorkspaceFactory.create("Workspace2D",
                                        NVectors=nspec,
                                        XLength=nbins,
                                        YLength=nitems)
    if data.unit != sc.units.counts:
        ws.setDistribution(True)

    for i in range(nspec):
        ws.setX(i, x[i])
        ws.setY(i, y[i])
        ws.setE(i, e[i])

    # Set X-Axis unit
    ws.getAxis(0).setUnit(unitX)

    if instrument_file is not None:
        mantid.LoadInstrument(ws,
                              FileName=instrument_file,
                              RewriteSpectraMap=True)

    return ws
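
A hypothetical call sketch (the exact construction of the scipp data array depends on the scipp version this snippet targets; 'da' and 'tof' below are illustrative only):

# Assuming da is a histogrammed scipp DataArray with a 'tof' coordinate:
ws = to_mantid(da, 'tof')
print(ws.getNumberHistograms())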
Example #14
for i, f in enumerate(frames):

    print("=========================================")
    print("{}/{}".format(i, len(frames)))
    print("=========================================")

    # Create workspace for current frame
    ws_for_this_frame = createWorkspace(data_x=[f.wavelength],
                                        data_y=f.IntensityUp,
                                        data_e=np.sqrt(f.IntensityUp),
                                        n_spec=f.x*f.y,
                                        unit="Wavelength")
    # Register the workspace in the mantid ADS
    mantid.mtd.addOrReplace("ws_for_this_frame", ws_for_this_frame)
    # Load the instrument from the definition file
    mantid.LoadInstrument(ws_for_this_frame, FileName="5C1_Definition.xml",
                          RewriteSpectraMap=True)
    # Rotate the instrument to the current gamma angle (in degrees)
    mantid.RotateInstrumentComponent(ws_for_this_frame, "detector_panel",
                                     X=0, Y=1, Z=0, Angle=f.Gamma,
                                     RelativeRotation=False)
    # Normalise by monitor counts
    normalised = ws_for_this_frame / f.totalmonitorcount
    # Convert to d-spacing
    dspacing = mantid.ConvertUnits(InputWorkspace=normalised,
                                   Target="dSpacing", EMode="Elastic")

    # 1. Naive accumulation of workspace data: this does not seem to work
    rebinned = mantid.Rebin(dspacing, "0.5,0.01,5.0")
    if final is None:
        final = mantid.CloneWorkspace(rebinned)
    else:
Example #15
# coding: utf-8

import os, numpy as np
from mantid import simpleapi as msa, mtd

workdir = "/SNS/users/lj7/dv/sns-chops/detcalib/SEQ"
os.chdir(workdir)

# ## Compute nominal difc
nxspath = '/SNS/SEQ/IPTS-19573/nexus/SEQ_130249.nxs.h5'
ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0, FilterByTimeStop=1)
msa.LoadInstrument(ws,
                   Filename='./SEQUOIA_Definition_guessshortpacks.xml',
                   RewriteSpectraMap=False)
difc = msa.CalculateDIFC(InputWorkspace=ws)
difc = difc.extractY().flatten().copy()
msa.DeleteWorkspace('difc')

# get det ID list
detIDs = []
for i in range(ws.getNumberHistograms()):
    sp = ws.getSpectrum(i)
    dets = list(sp.getDetectorIDs())
    assert len(dets) == 1
    detIDs.append(dets[0])
    continue
for i in range(len(detIDs) - 1):
    assert detIDs[i] < detIDs[i + 1]


# # Get pack index
Example #16
packtype = 'eightpack'
x_path = 'C60-C26T-I_d-x.npy'
y_path = 'C60-I_d-y-B24.npy'

d_spacing_max_mismatch = 0.2  # maximum fractional mismatch of d spacing values allowed.
d_spacing_peak_width = 0.1  # fractional width of d spacing peak.
maxchisq = 3.  # if chisq>maxchisq, mask this pixel
min_counts = 2000  # if total counts of the peak < min_counts, don't count this peak

# Outputs
difc_outpath = "C60-difc-2-B24.npy"
difc_mask_outpath = 'C60-difc-2-B24-mask.npy'

# ## Compute nominal difc
ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0, FilterByTimeStop=1)
msa.LoadInstrument(ws, Filename=initial_idf, RewriteSpectraMap=False)
difc = msa.CalculateDIFC(InputWorkspace=ws)
difc = difc.extractY().flatten().copy()
msa.DeleteWorkspace('difc')

# # Get pack pixel IDs
instrument = ws.getInstrument()
pack = instrument.getComponentByName("%s/%s" % (packname, packtype))
firstpixel = pack[0][0].getID()
lasttube = pack[pack.nelements() - 1]
lastpixel = lasttube[lasttube.nelements() - 1]
lastpixel = lastpixel.getID()

# Get detID list
detIDs = []
for i in range(ws.getNumberHistograms()):
Example #17
def compare(
        pack="C25B/eightpack-bottom",
        nxspath="/SNS/SEQ/IPTS-19573/nexus/SEQ_130249.nxs.h5",  #C60
        detIDs_npy='../C60-I_d/detIDs.npy',
        newIDF='./SEQUOIA_Definition.xml',
        dmin=2,
        dmax=11,
        dd=0.01,
        dvalues=None,
        tmin=0,
        tmax=2000):
    orig_ws = msa.LoadEventNexus(Filename=nxspath,
                                 FilterByTimeStart=tmin,
                                 FilterByTimeStop=tmax)

    ws = orig_ws
    instrument = ws.getInstrument()
    packnameandtype = pack
    packname, packtype = pack.split('/')
    pack = instrument.getComponentByName(packnameandtype)
    firstpixel = pack[0][0].getID()
    lasttube = pack[pack.nelements() - 1]
    lastpixel = lasttube[lasttube.nelements() - 1]
    lastpixel = lastpixel.getID()
    print "first and last pixel IDs:", firstpixel, lastpixel
    #
    #
    detIDs = list(np.load(detIDs_npy))
    startindex = detIDs.index(firstpixel)
    endindex = detIDs.index(lastpixel)
    print "first and last pixel indexes:", startindex, endindex
    del ws

    # # Old I(d)
    daxis = "%s,%s,%s" % (dmin, dd, dmax)
    I_d_0 = msa.ConvertUnits(InputWorkspace=orig_ws,
                             Target='dSpacing',
                             EMode='Elastic')
    I_d_0 = msa.Rebin(InputWorkspace=I_d_0, Params=daxis)
    pack_I_d_0 = msa.SumSpectra(InputWorkspace=I_d_0,
                                StartWorkspaceIndex=startindex,
                                EndWorkspaceIndex=endindex)
    xbb0 = pack_I_d_0.readX(0)
    y0 = pack_I_d_0.readY(0).copy()
    x0 = (xbb0[1:] + xbb0[:-1]) / 2
    msa.DeleteWorkspace(I_d_0)
    msa.DeleteWorkspace(pack_I_d_0)

    # # New I(d)
    msa.LoadInstrument(orig_ws, Filename=newIDF, RewriteSpectraMap=False)
    I_d_1 = msa.ConvertUnits(InputWorkspace=orig_ws,
                             Target='dSpacing',
                             EMode='Elastic')
    I_d_1 = msa.Rebin(InputWorkspace=I_d_1, Params=daxis)
    pack_I_d_1 = msa.SumSpectra(InputWorkspace=I_d_1,
                                StartWorkspaceIndex=startindex,
                                EndWorkspaceIndex=endindex)
    xbb1 = pack_I_d_1.readX(0)
    y1 = pack_I_d_1.readY(0).copy()
    x1 = (xbb1[1:] + xbb1[:-1]) / 2
    msa.DeleteWorkspace(I_d_1)
    msa.DeleteWorkspace(pack_I_d_1)
    msa.DeleteWorkspace(orig_ws)

    data = [x0, y0, x1, y1]
    np.save("%s-I_d.npy" % packname, data)
    plt.figure(figsize=(7, 4))
    plt.title("Pack %s" % packname)
    plt.plot(x0, y0, label='original')
    plt.plot(x1, y1, label='after loading new xml')
    for d in (dvalues or []):
        plt.axvline(x=d, linewidth=1, color='k')
    # plt.xlim(3,3.3)
    plt.legend(loc='upper left')
    outpng = '%s-I_d.png' % packname
    plt.savefig(outpng)
    return
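
A hypothetical invocation (it assumes mantid.simpleapi as msa, numpy as np and matplotlib.pyplot as plt are imported, since the function body uses them; the d values are placeholders):

compare(pack='C25B/eightpack-bottom',
        newIDF='./SEQUOIA_Definition.xml',
        dvalues=[2.13, 3.57])   # reference d-spacings to mark with vertical lines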
Example #18
def get_I_d(nxs_files,
            init_IDF,
            outdir,
            packs,
            dt=1000.,
            d_axis=(2., 11., 0.02),
            Npixels_per_pack=1024):
    """nxs_files: paths of calibration nxs files
    init_IDF: initial IDF path
    outdir: output directory
    packs: list of pack names, e.g. C26B/eightpack-bottom
    dt: time step for loading files. too large will need too much memory
    d_axis: dmin, dmax, delta_d. e.g. 2., 11., 0.02
    Npixels_per_pack: number of pixels per pack

    Output files:
    * difc-nominal.npy
    * detIDs.npy
    * I_d-xbb.npy
    * I_d-y-PACKNAME.npy
    * pack-PACKNAME.yaml

    NOTE:
    * Assumed that the difc array from CalculateDIFC is ordered according to the "spectrum list" in
      the mantid workspace. See function getDetIDs
    * Different combinations of nxs_files, init_IDF, d_axis should use different outdirs
    """
    if not os.path.exists(outdir): os.makedirs(outdir)
    # ## Compute nominal difc using first file in the list
    nxspath = nxs_files[0]
    ws = msa.LoadEventNexus(nxspath, FilterByTimeStart=0,
                            FilterByTimeStop=1)  # load just one second
    #
    msa.LoadInstrument(ws, Filename=init_IDF, RewriteSpectraMap=False)
    import shutil
    shutil.copyfile(init_IDF, os.path.join(outdir, 'init_IDF.xml'))
    #
    difc = msa.CalculateDIFC(InputWorkspace=ws)
    difc = difc.extractY().flatten().copy()
    msa.DeleteWorkspace('difc')
    np.save(os.path.join(outdir, 'difc-nominal.npy'), difc)
    # IDs of all pixels
    detIDs = getDetIDs(ws)
    np.save(os.path.join(outdir, 'detIDs.npy'), detIDs)
    #
    # map pack name to (start_pixelID, stop_pixelID)
    pack2pixelID_start_stop = dict()
    for name in packs:
        pack2pixelID_start_stop[name] = getFirstLastPixelIDs(ws, name)
        continue
    # clean up
    msa.DeleteWorkspace('ws')

    runtimes = dict()
    for f in nxs_files:
        runtimes[f] = getRunTime(f)
    print "* run times:", runtimes

    dmin, dmax, delta_d = d_axis
    Nd = int((dmax - dmin) / delta_d)
    print "* Number of d bins:", Nd

    #
    Npacks = len(packs)

    y_matrix = np.zeros((Npacks, Npixels_per_pack, Nd))
    xbb_saved = None
    for nxsfile in nxs_files:
        print "* Working on", nxsfile
        t_total = runtimes[nxsfile]
        for tstart in np.arange(0, t_total - dt, dt):
            print "* tstart", tstart
            tend = min(t_total - 1, tstart + dt)
            ws = msa.LoadEventNexus(nxsfile,
                                    FilterByTimeStart=tstart,
                                    FilterByTimeStop=tend)
            msa.LoadInstrument(ws, Filename=init_IDF, RewriteSpectraMap=False)
            I_d = msa.ConvertUnits(InputWorkspace=ws,
                                   Target='dSpacing',
                                   EMode='Elastic')
            I_d = msa.Rebin(InputWorkspace=I_d,
                            Params='%s,%s,%s' % (dmin, delta_d, dmax))

            # loop over packs
            for ipack, packname in enumerate(packs):
                firstpixel, lastpixel = pack2pixelID_start_stop[packname]
                startindex = detIDs.index(firstpixel)
                endindex = detIDs.index(lastpixel)
                print "array indexes of first and last pixel", startindex, endindex

                y_pack = y_matrix[ipack]
                # loop over pixels in the pack
                for i, pixelindex in enumerate(range(startindex,
                                                     endindex + 1)):
                    I_d_pixel = msa.SumSpectra(InputWorkspace=I_d,
                                               StartWorkspaceIndex=pixelindex,
                                               EndWorkspaceIndex=pixelindex)
                    xbb = I_d_pixel.readX(0)
                    if xbb_saved is None: xbb_saved = np.array(xbb, copy=True)
                    y = I_d_pixel.readY(0)
                    y_pack[i] += y
                    msa.DeleteWorkspace('I_d_pixel')
                    continue
                continue

            msa.DeleteWorkspaces(['ws', 'I_d'])
            continue
        continue

    xbb = np.arange(dmin, dmax + delta_d / 2., delta_d)
    np.save(os.path.join(outdir, "I_d-xbb.npy"), xbb)
    # for debugging
    np.save(os.path.join(outdir, "I_d-y_matrix.npy"), y_matrix)

    for ipack, packname in enumerate(packs):
        y_pack = y_matrix[ipack]
        packname1 = packname.split('/')[0]  # "C25T"
        # save y values of I(d) for the pack
        np.save(os.path.join(outdir, "I_d-y-%s.npy" % packname1), y_pack)
        # save pack info
        first, last = pack2pixelID_start_stop[packname]
        pixelIDs = dict(first=first, last=last)
        pack_info = dict(pixelIDs=pixelIDs)
        dumpYaml(pack_info, os.path.join(outdir, 'pack-%s.yaml' % packname1))
        continue
    return
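
A hypothetical call (paths are placeholders; the pack name follows the docstring's example):

get_I_d(['/path/to/SEQ_130249.nxs.h5'],
        init_IDF='/path/to/SEQUOIA_Definition.xml',
        outdir='I_d-out',
        packs=['C26B/eightpack-bottom'])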