Example #1
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        outws_name = self.getPropertyValue("OutputWorkspace")
        norm = self.getPropertyValue("Normalization")
        wavelength = self.getProperty("Wavelength").value

        # load data array from the given file
        data_array = np.loadtxt(filename)
        if not data_array.size:
            message = "File " + filename + " does not contain any data!"
            self.log().error(message)
            raise RuntimeError(message)
        # sample logs
        logs = {"names": [], "values": [], "units": []}

        # load run information
        metadata = DNSdata()
        try:
            metadata.read_legacy(filename)
        except RuntimeError as err:
            message = "Error loading file " + filename + ": " + str(err)
            self.log().error(message)
            raise RuntimeError(message)

        if wavelength < 0.01:
            wavelength = metadata.wavelength

        # calculate the incident energy; the value given in the data file is wrong
        velocity = h/(m_n*wavelength*1e-10)   # m/s
        incident_energy = 0.5e+03*m_n*velocity*velocity/physical_constants['electron volt'][0]  # meV

        tmp = api.LoadEmptyInstrument(InstrumentName='DNS')
        self.instrument = tmp.getInstrument()
        api.DeleteWorkspace(tmp)

        # load polarisation table and determine polarisation
        poltable = self.get_polarisation_table()
        pol = self.get_polarisation(metadata, poltable)
        if not pol:
            pol = ['0', 'undefined']
            self.log().warning("Failed to determine polarisation for " + filename +
                               ". Values have been set to undefined.")
        ndet = 24
        unitX = "Wavelength"
        arr = data_array[0:ndet, 1:]
        if metadata.tof_channel_number < 2:
            dataX = np.zeros(2*ndet)
            dataX.fill(wavelength + 0.00001)
            dataX[::2] -= 0.000002
        else:
            unitX = "TOF"

            # get instrument parameters
            l1 = np.linalg.norm(self.instrument.getSample().getPos() - self.instrument.getSource().getPos())
            l2 = float(self.instrument.getStringParameter("l2")[0])
            self.log().notice("L1 = {} m".format(l1))
            self.log().notice("L2 = {} m".format(l2))
            dt_factor = float(self.instrument.getStringParameter("channel_width_factor")[0])

            # channel width
            dt = metadata.tof_channel_width*dt_factor
            # calculate tof1
            tof1 = 1e+06*l1/velocity        # microseconds
            self.log().debug("TOF1 = {} microseconds".format(tof1))
            self.log().debug("Delay time = {} microsecond".format(metadata.tof_delay_time))
            tof2_elastic = 1e+06*l2/velocity
            self.log().debug("TOF2 Elastic = {} microseconds".format(tof2_elastic))
            epp_geom = int(tof2_elastic/dt)

            epp_user = self.getProperty("ElasticChannel").value

            # during the commissioning period the EPP in the data file is not relevant
            in_comissioning = self.instrument.getStringParameter("tof_comissioning")[0]
            if (epp_user < 1) and (in_comissioning == 'no') and metadata.tof_elastic_channel:
                epp_user = metadata.tof_elastic_channel

            # shift channels to keep the elastic peak in the right position
            # required, since the zero-time channel is not calibrated
            if epp_user > 0:
                arr = np.roll(arr, epp_geom - epp_user, 1)

            # create dataX array
            x0 = tof1 + metadata.tof_delay_time
            dataX = np.linspace(x0, x0+metadata.tof_channel_number*dt, metadata.tof_channel_number+1)

            # sample logs
            logs["names"].extend(["channel_width", "TOF1", "delay_time", "tof_channels"])
            logs["values"].extend([dt, tof1, metadata.tof_delay_time, metadata.tof_channel_number])
            logs["units"].extend(["microseconds", "microseconds", "microseconds", ""])
            if epp_user:
                logs["names"].append("EPP")
                logs["values"].append(epp_user)
                logs["units"].append("")
            if metadata.chopper_rotation_speed:
                logs["names"].append("chopper_speed")
                logs["values"].append(metadata.chopper_rotation_speed)
                logs["units"].append("Hz")
            if metadata.chopper_slits:
                logs["names"].append("chopper_slits")
                logs["values"].append(metadata.chopper_slits)
                logs["units"].append("")

        # data normalization
        factor = 1.0
        yunit = "Counts"
        ylabel = "Intensity"
        if norm == 'duration':
            factor = metadata.duration
            yunit = "Counts/s"
            ylabel = "Intensity normalized to duration"
            if factor <= 0:
                raise RuntimeError("Duration is invalid for file " + filename + ". Cannot normalize.")
        if norm == 'monitor':
            factor = metadata.monitor_counts
            yunit = "Counts/monitor"
            ylabel = "Intensity normalized to monitor"
            if factor <= 0:
                raise RuntimeError("Monitor counts are invalid for file " + filename + ". Cannot normalize.")
        # set values for dataY and dataE
        dataY = arr/factor
        dataE = np.sqrt(arr)/factor

        # create workspace
        api.CreateWorkspace(OutputWorkspace=outws_name, DataX=dataX, DataY=dataY,
                            DataE=dataE, NSpec=ndet, UnitX=unitX)
        outws = api.AnalysisDataService.retrieve(outws_name)
        api.LoadInstrument(outws, InstrumentName='DNS', RewriteSpectraMap=True)

        run = outws.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time),
                                   DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(outws, "bank0", X=0, Y=1, Z=0, Angle=metadata.deterota)
        # add sample log Ei and wavelength
        logs["names"].extend(["Ei", "wavelength"])
        logs["values"].extend([incident_energy, wavelength])
        logs["units"].extend(["meV", "Angstrom"])

        # add other sample logs
        logs["names"].extend(["deterota", "mon_sum", "duration", "huber", "omega", "T1", "T2", "Tsp"])
        logs["values"].extend([metadata.deterota, float(metadata.monitor_counts), metadata.duration,
                               metadata.huber, metadata.huber - metadata.deterota,
                               metadata.temp1, metadata.temp2, metadata.tsp])
        logs["units"].extend(["Degrees", "Counts", "Seconds", "Degrees", "Degrees", "K", "K", "K"])

        # flipper, coil currents and polarisation
        flipper_status = 'OFF'    # flipper OFF
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'    # flipper ON
        logs["names"].extend(["flipper_precession", "flipper_z_compensation", "flipper",
                              "C_a", "C_b", "C_c", "C_z", "polarisation", "polarisation_comment"])
        logs["values"].extend([metadata.flipper_precession_current,
                               metadata.flipper_z_compensation_current, flipper_status,
                               metadata.a_coil_current, metadata.b_coil_current,
                               metadata.c_coil_current, metadata.z_coil_current,
                               str(pol[0]), str(pol[1])])
        logs["units"].extend(["A", "A", "", "A", "A", "A", "A", "", ""])

        # slits
        logs["names"].extend(["slit_i_upper_blade_position", "slit_i_lower_blade_position",
                              "slit_i_left_blade_position", "slit_i_right_blade_position"])
        logs["values"].extend([metadata.slit_i_upper_blade_position, metadata.slit_i_lower_blade_position,
                               metadata.slit_i_left_blade_position, metadata.slit_i_right_blade_position])
        logs["units"].extend(["mm", "mm", "mm", "mm"])

        # add information on whether the data are normalized (duration/monitor/no):
        api.AddSampleLog(outws, LogName='normalized', LogText=norm, LogType='String')
        api.AddSampleLogMultiple(outws, LogNames=logs["names"], LogValues=logs["values"], LogUnits=logs["units"])

        outws.setYUnit(yunit)
        outws.setYUnitLabel(ylabel)

        self.setProperty("OutputWorkspace", outws)
        self.log().debug('LoadDNSLegacy: data are loaded into the workspace ' + outws_name)

        return
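
The incident energy above is recomputed from the wavelength instead of being taken from the data file. A standalone sketch of that conversion, using scipy.constants and an illustrative 4.2 Angstrom wavelength (the value is made up), looks like this:

# Minimal sketch of the wavelength -> velocity -> energy conversion used above.
# The wavelength value is illustrative only.
from scipy.constants import h, m_n, physical_constants

wavelength = 4.2                                    # neutron wavelength in Angstrom
velocity = h / (m_n * wavelength * 1e-10)           # de Broglie velocity in m/s
# E = 0.5*m*v^2, converted from Joule to meV
incident_energy = 0.5e+03 * m_n * velocity**2 / physical_constants['electron volt'][0]
print("v = {:.1f} m/s, Ei = {:.3f} meV".format(velocity, incident_energy))
# for 4.2 Angstrom this gives roughly 940 m/s and 4.6 meV
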
Example #2
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        outws_name = self.getPropertyValue("OutputWorkspace")
        norm = self.getPropertyValue("Normalization")

        # load data array from the given file
        data_array = np.loadtxt(filename)
        if not data_array.size:
            message = "File " + filename + " does not contain any data!"
            self.log().error(message)
            raise RuntimeError(message)

        # load run information
        metadata = DNSdata()
        try:
            metadata.read_legacy(filename)
        except RuntimeError as err:
            message = "Error loading file " + filename + ": " + str(err)
            self.log().error(message)
            raise RuntimeError(message)

        # load polarisation table and determine polarisation
        poltable = self.get_polarisation_table()
        pol = self.get_polarisation(metadata, poltable)
        if not pol:
            pol = ['0', 'undefined']
            self.log().warning("Failed to determine polarisation for " + filename +
                               ". Values have been set to undefined.")
        ndet = 24
        # this is needed to be able to use ConvertToMD
        dataX = np.zeros(2*ndet)
        dataX.fill(metadata.wavelength + 0.00001)
        dataX[::2] -= 0.000002
        # data normalization
        factor = 1.0
        yunit = "Counts"
        ylabel = "Intensity"
        if norm == 'duration':
            factor = metadata.duration
            yunit = "Counts/s"
            ylabel = "Intensity normalized to duration"
            if factor <= 0:
                raise RuntimeError("Duration is invalid for file " + filename + ". Cannot normalize.")
        if norm == 'monitor':
            factor = metadata.monitor_counts
            yunit = "Counts/monitor"
            ylabel = "Intensity normalized to monitor"
            if factor <= 0:
                raise RuntimeError("Monitor counts are invalid for file " + filename + ". Cannot normalize.")
        # set values for dataY and dataE
        dataY = data_array[0:ndet, 1:]/factor
        dataE = np.sqrt(data_array[0:ndet, 1:])/factor
        # create workspace
        api.CreateWorkspace(OutputWorkspace=outws_name, DataX=dataX, DataY=dataY,
                            DataE=dataE, NSpec=ndet, UnitX="Wavelength")
        outws = api.AnalysisDataService.retrieve(outws_name)
        api.LoadInstrument(outws, InstrumentName='DNS', RewriteSpectraMap=True)

        run = outws.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time),
                                   DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(outws, "bank0", X=0, Y=1, Z=0, Angle=metadata.deterota)
        # add sample log Ei and wavelength
        api.AddSampleLog(outws, LogName='Ei', LogText=str(metadata.incident_energy),
                         LogType='Number', LogUnit='meV')
        api.AddSampleLog(outws, LogName='wavelength', LogText=str(metadata.wavelength),
                         LogType='Number', LogUnit='Angstrom')
        # add other sample logs
        api.AddSampleLog(outws, LogName='deterota', LogText=str(metadata.deterota),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, 'mon_sum',
                         LogText=str(float(metadata.monitor_counts)), LogType='Number')
        api.AddSampleLog(outws, LogName='duration', LogText=str(metadata.duration),
                         LogType='Number', LogUnit='Seconds')
        api.AddSampleLog(outws, LogName='huber', LogText=str(metadata.huber),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, LogName='omega', LogText=str(metadata.huber - metadata.deterota),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, LogName='T1', LogText=str(metadata.temp1),
                         LogType='Number', LogUnit='K')
        api.AddSampleLog(outws, LogName='T2', LogText=str(metadata.temp2),
                         LogType='Number', LogUnit='K')
        api.AddSampleLog(outws, LogName='Tsp', LogText=str(metadata.tsp),
                         LogType='Number', LogUnit='K')
        # flipper
        api.AddSampleLog(outws, LogName='flipper_precession',
                         LogText=str(metadata.flipper_precession_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='flipper_z_compensation',
                         LogText=str(metadata.flipper_z_compensation_current),
                         LogType='Number', LogUnit='A')
        flipper_status = 'OFF'    # flipper OFF
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'    # flipper ON
        api.AddSampleLog(outws, LogName='flipper',
                         LogText=flipper_status, LogType='String')
        # coil currents
        api.AddSampleLog(outws, LogName='C_a', LogText=str(metadata.a_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_b', LogText=str(metadata.b_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_c', LogText=str(metadata.c_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_z', LogText=str(metadata.z_coil_current),
                         LogType='Number', LogUnit='A')
        # type of polarisation
        api.AddSampleLog(outws, 'polarisation', LogText=pol[0], LogType='String')
        api.AddSampleLog(outws, 'polarisation_comment', LogText=str(pol[1]), LogType='String')
        # slits
        api.AddSampleLog(outws, LogName='slit_i_upper_blade_position',
                         LogText=str(metadata.slit_i_upper_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, LogName='slit_i_lower_blade_position',
                         LogText=str(metadata.slit_i_lower_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, LogName='slit_i_left_blade_position',
                         LogText=str(metadata.slit_i_left_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, 'slit_i_right_blade_position',
                         LogText=str(metadata.slit_i_right_blade_position),
                         LogType='Number', LogUnit='mm')

        # add information on whether the data are normalized (duration/monitor/no):
        api.AddSampleLog(outws, LogName='normalized', LogText=norm, LogType='String')

        outws.setYUnit(yunit)
        outws.setYUnitLabel(ylabel)

        self.setProperty("OutputWorkspace", outws)
        self.log().debug('LoadDNSLegacy: data are loaded into the workspace ' + outws_name)

        return
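
The x-axis in this version is not a real wavelength axis: every spectrum gets a single, artificially narrow bin just above the nominal wavelength, which is what the comment about ConvertToMD refers to. A minimal sketch of that trick with an illustrative wavelength:

# Each of the 24 spectra gets one bin of width 2e-6 Angstrom around the
# nominal wavelength; the wavelength value below is made up.
import numpy as np

ndet = 24
wavelength = 4.2                      # Angstrom, illustrative
dataX = np.zeros(2 * ndet)
dataX.fill(wavelength + 0.00001)      # upper bin edge for every spectrum
dataX[::2] -= 0.000002                # lower bin edge, 2e-6 Angstrom below
print(dataX[:2])                      # edges of the first spectrum: ~[4.200008, 4.200010]
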
Example #3
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        outws_name = self.getPropertyValue("OutputWorkspace")
        norm = self.getPropertyValue("Normalization")

        # load data array from the given file
        data_array = np.loadtxt(filename)
        if not data_array.size:
            message = "File " + filename + " does not contain any data!"
            self.log().error(message)
            raise RuntimeError(message)
        # sample logs
        logs = {"names": [], "values": [], "units": []}

        # load run information
        metadata = DNSdata()
        try:
            metadata.read_legacy(filename)
        except RuntimeError as err:
            message = "Error loading file " + filename + ": " + str(err)
            self.log().error(message)
            raise RuntimeError(message)

        tmp = api.LoadEmptyInstrument(InstrumentName='DNS')
        self.instrument = tmp.getInstrument()
        api.DeleteWorkspace(tmp)

        # load polarisation table and determine polarisation
        poltable = self.get_polarisation_table()
        pol = self.get_polarisation(metadata, poltable)
        if not pol:
            pol = ['0', 'undefined']
            self.log().warning("Failed to determine polarisation for " +
                               filename +
                               ". Values have been set to undefined.")
        ndet = 24
        unitX = "Wavelength"
        if metadata.tof_channel_number < 2:
            dataX = np.zeros(2 * ndet)
            dataX.fill(metadata.wavelength + 0.00001)
            dataX[::2] -= 0.000002
        else:
            unitX = "TOF"

            # get instrument parameters
            l1 = np.linalg.norm(self.instrument.getSample().getPos() -
                                self.instrument.getSource().getPos())
            self.log().notice("L1 = {} m".format(l1))
            dt_factor = float(
                self.instrument.getStringParameter("channel_width_factor")[0])

            # channel width
            dt = metadata.tof_channel_width * dt_factor
            # calculate tof1
            velocity = h / (m_n * metadata.wavelength * 1e-10)  # m/s
            tof1 = 1e+06 * l1 / velocity  # microseconds
            self.log().debug("TOF1 = {} microseconds".format(tof1))
            self.log().debug("Delay time = {} microsecond".format(
                metadata.tof_delay_time))
            # create dataX array
            x0 = tof1 + metadata.tof_delay_time
            dataX = np.linspace(x0, x0 + metadata.tof_channel_number * dt,
                                metadata.tof_channel_number + 1)

            # sample logs
            logs["names"].extend(
                ["channel_width", "TOF1", "delay_time", "tof_channels"])
            logs["values"].extend([
                dt, tof1, metadata.tof_delay_time, metadata.tof_channel_number
            ])
            logs["units"].extend(
                ["microseconds", "microseconds", "microseconds", ""])
            if metadata.tof_elastic_channel:
                logs["names"].append("EPP")
                logs["values"].append(metadata.tof_elastic_channel)
                logs["units"].append("")
            if metadata.chopper_rotation_speed:
                logs["names"].append("chopper_speed")
                logs["values"].append(metadata.chopper_rotation_speed)
                logs["units"].append("Hz")
            if metadata.chopper_slits:
                logs["names"].append("chopper_slits")
                logs["values"].append(metadata.chopper_slits)
                logs["units"].append("")

        # data normalization
        factor = 1.0
        yunit = "Counts"
        ylabel = "Intensity"
        if norm == 'duration':
            factor = metadata.duration
            yunit = "Counts/s"
            ylabel = "Intensity normalized to duration"
            if factor <= 0:
                raise RuntimeError("Duration is invalid for file " + filename +
                                   ". Cannot normalize.")
        if norm == 'monitor':
            factor = metadata.monitor_counts
            yunit = "Counts/monitor"
            ylabel = "Intensity normalized to monitor"
            if factor <= 0:
                raise RuntimeError("Monitor counts are invalid for file " +
                                   filename + ". Cannot normalize.")
        # set values for dataY and dataE
        dataY = data_array[0:ndet, 1:] / factor
        dataE = np.sqrt(data_array[0:ndet, 1:]) / factor
        # create workspace
        api.CreateWorkspace(OutputWorkspace=outws_name,
                            DataX=dataX,
                            DataY=dataY,
                            DataE=dataE,
                            NSpec=ndet,
                            UnitX=unitX)
        outws = api.AnalysisDataService.retrieve(outws_name)
        api.LoadInstrument(outws, InstrumentName='DNS', RewriteSpectraMap=True)

        run = outws.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time),
                                   DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(outws,
                                      "bank0",
                                      X=0,
                                      Y=1,
                                      Z=0,
                                      Angle=metadata.deterota)
        # add sample log Ei and wavelength
        logs["names"].extend(["Ei", "wavelength"])
        logs["values"].extend([metadata.incident_energy, metadata.wavelength])
        logs["units"].extend(["meV", "Angstrom"])

        # add other sample logs
        logs["names"].extend([
            "deterota", "mon_sum", "duration", "huber", "omega", "T1", "T2",
            "Tsp"
        ])
        logs["values"].extend([
            metadata.deterota, metadata.monitor_counts, metadata.duration,
            metadata.huber, metadata.huber - metadata.deterota, metadata.temp1,
            metadata.temp2, metadata.tsp
        ])
        logs["units"].extend([
            "Degrees", "Counts", "Seconds", "Degrees", "Degrees", "K", "K", "K"
        ])

        # flipper, coil currents and polarisation
        flipper_status = 'OFF'  # flipper OFF
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'  # flipper ON
        logs["names"].extend([
            "flipper_precession", "flipper_z_compensation", "flipper", "C_a",
            "C_b", "C_c", "C_z", "polarisation", "polarisation_comment"
        ])
        logs["values"].extend([
            metadata.flipper_precession_current,
            metadata.flipper_z_compensation_current, flipper_status,
            metadata.a_coil_current, metadata.b_coil_current,
            metadata.c_coil_current, metadata.z_coil_current,
            str(pol[0]),
            str(pol[1])
        ])
        logs["units"].extend(["A", "A", "", "A", "A", "A", "A", "", ""])

        # slits
        logs["names"].extend([
            "slit_i_upper_blade_position", "slit_i_lower_blade_position",
            "slit_i_left_blade_position", "slit_i_right_blade_position"
        ])
        logs["values"].extend([
            metadata.slit_i_upper_blade_position,
            metadata.slit_i_lower_blade_position,
            metadata.slit_i_left_blade_position,
            metadata.slit_i_right_blade_position
        ])
        logs["units"].extend(["mm", "mm", "mm", "mm"])

        # add information on whether the data are normalized (duration/monitor/no):
        api.AddSampleLog(outws,
                         LogName='normalized',
                         LogText=norm,
                         LogType='String')
        api.AddSampleLogMultiple(outws,
                                 LogNames=logs["names"],
                                 LogValues=logs["values"],
                                 LogUnits=logs["units"])

        outws.setYUnit(yunit)
        outws.setYUnitLabel(ylabel)

        self.setProperty("OutputWorkspace", outws)
        self.log().debug('LoadDNSLegacy: data are loaded into the workspace ' +
                         outws_name)

        return
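
In the TOF branch above, the x-axis is built as bin edges that start at the arrival time of the neutrons at the sample plus the delay time. A small sketch of that construction; the flight path, channel width, delay time and channel count below are illustrative numbers, not DNS instrument parameters:

# Sketch of the TOF bin-edge construction used in the TOF branch above.
import numpy as np
from scipy.constants import h, m_n

wavelength = 4.2        # Angstrom, illustrative
l1 = 0.4                # m, source-to-sample distance, illustrative
dt = 40.0               # microseconds per TOF channel, illustrative
delay = 100.0           # microseconds, illustrative
nchannels = 100         # illustrative

velocity = h / (m_n * wavelength * 1e-10)           # m/s
tof1 = 1e+06 * l1 / velocity                        # time to reach the sample, microseconds
x0 = tof1 + delay
# nchannels + 1 bin edges spanning nchannels channels of width dt
dataX = np.linspace(x0, x0 + nchannels * dt, nchannels + 1)
print(len(dataX), dataX[0], dataX[-1])              # 101 edges from x0 to x0 + 4000
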
Example #4
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        outws_name = self.getPropertyValue("OutputWorkspace")
        monws_name = outws_name + '_NORM'
        pol = self.getPropertyValue("Polarisation")
        norm = self.getPropertyValue("Normalization")

        # load data array from the given file
        data_array = np.loadtxt(filename)
        if not data_array.size:
            message = "File " + filename + " does not contain any data!"
            self.log().error(message)
            raise RuntimeError(message)

        # load run information
        metadata = DNSdata()
        try:
            metadata.read_legacy(filename)
        except RuntimeError as err:
            message = "Error loading file " + filename + ": " + str(err)
            self.log().error(message)
            raise RuntimeError(message)

        ndet = 24
        # this is needed to be able to use ConvertToMD
        dataX = np.zeros(2*ndet)
        dataX.fill(metadata.wavelength + 0.00001)
        dataX[::2] -= 0.000002
        dataY = data_array[0:ndet, 1:]
        dataE = np.sqrt(dataY)
        # create workspace
        api.CreateWorkspace(OutputWorkspace=outws_name, DataX=dataX, DataY=dataY,
                            DataE=dataE, NSpec=ndet, UnitX="Wavelength")
        outws = api.mtd[outws_name]
        api.LoadInstrument(outws, InstrumentName='DNS')

        run = outws.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time),
                                   DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(outws, "bank0", X=0, Y=1, Z=0, Angle=metadata.deterota)
        # add sample log Ei and wavelength
        api.AddSampleLog(outws, LogName='Ei', LogText=str(metadata.incident_energy),
                         LogType='Number', LogUnit='meV')
        api.AddSampleLog(outws, LogName='wavelength', LogText=str(metadata.wavelength),
                         LogType='Number', LogUnit='Angstrom')
        # add other sample logs
        api.AddSampleLog(outws, LogName='deterota', LogText=str(metadata.deterota),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, 'mon_sum',
                         LogText=str(float(metadata.monitor_counts)), LogType='Number')
        api.AddSampleLog(outws, LogName='duration', LogText=str(metadata.duration),
                         LogType='Number', LogUnit='Seconds')
        api.AddSampleLog(outws, LogName='huber', LogText=str(metadata.huber),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, LogName='omega', LogText=str(metadata.huber - metadata.deterota),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, LogName='T1', LogText=str(metadata.t1),
                         LogType='Number', LogUnit='K')
        api.AddSampleLog(outws, LogName='T2', LogText=str(metadata.t2),
                         LogType='Number', LogUnit='K')
        api.AddSampleLog(outws, LogName='Tsp', LogText=str(metadata.tsp),
                         LogType='Number', LogUnit='K')
        # flipper
        api.AddSampleLog(outws, LogName='flipper_precession',
                         LogText=str(metadata.flipper_precession_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='flipper_z_compensation',
                         LogText=str(metadata.flipper_z_compensation_current),
                         LogType='Number', LogUnit='A')
        flipper_status = 'OFF'    # flipper OFF
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'    # flipper ON
        api.AddSampleLog(outws, LogName='flipper',
                         LogText=flipper_status, LogType='String')
        # coil currents
        api.AddSampleLog(outws, LogName='C_a', LogText=str(metadata.a_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_b', LogText=str(metadata.b_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_c', LogText=str(metadata.c_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_z', LogText=str(metadata.z_coil_current),
                         LogType='Number', LogUnit='A')
        # type of polarisation
        api.AddSampleLog(outws, 'polarisation',
                         LogText=pol, LogType='String')
        # slits
        api.AddSampleLog(outws, LogName='slit_i_upper_blade_position',
                         LogText=str(metadata.slit_i_upper_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, LogName='slit_i_lower_blade_position',
                         LogText=str(metadata.slit_i_lower_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, LogName='slit_i_left_blade_position',
                         LogText=str(metadata.slit_i_left_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, 'slit_i_right_blade_position',
                         LogText=str(metadata.slit_i_right_blade_position),
                         LogType='Number', LogUnit='mm')

        # create workspace with normalization data (monitor or duration)
        if norm == 'duration':
            dataY.fill(metadata.duration)
            dataE.fill(0.001)
        else:
            dataY.fill(metadata.monitor_counts)
            dataE = np.sqrt(dataY)
        api.CreateWorkspace(OutputWorkspace=monws_name, DataX=dataX, DataY=dataY,
                            DataE=dataE, NSpec=ndet, UnitX="Wavelength")
        monws = api.mtd[monws_name]
        api.LoadInstrument(monws, InstrumentName='DNS')
        api.CopyLogs(InputWorkspace=outws_name, OutputWorkspace=monws_name, MergeStrategy='MergeReplaceExisting')

        self.setProperty("OutputWorkspace", outws)
        self.log().debug('LoadDNSLegacy: data are loaded into the workspace ' + outws_name)

        return
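
This version does not divide the data itself; it creates a parallel '_NORM' workspace filled with the monitor counts or the duration. A hedged sketch of how such a pair could be combined later with Mantid's Divide algorithm; the workspace names are illustrative and the division itself is not part of the loader:

# Divide the data workspace by the normalization workspace produced above;
# Mantid propagates the errors of both workspaces. Names are illustrative.
from mantid.simpleapi import Divide

normalized = Divide(LHSWorkspace='dns_data',
                    RHSWorkspace='dns_data_NORM',
                    OutputWorkspace='dns_data_normalized')
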
Example #5
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        outws_name = self.getPropertyValue("OutputWorkspace")
        norm = self.getPropertyValue("Normalization")

        # load data array from the given file
        data_array = np.loadtxt(filename)
        if not data_array.size:
            message = "File " + filename + " does not contain any data!"
            self.log().error(message)
            raise RuntimeError(message)

        # load run information
        metadata = DNSdata()
        try:
            metadata.read_legacy(filename)
        except RuntimeError as err:
            message = "Error loading file " + filename + ": " + str(err)
            self.log().error(message)
            raise RuntimeError(message)

        # load polarisation table and determine polarisation
        poltable = self.get_polarisation_table()
        pol = self.get_polarisation(metadata, poltable)
        if not pol:
            pol = ['0', 'undefined']
            self.log().warning("Failed to determine polarisation for " + filename +
                               ". Values have been set to undefined.")
        ndet = 24
        # this is needed to be able to use ConvertToMD
        dataX = np.zeros(2*ndet)
        dataX.fill(metadata.wavelength + 0.00001)
        dataX[::2] -= 0.000002
        # data normalization
        factor = 1.0
        yunit = "Counts"
        ylabel = "Intensity"
        if norm == 'duration':
            factor = metadata.duration
            yunit = "Counts/s"
            ylabel = "Intensity normalized to duration"
            if factor <= 0:
                raise RuntimeError("Duration is invalid for file " + filename + ". Cannot normalize.")
        if norm == 'monitor':
            factor = metadata.monitor_counts
            yunit = "Counts/monitor"
            ylabel = "Intensity normalized to monitor"
            if factor <= 0:
                raise RuntimeError("Monitor counts are invalid for file " + filename + ". Cannot normalize.")
        # set values for dataY and dataE
        dataY = data_array[0:ndet, 1:]/factor
        dataE = np.sqrt(data_array[0:ndet, 1:])/factor
        # create workspace
        api.CreateWorkspace(OutputWorkspace=outws_name, DataX=dataX, DataY=dataY,
                            DataE=dataE, NSpec=ndet, UnitX="Wavelength")
        outws = api.AnalysisDataService.retrieve(outws_name)
        api.LoadInstrument(outws, InstrumentName='DNS', RewriteSpectraMap=True)

        run = outws.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time),
                                   DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(outws, "bank0", X=0, Y=1, Z=0, Angle=metadata.deterota)
        # add sample log Ei and wavelength
        api.AddSampleLog(outws, LogName='Ei', LogText=str(metadata.incident_energy),
                         LogType='Number', LogUnit='meV')
        api.AddSampleLog(outws, LogName='wavelength', LogText=str(metadata.wavelength),
                         LogType='Number', LogUnit='Angstrom')
        # add other sample logs
        api.AddSampleLog(outws, LogName='deterota', LogText=str(metadata.deterota),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, 'mon_sum',
                         LogText=str(float(metadata.monitor_counts)), LogType='Number')
        api.AddSampleLog(outws, LogName='duration', LogText=str(metadata.duration),
                         LogType='Number', LogUnit='Seconds')
        api.AddSampleLog(outws, LogName='huber', LogText=str(metadata.huber),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, LogName='omega', LogText=str(metadata.huber - metadata.deterota),
                         LogType='Number', LogUnit='Degrees')
        api.AddSampleLog(outws, LogName='T1', LogText=str(metadata.temp1),
                         LogType='Number', LogUnit='K')
        api.AddSampleLog(outws, LogName='T2', LogText=str(metadata.temp2),
                         LogType='Number', LogUnit='K')
        api.AddSampleLog(outws, LogName='Tsp', LogText=str(metadata.tsp),
                         LogType='Number', LogUnit='K')
        # flipper
        api.AddSampleLog(outws, LogName='flipper_precession',
                         LogText=str(metadata.flipper_precession_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='flipper_z_compensation',
                         LogText=str(metadata.flipper_z_compensation_current),
                         LogType='Number', LogUnit='A')
        flipper_status = 'OFF'    # flipper OFF
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'    # flipper ON
        api.AddSampleLog(outws, LogName='flipper',
                         LogText=flipper_status, LogType='String')
        # coil currents
        api.AddSampleLog(outws, LogName='C_a', LogText=str(metadata.a_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_b', LogText=str(metadata.b_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_c', LogText=str(metadata.c_coil_current),
                         LogType='Number', LogUnit='A')
        api.AddSampleLog(outws, LogName='C_z', LogText=str(metadata.z_coil_current),
                         LogType='Number', LogUnit='A')
        # type of polarisation
        api.AddSampleLog(outws, 'polarisation', LogText=pol[0], LogType='String')
        api.AddSampleLog(outws, 'polarisation_comment', LogText=str(pol[1]), LogType='String')
        # slits
        api.AddSampleLog(outws, LogName='slit_i_upper_blade_position',
                         LogText=str(metadata.slit_i_upper_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, LogName='slit_i_lower_blade_position',
                         LogText=str(metadata.slit_i_lower_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, LogName='slit_i_left_blade_position',
                         LogText=str(metadata.slit_i_left_blade_position),
                         LogType='Number', LogUnit='mm')
        api.AddSampleLog(outws, 'slit_i_right_blade_position',
                         LogText=str(metadata.slit_i_right_blade_position),
                         LogType='Number', LogUnit='mm')

        # add information on whether the data are normalized (duration/monitor/no):
        api.AddSampleLog(outws, LogName='normalized', LogText=norm, LogType='String')

        outws.setYUnit(yunit)
        outws.setYUnitLabel(ylabel)

        self.setProperty("OutputWorkspace", outws)
        self.log().debug('LoadDNSLegacy: data are loaded into the workspace ' + outws_name)

        return
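
As in the other normalizing versions, the errors assume Poisson counting statistics: the error of counts/factor is sqrt(counts)/factor. A minimal numpy sketch with made-up numbers:

# Poisson error propagation under a constant normalization factor.
import numpy as np

counts = np.array([[100., 400., 900.]])   # illustrative raw detector counts
factor = 60.0                             # e.g. a duration of 60 seconds

dataY = counts / factor                   # normalized intensity: ~[1.67, 6.67, 15.0]
dataE = np.sqrt(counts) / factor          # propagated error:     ~[0.17, 0.33, 0.50]
print(dataY, dataE)
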
Example #6
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        pol = self.getPropertyValue("Polarisation")

        # load data array from the given file
        data_array = np.loadtxt(filename)
        ndet = 24
        dataX = np.zeros(ndet)
        dataY = data_array[0:ndet, 1:]
        dataE = np.sqrt(dataY)
        # create workspace
        __temporary_workspace__ = api.CreateWorkspace(DataX=dataX, \
                DataY=dataY, DataE=dataE, NSpec=ndet, UnitX="Wavelength")
        api.LoadInstrument(__temporary_workspace__, InstrumentName='DNS')

        # load run information
        metadata = DNSdata()
        metadata.read_legacy(filename)
        run = __temporary_workspace__.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time), \
                    DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)
        #run.addProperty('dur_secs', str(metadata.duration), True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(__temporary_workspace__, \
                "bank0", X=0, Y=1, Z=0, Angle=metadata.deterota)
        # add sample log Ei and wavelength
        api.AddSampleLog(__temporary_workspace__, \
                'Ei', LogText=str(metadata.incident_energy), \
                LogType='Number')
        api.AddSampleLog(__temporary_workspace__, \
                'wavelength', LogText=str(metadata.wavelength), \
                LogType='Number')
        # add other sample logs
        api.AddSampleLog(__temporary_workspace__, 'deterota', \
                LogText=str(metadata.deterota), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'mon_sum', \
                LogText=str(metadata.monitor_counts), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'duration', \
                LogText=str(metadata.duration), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'huber', \
                LogText=str(metadata.huber), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'T1', \
                LogText=str(metadata.t1), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'T2', \
                LogText=str(metadata.t2), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'Tsp', \
                LogText=str(metadata.tsp), LogType='Number')
        # flipper
        api.AddSampleLog(__temporary_workspace__, 'flipper_precession', \
                LogText=str(metadata.flipper_precession_current), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'flipper_z_compensation', \
                LogText=str(metadata.flipper_z_compensation_current), LogType='Number')
        flipper_status = 'OFF'
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'
        api.AddSampleLog(__temporary_workspace__, 'flipper', \
                LogText=flipper_status, LogType='String')
        # coil currents
        api.AddSampleLog(__temporary_workspace__, 'C_a', \
                LogText=str(metadata.a_coil_current), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'C_b', \
                LogText=str(metadata.b_coil_current), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'C_c', \
                LogText=str(metadata.c_coil_current), LogType='Number')
        api.AddSampleLog(__temporary_workspace__, 'C_z', \
                LogText=str(metadata.z_coil_current), LogType='Number')
        # type of polarisation
        api.AddSampleLog(__temporary_workspace__, 'polarisation', \
                LogText=pol, LogType='String')

        self.setProperty("OutputWorkspace", __temporary_workspace__)
        self.log().debug('LoadDNSLegacy: OK')
        api.DeleteWorkspace(__temporary_workspace__)

        return
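
A hedged usage sketch, assuming the algorithm is registered with Mantid's simple API as LoadDNSLegacy and exposes the Filename, OutputWorkspace and Normalization properties read in most of the versions above (the exact property set varies between them); the file name is hypothetical:

from mantid.simpleapi import LoadDNSLegacy

# Load a legacy DNS data file and normalize to the monitor counts.
ws = LoadDNSLegacy(Filename='dn000000.d_dat',        # hypothetical legacy data file
                   OutputWorkspace='dns_ws',
                   Normalization='monitor')
# the loader adds the wavelength as a sample log
print(ws.getRun().getProperty('wavelength').value)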