Example #1
0
 def testGETS(self):
     w = Load('ADARAMonitors.nxs')
     run = w.getRun()
     run.setStartAndEndTime(DateAndTime("2015-01-27T11:00:00"),
                            DateAndTime("2015-01-27T11:57:51"))
     LoadInstrument(Workspace=w,
                    InstrumentName='SEQUOIA',
                    RewriteSpectraMap=False)
     AddSampleLog(Workspace=w,
                  LogName='vChTrans',
                  LogText='1',
                  LogType='Number Series')
     AddSampleLog(Workspace=w,
                  LogName='EnergyRequest',
                  LogText='20',
                  LogType='Number Series')
     res = GetEiT0atSNS(w)
     self.assertAlmostEqual(res[0], 20.09, places=2)
     self.assertAlmostEqual(res[1], 30.415, places=2)
     try:
         res = GetEiT0atSNS(w, 0.1)
     except Exception as e:
         s = "Could not get Ei, and this is not a white beam run\nNo peak found for the monitor with spectra num: 2"
         self.assertEqual(str(e).find(s), 0)
     DeleteWorkspace(w)
Example #2
0
 def test_event_list_addEventQuickly(self):
     el = EventList()
     el.addEventQuickly(float(0.123), DateAndTime(42))
     self.assertEqual(el.getNumberEvents(), 1)
     self.assertEqual(el.getEventType(), EventType.TOF)
     self.assertEqual(el.getTofs()[0], float(0.123))
     self.assertEqual(el.getPulseTimes()[0], DateAndTime(42))
Example #3
0
 def setUp(self):
     if self.__class__._run is None:
         run = Run()
         run.addProperty("gd_prtn_chrg", 10.05, True)
         run.addProperty("nspectra", self._nspec, True)
         run.setStartAndEndTime(DateAndTime("2008-12-18T17:58:38"),
                                DateAndTime("2008-12-18T17:59:40"))
         self.__class__._run = run
Example #4
0
 def setUp(self):
     if self.__class__._expt_ws is None:
         alg = run_algorithm('CreateWorkspace',
                             DataX=[1, 2, 3, 4, 5],
                             DataY=[1, 2, 3, 4, 5],
                             NSpec=self._nspec,
                             child=True)
         ws = alg.getProperty("OutputWorkspace").value
         ws.run().addProperty("gd_prtn_chrg", 10.05, True)
         ws.run().addProperty("nspectra", self._nspec, True)
         ws.run().setStartAndEndTime(DateAndTime("2008-12-18T17:58:38"), DateAndTime("2008-12-18T17:59:40"))
         self.__class__._expt_ws = ws
Example #5
0
    def test_convert_to_np(self):
        dt = DateAndTime(598471118000000000)
        dt_np = timedelta64(dt.total_nanoseconds(), 'ns') + datetime64('1990-01-01T00:00')

        # convert both into ISO8601 strings up to the seconds
        dt = str(dt)[:19]
        dt_np = str(dt_np)[:19]
        self.assertEqual(dt, dt_np)
Example #6
0
    def test_convert_to_np(self):
        dt = DateAndTime(598471118000000000)
        dt_np = timedelta64(dt.totalNanoseconds(),
                            'ns') + datetime64('1990-01-01T00:00')

        # convert both into ISO8601 strings up to the seconds
        dt = str(dt)[:19]
        dt_np = str(dt_np)[:19]
        self.assertEqual(dt, dt_np)
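Examples #5 and #6 work because Mantid's DateAndTime stores a count of nanoseconds since the 1990-01-01T00:00 epoch. A minimal round-trip sketch built on that fact (numpy only; the helper and constant names are illustrative, not Mantid API):

import numpy as np
from mantid.kernel import DateAndTime

MANTID_EPOCH = np.datetime64('1990-01-01T00:00')

def to_datetime64(dt):
    # DateAndTime -> numpy.datetime64, via the stored nanosecond count
    return MANTID_EPOCH + np.timedelta64(dt.totalNanoseconds(), 'ns')

# 598471118000000000 ns past the epoch lands on 2008-12-18T17:58:38,
# the timestamp used by the construction tests further down
dt64 = to_datetime64(DateAndTime(598471118000000000))
assert str(dt64)[:19] == '2008-12-18T17:58:38'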
Example #7
0
 def test_event_list_addEventQuickly(self):
     el = EventList()
     el.addEventQuickly(float(0.123), DateAndTime(42))
     self.assertEqual(el.getNumberEvents(), 1)
     self.assertEqual(el.getEventType(), EventType.TOF)
     self.assertEqual(el.getTofs()[0], float(0.123))
     self.assertEqual(el.getPulseTimes()[0], DateAndTime(42))
     self.assertEqual(el.getPulseTimesAsNumpy()[0],
                      gps_epoch_plus_42_nanoseconds)
Example #8
0
    def addSampleLogEntry(self, log_name, ws, start_time, extra_time_shift):
        number_of_times = 10
        for i in range(0, number_of_times):
            val = random.randrange(0, 10, 1)
            date = DateAndTime(start_time)
            date += int(i * 1e9)  # add i seconds, expressed in nanoseconds
            date += int(extra_time_shift * 1e9)
            AddTimeSeriesLog(ws, Name=log_name, Time=str(date).strip(), Value=val)
Example #9
0
def addSampleLogEntry(log_name, ws, start_time, extra_time_shift):
    number_of_times = 10
    for i in range(0, number_of_times):
        val = random.randrange(0, 10, 1)
        date = DateAndTime(start_time)
        date += int(i * 1e9)  # add i seconds, expressed in nanoseconds
        date += int(extra_time_shift * 1e9)
        AddTimeSeriesLog(ws, Name=log_name, Time=str(date).strip(), Value=val)
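A minimal driver for the helper above, assuming mantid.simpleapi is importable; the sample workspace and log name are illustrative choices, not taken from the original tests:

from mantid.simpleapi import CreateSampleWorkspace

ws = CreateSampleWorkspace(OutputWorkspace="log_demo")
addSampleLogEntry("temperature", ws, "2010-01-01T00:10:00", 0.0)
# expect 10 entries spaced one second apart
print(ws.run()["temperature"].size())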
Example #10
0
 def test_event_list_addWeightedEventQuickly(self):
     el = EventList()
     el.switchTo(EventType.WEIGHTED)
     el.addWeightedEventQuickly(float(0.123), 1.0, 0.1, DateAndTime(42))
     self.assertEqual(el.getEventType(), EventType.WEIGHTED)
     self.assertEqual(el.getTofs()[0], float(0.123))
     self.assertEqual(el.getPulseTimes()[0], DateAndTime(42))
     self.assertEqual(el.getPulseTimesAsNumpy()[0],
                      gps_epoch_plus_42_nanoseconds)
     self.assertEqual(el.getWeights()[0], 1.0)
Example #11
0
def provide_workspace_with_proton_charge(is_event=True):
    sample_name = "CreateSampleWorkspace"
    sample_options = {
        "OutputWorkspace": "dummy",
        "NumBanks": 1,
        "BankPixelWidth": 2
    }
    if is_event:
        sample_options.update({"WorkspaceType": "Event"})
    else:
        sample_options.update({"WorkspaceType": "Histogram"})

    sample_alg = create_unmanaged_algorithm(sample_name, **sample_options)
    sample_alg.execute()
    workspace = sample_alg.getProperty("OutputWorkspace").value

    # Provide a proton charge
    log_name = "AddTimeSeriesLog"
    log_options = {
        "Workspace": workspace,
        "Name": "proton_charge",
        "Type": "double"
    }
    log_alg = create_unmanaged_algorithm(log_name, **log_options)
    time = DateAndTime("2010-01-01T00:10:00")
    for index in range(0, 10):
        time += 1000000000
        value = 1.0
        log_alg.setProperty("Time", str(time))
        log_alg.setProperty("Value", value)
        log_alg.execute()
    return workspace
Example #12
0
    def test_that_can_extract_information_for_added_histogram_data_and_nexus_format(
            self):
        # Arrange
        # The file is a single period, histogram-based and added
        file_name = "AddedHistogram-add"
        factory = SANSFileInformationFactory()

        # Act
        file_information = factory.create_sans_file_information(file_name)

        # Assert
        self.assertEqual(file_information.get_number_of_periods(), 1)
        self.assertEqual(file_information.get_date(),
                         DateAndTime("2013-10-25T14:21:19"))
        self.assertEqual(file_information.get_instrument(),
                         SANSInstrument.SANS2D)
        self.assertEqual(file_information.get_type(),
                         FileType.ISIS_NEXUS_ADDED)
        self.assertEqual(file_information.get_run_number(), 22024)
        self.assertFalse(file_information.is_event_mode())
        self.assertTrue(file_information.is_added_data())
        self.assertEqual(file_information.get_width(), 8.0)
        self.assertEqual(file_information.get_height(), 8.0)
        self.assertEqual(file_information.get_thickness(), 1.0)
        self.assertEqual(file_information.get_shape(), SampleShape.DISC)
Example #13
0
    def test_that_can_extract_information_for_LARMOR_added_event_data_and_multi_period_and_nexus_format(
            self):
        # Arrange
        # The file is multi-period, event-based and added
        file_name = "AddedEvent-add"
        factory = SANSFileInformationFactory()

        # Act
        file_information = factory.create_sans_file_information(file_name)

        # Assert
        self.assertEqual(file_information.get_number_of_periods(), 4)
        self.assertEqual(file_information.get_date(),
                         DateAndTime("2016-10-12T04:33:47"))
        self.assertEqual(file_information.get_instrument(),
                         SANSInstrument.LARMOR)
        self.assertEqual(file_information.get_type(),
                         FileType.ISIS_NEXUS_ADDED)
        self.assertEqual(file_information.get_run_number(), 13065)
        self.assertTrue(file_information.is_event_mode())
        self.assertTrue(file_information.is_added_data())
        self.assertEqual(file_information.get_width(), 6.0)
        self.assertEqual(file_information.get_height(), 8.0)
        self.assertEqual(file_information.get_thickness(), 1.0)
        self.assertEqual(file_information.get_shape(), SampleShape.FLAT_PLATE)
Example #14
0
    def test_that_can_extract_information_from_file_for_SANS2D_single_period_and_ISISNexus(
            self):
        # Arrange
        # The file is a single period
        file_name = "SANS2D00022024"
        factory = SANSFileInformationFactory()

        # Act
        file_information = factory.create_sans_file_information(file_name)

        # Assert
        self.assertEqual(file_information.get_number_of_periods(), 1)
        self.assertEqual(file_information.get_date(),
                         DateAndTime("2013-10-25T14:21:19"))
        self.assertEqual(file_information.get_instrument(),
                         SANSInstrument.SANS2D)
        self.assertEqual(file_information.get_type(), FileType.ISISNexus)
        self.assertEqual(file_information.get_run_number(), 22024)
        self.assertFalse(file_information.is_event_mode())
        self.assertFalse(file_information.is_added_data())
        self.assertEqual(file_information.get_width(), 8.0)
        self.assertEqual(file_information.get_height(), 8.0)
        self.assertEqual(file_information.get_thickness(), 1.0)
        self.assertIs(file_information.get_shape(),
                      SampleShape.CylinderAxisAlong)
Example #15
0
    def get_raw_measurement_time(date_input, time_input):
        year = date_input[7:(7 + 4)]
        day = date_input[0:2]
        month_string = date_input[3:6]
        month = get_month(month_string)

        date_and_time_string = year + "-" + month + "-" + day + "T" + time_input
        return DateAndTime(date_and_time_string)
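Given the slicing above, date_input is evidently a DD-MON-YYYY string. A hypothetical call, assuming get_month maps "DEC" to "12":

# get_raw_measurement_time("18-DEC-2008", "17:58:38")
#   builds "2008-12-18T17:58:38"
#   and returns DateAndTime("2008-12-18T17:58:38")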
Example #16
0
def add_log(ws, number_of_times, log_name, start_time):
    alg_log = AlgorithmManager.create("AddTimeSeriesLog")
    alg_log.initialize()
    alg_log.setChild(True)
    alg_log.setProperty("Workspace", ws)
    alg_log.setProperty("Name", log_name)

    # Add the data
    if log_name == "good_frames":
        convert_to_type = int
    else:
        convert_to_type = float

    for i in range(0, number_of_times):
        date = DateAndTime(start_time)
        date += int(i * 1e9)  # Add nanoseconds
        alg_log.setProperty("Time", str(date).strip())
        alg_log.setProperty("Value", convert_to_type(i))
        alg_log.execute()

    return alg_log.getProperty("Workspace").value
Example #17
0
 def _get_date_and_run_number_added_nexus(file_name):
     with h5.File(file_name, 'r') as h5_file:
         keys = list(h5_file.keys())
         first_entry = h5_file[keys[0]]
         logs = first_entry["logs"]
         # Start time
         start_time = logs["start_time"]
         start_time_value = DateAndTime(start_time["value"][0])
         # Run number
         run_number = logs["run_number"]
         run_number_value = int(run_number["value"][0])
     return start_time_value, run_number_value
Example #18
0
def add_log(ws, number_of_times, log_name, start_time):
    alg_log = AlgorithmManager.create("AddTimeSeriesLog")
    alg_log.initialize()
    alg_log.setChild(True)
    alg_log.setProperty("Workspace", ws)
    alg_log.setProperty("Name", log_name)

    # Add the data
    if log_name == "good_frames":
        convert_to_type = int
    else:
        convert_to_type = float

    for i in range(0, number_of_times):
        date = DateAndTime(start_time)
        date += int(i * 1e9)  # Add nanoseconds
        alg_log.setProperty("Time", date.__str__().strip())
        alg_log.setProperty("Value", convert_to_type(i))
        alg_log.execute()

    return alg_log.getProperty("Workspace").value
Example #19
0
def provide_workspace_with_proton_charge(output_name, is_event=True):
    ws_type = "Event" if is_event else "Histogram"

    # AddTimeSeriesLog forces us to store in ADS
    dummy_ws = CreateSampleWorkspace(NumBanks=1, BankPixelWidth=2,
                                     WorkspaceType=ws_type, OutputWorkspace=output_name)
    # Provide a proton charge
    time = DateAndTime("2010-01-01T00:10:00")
    value = 1.0
    for index in range(0, 10):
        time += 1000000000
        AddTimeSeriesLog(Workspace=dummy_ws, Name="proton_charge", Type="double",
                         Time=str(time), Value=value)
    return dummy_ws
Example #20
0
    def test_convert_from_np(self):
        if LooseVersion(numpy.__version__) < LooseVersion('1.9'):
            dt_np = datetime64('2000-01-01T00:00Z')
        else:  # newer numpy only uses UTC and warns on specifying timezones
            dt_np = datetime64('2000-01-01T00:00')
        dt = DateAndTime(dt_np)

        # convert both into ISO8601 strings up to the minutes b/c time was only specified that much
        dt = str(dt)
        dt_np = dt_np.item().strftime('%Y-%m-%dT%H:%M')
        length = min(len(dt), len(dt_np))
        dt = dt[:length]
        dt_np = dt_np[:length]
        self.assertEqual(dt, dt_np)
Example #21
0
 def test_timestd(self):
     """
     Check getTimeAveragedStd for an equally spaced float time series.
     """
     run = Run()
     start_time = DateAndTime("2008-12-18T17:58:38")
     nanosec = 1000000000
     # === Float type ===
     temp1 = FloatTimeSeriesProperty("TEMP1")
     vals = np.arange(10) * 2.
     for i in range(10):
         temp1.addValue(start_time + i * nanosec, vals[i])
     run.addProperty(temp1.name, temp1, True)
     # ignore the last value
     expected = vals[:-1].std()
     self.assertEqual(run.getTimeAveragedStd("TEMP1"), expected)
Example #22
0
    def setUp(self):
        if self._test_ws is not None:
            return
        alg = run_algorithm('CreateWorkspace',
                            DataX=[1, 2, 3, 4, 5],
                            DataY=[1, 2, 3, 4, 5],
                            NSpec=1,
                            child=True)
        ws = alg.getProperty("OutputWorkspace").value
        run = ws.run()

        start_time = DateAndTime("2008-12-18T17:58:38")
        nanosec = 1000000000
        # === Float type ===
        temp1 = FloatTimeSeriesProperty("TEMP1")
        tempvalue = -0.00161
        for i in range(self._ntemp):
            temp1.addValue(start_time + i * nanosec, tempvalue)
        run.addProperty(temp1.name, temp1, True)

        # === Int type ===
        raw_frames = Int64TimeSeriesProperty("raw_frames")
        values = [17, 1436, 2942, 4448, 5955, 7461]
        for i, value in enumerate(values):
            raw_frames.addValue(start_time + i * nanosec, value)
        run.addProperty(raw_frames.name, raw_frames, True)

        # === String type ===
        icp_event = StringTimeSeriesProperty("icp_event")
        values = [
            'CHANGE_PERIOD 1',
            'START_COLLECTION PERIOD 1 GF 0 RF 0 GUAH 0.000000', 'BEGIN',
            'STOP_COLLECTION PERIOD 1 GF 1053 RF 1053 GUAH 0.000000 DUR 22'
        ]
        for i, value in enumerate(values):
            icp_event.addValue(start_time + i * nanosec, value)
        run.addProperty(icp_event.name, icp_event, True)

        # === Boolean type ===
        period_1 = BoolTimeSeriesProperty("period 1")
        values = [True]
        for i, value in enumerate(values):
            period_1.addValue(start_time + i * nanosec, value)
        run.addProperty(period_1.name, period_1, True)

        self.__class__._test_ws = ws
Example #23
0
    def test_that_get_valid_to_date_from_idf_string(self):
        # Arrange
        idf_string = '<?xml version="1.0" encoding="UTF-8" ?>' \
                     '<!-- For help on the notation used to specify an Instrument Definition File ' \
                     'see http://www.mantidproject.org/IDF -->' \
                     '<instrument xmlns="http://www.mantidproject.org/IDF/1.0" ' \
                     '            xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' \
                     '            xsi:schemaLocation="http://www.mantidproject.org/IDF/1.0 http://schema.mantidproject.org/IDF/1.0/IDFSchema.xsd" ' \
                     '            name="PEARL" valid-from   ="1900-01-31 23:59:59" ' \
                     '            valid-to     ="2011-05-01 23:59:50" ' \
                     '            last-modified="2008-09-17 05:00:00">' \
                     '</instrument>'

        # Act
        extracted_time = get_valid_to_time_from_idf_string(idf_string)
        # Assert
        self.assertEqual(extracted_time, DateAndTime("2011-05-01 23:59:50"))
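get_valid_to_time_from_idf_string itself is not shown in this snippet; a minimal sketch of the behaviour being tested, written with xml.etree and not claiming to be Mantid's implementation:

import xml.etree.ElementTree as ET
from mantid.kernel import DateAndTime

def get_valid_to_time_from_idf_string_sketch(idf_string):
    # valid-to is a plain (un-namespaced) attribute on the root
    # <instrument> element, so a direct attribute lookup suffices
    root = ET.fromstring(idf_string)
    return DateAndTime(root.attrib["valid-to"])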
Example #24
0
    def test_that_can_extract_information_from_file_for_LOQ_single_period_and_raw_format(self):
        # Arrange
        # The file is a single period
        file_name = "LOQ48094"
        factory = SANSFileInformationFactory()

        # Act
        file_information = factory.create_sans_file_information(file_name)

        # Assert
        self.assertEqual(file_information.get_number_of_periods(), 1)
        self.assertEqual(file_information.get_date(), DateAndTime("2008-12-18T11:20:58"))
        self.assertEqual(file_information.get_instrument(), SANSInstrument.LOQ)
        self.assertEqual(file_information.get_type(), FileType.ISIS_RAW)
        self.assertEqual(file_information.get_run_number(), 48094)
        self.assertFalse(file_information.is_added_data())
        self.assertEqual(file_information.get_width(), 8.0)
        self.assertEqual(file_information.get_height(), 8.0)
        self.assertEqual(file_information.get_thickness(), 1.0)
        self.assertEqual(file_information.get_shape(), SampleShape.DISC)
Example #25
0
    def test_that_can_extract_information_from_file_for_LARMOR_multi_period_event_and_nexus_format(self):
        # Arrange
        # The file is a multi period and event-based
        file_name = "LARMOR00003368"
        factory = SANSFileInformationFactory()

        # Act
        file_information = factory.create_sans_file_information(file_name)

        # Assert
        self.assertEqual(file_information.get_number_of_periods(), 4)
        self.assertEqual(file_information.get_date(), DateAndTime("2015-06-05T14:43:49"))
        self.assertEqual(file_information.get_instrument(), SANSInstrument.LARMOR)
        self.assertEqual(file_information.get_type(), FileType.ISIS_NEXUS)
        self.assertEqual(file_information.get_run_number(), 3368)
        self.assertTrue(file_information.is_event_mode())
        self.assertFalse(file_information.is_added_data())
        self.assertEqual(file_information.get_width(), 8.0)
        self.assertEqual(file_information.get_height(), 8.0)
        self.assertEqual(file_information.get_thickness(), 2.0)
        self.assertEqual(file_information.get_shape(), SampleShape.FLAT_PLATE)
Example #26
0
    def PyExec(self):
        """ Main execution body
        """
        wm = self.getProperty("MonitorWorkspace").value
        i = wm.getInstrument()

        if numpy.mean(wm.getRun()['vChTrans'].value) == 2:
            Ei = numpy.nan
            Tzero = numpy.nan
        else:
            EGuess = self.getProperty("IncidentEnergyGuess").value
            if EGuess < 0:
                try:
                    EGuess = wm.getRun()['EnergyRequest'].getStatistics().mean
                except Exception:
                    raise RuntimeError(
                        "No energy guess was given or could be found in sample logs"
                    )
            try:
                # handle workspaces with more than 2 monitors
                sp1 = -1
                sp2 = -1
                nsp = wm.getNumberHistograms()
                if nsp < 2:
                    raise ValueError("There are fewer than 2 monitors")
                for sp in range(nsp):
                    if wm.getSpectrum(sp).getDetectorIDs()[0] == -int(
                            i.getNumberParameter('ei-mon1-spec')[0]):
                        sp1 = sp
                    if wm.getSpectrum(sp).getDetectorIDs()[0] == -int(
                            i.getNumberParameter('ei-mon2-spec')[0]):
                        sp2 = sp
                if sp1 == -1:
                    raise RuntimeError(
                        "Could not find spectrum for the first monitor")
                if sp2 == -1:
                    raise RuntimeError(
                        "Could not find spectrum for the second monitor")
                #change frame for monitors. ARCS monitors would be in the first frame for Ei>10meV
                so = i.getSource().getPos()
                m1 = wm.getDetector(sp1).getPos()
                m2 = wm.getDetector(sp2).getPos()
                run_starttime = wm.getRun().startTime()
                from mantid.kernel import DateAndTime
                SNS_DAS_changed_time_wrapping = DateAndTime(
                    "2019-06-15T00:00:00")
                if run_starttime < SNS_DAS_changed_time_wrapping:
                    v = 437.4 * numpy.sqrt(
                        wm.getRun()['EnergyRequest'].getStatistics().mean)
                    t1 = m1.distance(so) * 1e6 / v
                    t2 = m2.distance(so) * 1e6 / v
                    t1f = int(t1 * 60e-6)  #frame number for monitor 1
                    t2f = int(t2 * 60e-6)  #frame number for monitor 2
                    wtemp = mantid.simpleapi.ChangeBinOffset(
                        wm, t1f * 16667, sp1, sp1)
                    wtemp = mantid.simpleapi.ChangeBinOffset(
                        wtemp, t2f * 16667, sp2, sp2)
                else:
                    wtemp = wm
                maxtof = wtemp.readX(0)[-1]
                period = 1.e6 / 60
                Nmax = int(maxtof / period) + 1
                for frame in range(1, Nmax + 1):
                    tmin = min(frame * period - 30., maxtof)
                    tmax = min(frame * period + 30., maxtof)
                    if tmin < tmax:
                        mantid.simpleapi.MaskBins(InputWorkspace=wtemp,
                                                  OutputWorkspace=wtemp,
                                                  XMin=tmin,
                                                  XMax=tmax)
                wtemp = mantid.simpleapi.Rebin(InputWorkspace=wtemp,
                                               Params="1",
                                               PreserveEvents=True)
                #Run GetEi algorithm
                alg = mantid.simpleapi.GetEi(InputWorkspace=wtemp,
                                             Monitor1Spec=sp1 + 1,
                                             Monitor2Spec=sp2 + 1,
                                             EnergyEstimate=EGuess)
                Ei = alg[0]
                Tzero = alg[3]  #Extract incident energy and T0
                mantid.simpleapi.DeleteWorkspace(wtemp)
            except Exception as e:
                raise RuntimeError(
                    "Could not get Ei, and this is not a white beam run\n" +
                    str(e))
        self.setProperty("Ei", Ei)
        self.setProperty("T0", Tzero)
Example #27
0
def get_date_for_added_workspace(file_name):
    value = get_top_level_nexus_entry(file_name, "start_time")
    return DateAndTime(value)
Example #28
0
def get_date_for_isis_nexus(file_name):
    value = get_top_level_nexus_entry(file_name, START_TIME)
    return DateAndTime(value)
Example #29
0
 def get_date(self):
     return DateAndTime(self._date)
Example #30
0
 def test_construction_with_total_nano_seconds(self):
     dt = DateAndTime(598471118000000000)
     self.assertEqual(self.iso_str_plus_space, str(dt))
Example #31
0
 def test_construction_with_ISO_string_produces_expected_object(self):
     dt = DateAndTime(self.iso_str)
     self.assertEqual(self.iso_str_plus_space, str(dt))
     self.assertEqual(dt.totalNanoseconds(), 598471118000000000)
Example #32
0
 def test_construction_with_ISO_string_produces_expected_object(self):
     dt = DateAndTime(self.iso_str)
     self.assertEqual(self.iso_str_plus_space, str(dt))
     self.assertEqual(dt.totalNanoseconds(), 598471118000000000)
Example #33
0
    def _update_content(self):
        """
            Get the job status from the compute resource and
            update the job table content.
        """
        self._fill_in_defaults()
        
        user = str(self._content.username_edit.text())
        pwd = str(self._content.password_edit.text())
        if len(user) == 0 or len(pwd) == 0:
            util.set_valid(self._content.username_edit, False)
            util.set_valid(self._content.password_edit, False)
            return
        else:
            self._settings.cluster_user = user
            self._settings.cluster_pass = pwd
            util.set_valid(self._content.username_edit, True)
            util.set_valid(self._content.password_edit, True)
        alg = AlgorithmManager.create("Authenticate")
        alg.initialize()
        alg.setProperty("ComputeResource", str(self._settings.compute_resource))
        alg.setProperty("UserName", str(self._settings.cluster_user))
        alg.setProperty("Password", str(self._settings.cluster_pass))
        alg.execute()
        
        alg = AlgorithmManager.create("QueryAllRemoteJobs")
        alg.initialize()
        alg.setProperty("ComputeResource", str(self._settings.compute_resource))
        alg.execute()
        job_id = alg.getProperty("JobId").value
        job_status = alg.getProperty("JobStatusString").value
        job_name = alg.getProperty("JobName").value
        job_trans_id = alg.getProperty("TransID").value
        
        njobs = len(job_name)
        job_start = alg.getProperty("StartDate").value
        job_end = alg.getProperty("CompletionDate").value
                
        job_list = list(zip(job_id, job_status, job_name, job_start, job_end, job_trans_id))
        
        self._clear_table()
        self._content.job_table.setSortingEnabled(False)
        self._content.job_table.setRowCount(len(job_list))
        unavailable = DateAndTime(0)
        unavailable.setToMinimum()

        # Make sure that only jobs more recent than the selected cutoff are displayed
        oldest = DateAndTime(str(self._content.date_time_edit.dateTime().toString(QtCore.Qt.ISODate)))
        for i in range(len(job_list)):
            end_time = job_list[i][4]
            if end_time == '':
                job_end = unavailable
            else:
                job_end = DateAndTime(end_time)
            if job_end > unavailable and job_end < oldest:
                self._content.job_table.setRowHidden(i, True)
                continue
            self._content.job_table.setRowHidden(i, False)

            # Job ID
            item = QtGui.QTableWidgetItem(str(job_list[i][0]))
            item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
            self._content.job_table.setItem(i, 0, item)
            job_id = str(job_list[i][0])
          
            # Title
            item = QtGui.QTableWidgetItem(str(job_list[i][2]))
            item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
            self._content.job_table.setItem(i, 1, item)
          
            # Status
            item = QtGui.QTableWidgetItem(str(job_list[i][1]))
            item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
            self._content.job_table.setItem(i, 2, item)
            is_running = str(job_list[i][1]).lower() == 'running'
            
            # Start time
            time_displayed = str(job_list[i][3]).replace('T', ' ')
            if DateAndTime(job_list[i][3]) == unavailable:
                time_displayed = ''
            item = QtGui.QTableWidgetItem(time_displayed)
            item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
            self._content.job_table.setItem(i, 3, item)
            
            # Completion time
            time_displayed = end_time.replace('T', ' ')
            if job_end == unavailable:
                time_displayed = ''
            item = QtGui.QTableWidgetItem(time_displayed)
            item.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
            self._content.job_table.setItem(i, 4, item)
            
            # create a cell widget
            btn = QtGui.QPushButton(self._content.job_table)
            if is_running:
                btn.setText('Abort')
                btn.setToolTip('Cleanly abort this job')
            else:
                btn.setText('Remove')
                btn.setToolTip('Remove this job and its temporary files')
            call_back = partial(self._remove_job, is_running=is_running, job_id=job_id, trans_id=job_list[i][5])
            self.connect(btn, QtCore.SIGNAL("clicked()"), call_back)
            self._content.job_table.setCellWidget(i, 5, btn)

        self._content.job_table.setSortingEnabled(True)
        self._content.job_table.sortItems(3, 1)
Example #34
0
    def PyExec(self):
        # Input
        filename = self.getPropertyValue("Filename")
        outws_name = self.getPropertyValue("OutputWorkspace")
        norm = self.getPropertyValue("Normalization")

        # load data array from the given file
        data_array = np.loadtxt(filename)
        if not data_array.size:
            message = "File " + filename + " does not contain any data!"
            self.log().error(message)
            raise RuntimeError(message)
        # sample logs
        logs = {"names": [], "values": [], "units": []}

        # load run information
        metadata = DNSdata()
        try:
            metadata.read_legacy(filename)
        except RuntimeError as err:
            message = "Error of loading of file " + filename + ": " + str(err)
            self.log().error(message)
            raise RuntimeError(message)

        tmp = api.LoadEmptyInstrument(InstrumentName='DNS')
        self.instrument = tmp.getInstrument()
        api.DeleteWorkspace(tmp)

        # load polarisation table and determine polarisation
        poltable = self.get_polarisation_table()
        pol = self.get_polarisation(metadata, poltable)
        if not pol:
            pol = ['0', 'undefined']
            self.log().warning("Failed to determine polarisation for " +
                               filename +
                               ". Values have been set to undefined.")
        ndet = 24
        unitX = "Wavelength"
        if metadata.tof_channel_number < 2:
            dataX = np.zeros(2 * ndet)
            dataX.fill(metadata.wavelength + 0.00001)
            dataX[::2] -= 0.000002
        else:
            unitX = "TOF"

            # get instrument parameters
            l1 = np.linalg.norm(self.instrument.getSample().getPos() -
                                self.instrument.getSource().getPos())
            self.log().notice("L1 = {} m".format(l1))
            dt_factor = float(
                self.instrument.getStringParameter("channel_width_factor")[0])

            # channel width
            dt = metadata.tof_channel_width * dt_factor
            # calculate tof1
            velocity = h / (m_n * metadata.wavelength * 1e-10)  # m/s
            tof1 = 1e+06 * l1 / velocity  # microseconds
            self.log().debug("TOF1 = {} microseconds".format(tof1))
            self.log().debug("Delay time = {} microsecond".format(
                metadata.tof_delay_time))
            # create dataX array
            x0 = tof1 + metadata.tof_delay_time
            self.log().debug("TOF1 = {} microseconds".format(tof1))
            dataX = np.linspace(x0, x0 + metadata.tof_channel_number * dt,
                                metadata.tof_channel_number + 1)

            # sample logs
            logs["names"].extend(
                ["channel_width", "TOF1", "delay_time", "tof_channels"])
            logs["values"].extend([
                dt, tof1, metadata.tof_delay_time, metadata.tof_channel_number
            ])
            logs["units"].extend(
                ["microseconds", "microseconds", "microseconds", ""])
            if metadata.tof_elastic_channel:
                logs["names"].append("EPP")
                logs["values"].append(metadata.tof_elastic_channel)
                logs["units"].append("")
            if metadata.chopper_rotation_speed:
                logs["names"].append("chopper_speed")
                logs["values"].append(metadata.chopper_rotation_speed)
                logs["units"].append("Hz")
            if metadata.chopper_slits:
                logs["names"].append("chopper_slits")
                logs["values"].append(metadata.chopper_slits)
                logs["units"].append("")

        # data normalization
        factor = 1.0
        yunit = "Counts"
        ylabel = "Intensity"
        if norm == 'duration':
            factor = metadata.duration
            yunit = "Counts/s"
            ylabel = "Intensity normalized to duration"
            if factor <= 0:
                raise RuntimeError("Duration is invalid for file " + filename +
                                   ". Cannot normalize.")
        if norm == 'monitor':
            factor = metadata.monitor_counts
            yunit = "Counts/monitor"
            ylabel = "Intensity normalized to monitor"
            if factor <= 0:
                raise RuntimeError("Monitor counts are invalid for file " +
                                   filename + ". Cannot normalize.")
        # set values for dataY and dataE
        dataY = data_array[0:ndet, 1:] / factor
        dataE = np.sqrt(data_array[0:ndet, 1:]) / factor
        # create workspace
        api.CreateWorkspace(OutputWorkspace=outws_name,
                            DataX=dataX,
                            DataY=dataY,
                            DataE=dataE,
                            NSpec=ndet,
                            UnitX=unitX)
        outws = api.AnalysisDataService.retrieve(outws_name)
        api.LoadInstrument(outws, InstrumentName='DNS', RewriteSpectraMap=True)

        run = outws.mutableRun()
        if metadata.start_time and metadata.end_time:
            run.setStartAndEndTime(DateAndTime(metadata.start_time),
                                   DateAndTime(metadata.end_time))
        # add name of file as a run title
        fname = os.path.splitext(os.path.split(filename)[1])[0]
        run.addProperty('run_title', fname, True)

        # rotate the detector bank to the proper position
        api.RotateInstrumentComponent(outws,
                                      "bank0",
                                      X=0,
                                      Y=1,
                                      Z=0,
                                      Angle=metadata.deterota)
        # add sample log Ei and wavelength
        logs["names"].extend(["Ei", "wavelength"])
        logs["values"].extend([metadata.incident_energy, metadata.wavelength])
        logs["units"].extend(["meV", "Angstrom"])

        # add other sample logs
        logs["names"].extend([
            "deterota", "mon_sum", "duration", "huber", "omega", "T1", "T2",
            "Tsp"
        ])
        logs["values"].extend([
            metadata.deterota, metadata.monitor_counts, metadata.duration,
            metadata.huber, metadata.huber - metadata.deterota, metadata.temp1,
            metadata.temp2, metadata.tsp
        ])
        logs["units"].extend([
            "Degrees", "Counts", "Seconds", "Degrees", "Degrees", "K", "K", "K"
        ])

        # flipper, coil currents and polarisation
        flipper_status = 'OFF'  # flipper OFF
        if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
            flipper_status = 'ON'  # flipper ON
        logs["names"].extend([
            "flipper_precession", "flipper_z_compensation", "flipper", "C_a",
            "C_b", "C_c", "C_z", "polarisation", "polarisation_comment"
        ])
        logs["values"].extend([
            metadata.flipper_precession_current,
            metadata.flipper_z_compensation_current, flipper_status,
            metadata.a_coil_current, metadata.b_coil_current,
            metadata.c_coil_current, metadata.z_coil_current,
            str(pol[0]),
            str(pol[1])
        ])
        logs["units"].extend(["A", "A", "", "A", "A", "A", "A", "", ""])

        # slits
        logs["names"].extend([
            "slit_i_upper_blade_position", "slit_i_lower_blade_position",
            "slit_i_left_blade_position", "slit_i_right_blade_position"
        ])
        logs["values"].extend([
            metadata.slit_i_upper_blade_position,
            metadata.slit_i_lower_blade_position,
            metadata.slit_i_left_blade_position,
            metadata.slit_i_right_blade_position
        ])
        logs["units"].extend(["mm", "mm", "mm", "mm"])

        # add information whether the data are normalized (duration/monitor/no):
        api.AddSampleLog(outws,
                         LogName='normalized',
                         LogText=norm,
                         LogType='String')
        api.AddSampleLogMultiple(outws,
                                 LogNames=logs["names"],
                                 LogValues=logs["values"],
                                 LogUnits=logs["units"])

        outws.setYUnit(yunit)
        outws.setYUnitLabel(ylabel)

        self.setProperty("OutputWorkspace", outws)
        self.log().debug('LoadDNSLegacy: data have been loaded to the workspace ' +
                         outws_name)

        return
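The tof1 computed above is the flight time over l1 at the de Broglie velocity; assuming h and m_n are the Planck constant and neutron mass from scipy.constants, which matches how the snippet uses them, h/m_n is about 3.956e-7 m^2/s, so the velocity reduces to roughly 3956/wavelength[Angstrom] m/s. A worked check with illustrative values (these are not DNS instrument parameters):

from scipy.constants import h, m_n

wavelength = 4.2                            # Angstrom, made up
l1 = 2.0                                    # metres, made up
velocity = h / (m_n * wavelength * 1e-10)   # ~941.9 m/s, i.e. ~3956/wavelength
tof1 = 1e6 * l1 / velocity                  # ~2123 microseconds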
Example #35
0
 def createRandomEventList(self, length):
     el = EventList()
     for i in range(length):
         el.addEventQuickly(float(i), DateAndTime(i))
     return el