Example #1
    def test_get_value_for_unfiltered(self):
        # Checks that filtered_value works for unfiltered logs, e.g. at SNS

        ws_T = Load('Training_Exercise3a_SNS.nxs')
        ws_C = Load('CNCS_7860_event.nxs')  # Has no single-entry logs

        run_T = ws_T.getRun()
        all_logs_T = run_T.getLogData()
        model_T = SampleLogsModel(ws_T)

        run_C = ws_C.getRun()
        all_logs_C = run_C.getLogData()
        model_C = SampleLogsModel(ws_C)

        # Valid log with one entry
        self.assertEqual(
            get_value(all_logs_T[2]), '{} (1 entry)'.format(
                model_T.get_statistics('ChopperStatus1').mean))
        self.assertEqual(
            get_value(all_logs_T[2]), '{} (1 entry)'.format(
                model_T.get_statistics('ChopperStatus1').maximum))

        # Valid log with 2 identical value entries
        self.assertEqual(
            get_value(all_logs_C[2]), '{} (2 entries)'.format(
                model_C.get_statistics('ChopperStatus1').mean))
        self.assertEqual(
            get_value(all_logs_C[2]), '{} (2 entries)'.format(
                model_C.get_statistics('ChopperStatus1').maximum))

        # Valid log with multiple different value entries
        self.assertEqual(get_value(all_logs_C[13]), '({} entries)'.format(
            all_logs_C[13].size()))  # Phase2
Example #2
    def test_get_value_for_filtered(self):
        # Checks that table values and plot log stats agree, even when filtered.
        ws = Load('ENGINX00228061_log_alarm_data.nxs')

        run = ws.getRun()
        all_logs = run.getLogData()
        model = SampleLogsModel(ws)

        # Partially invalid log with one filtered entry
        self.assertEqual(
            get_value(all_logs[31]),
            '{} (1 entry)'.format(model.get_statistics('cryo_temp1').mean))
        self.assertEqual(
            get_value(all_logs[31]),
            '{} (1 entry)'.format(model.get_statistics('cryo_temp1').maximum))

        # Fully invalid log with multiple entries (not affected by filtering)
        self.assertEqual(get_value(all_logs[33]), '({} entries)'.format(
            all_logs[33].size()))  # cryo_temp2

        # Valid log with one entry filtered by status, and a differently valued entry left unfiltered
        self.assertEqual(
            get_value(all_logs[16]), '{} (1 entry)'.format(
                model.get_statistics('C6_SLAVE_FREQUENCY').mean))
        self.assertEqual(
            get_value(all_logs[16]), '{} (1 entry)'.format(
                model.get_statistics('C6_SLAVE_FREQUENCY').maximum))

        # Valid log with one entry filtered by status, and another entry of the same value left unfiltered
        self.assertEqual(
            get_value(all_logs[25]), '{} (1 entry)'.format(
                model.get_statistics('SECI_OUT_OF_RANGE_BLOCK').mean))
        self.assertEqual(
            get_value(all_logs[25]), '{} (1 entry)'.format(
                model.get_statistics('SECI_OUT_OF_RANGE_BLOCK').maximum))

        # Valid log with 2 identical value entries
        self.assertEqual(
            get_value(all_logs[21]), '{} (2 entries)'.format(
                model.get_statistics('C9_SLAVE_PHASE').mean))
        self.assertEqual(
            get_value(all_logs[21]), '{} (2 entries)'.format(
                model.get_statistics('C9_SLAVE_PHASE').maximum))

        # Valid log with 4 identical value entries
        self.assertEqual(
            get_value(all_logs[38]),
            '{} (4 entries)'.format(model.get_statistics('x').mean))
        self.assertEqual(
            get_value(all_logs[38]),
            '{} (4 entries)'.format(model.get_statistics('x').maximum))

        # Valid log with multiple different value entries
        self.assertEqual(get_value(all_logs[29]), '({} entries)'.format(
            all_logs[29].size()))  # cryo_Sample
Example #3
    def _run_number_changed(self):
        """ Handling event if run number is changed... If it is a valid run number,
        the load the meta data
        """

        # 1. Form the file
        newrunnumberstr = self._content.run_number_edit.text()
        instrument = self._instrument_name
        eventnxsname = "%s_%s_event.nxs" % (instrument, newrunnumberstr)
        msg = str("Load event nexus file %s" % (eventnxsname))
        self._content.info_text_browser.setText(msg)

        # 2. Load file
        metawsname = "%s_%s_meta" % (instrument, newrunnumberstr)
        try:
            metaws = Load(Filename=str(eventnxsname),
                          OutputWorkspace=str(metawsname),
                          MetaDataOnly=True)
        except ValueError:
            metaws = None

        # 3. Update the log name combo box
        if metaws is None:
            self._content.info_text_browser.setText(
                str("Error! Failed to load data file %s.  Current working directory is %s. "
                    % (eventnxsname, os.getcwd())))
        else:
            self._metaws = metaws

            # a) Clear
            self._content.log_name_combo.clear()

            # b) Get properties
            wsrun = metaws.getRun()
            ps = wsrun.getProperties()
            properties = []
            for p in ps:
                if p.__class__.__name__ == "FloatTimeSeriesProperty":
                    if p.size() > 1:
                        properties.append(p.name)
                        Logger('FilterSetupWidget').information(
                            '{}[{}]'.format(p.name, p.size()))
            # ENDFOR p
            properties = sorted(properties)

            # c) Add
            for p in properties:
                self._content.log_name_combo.addItem(p)
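
The same metadata-only pattern works outside the GUI class. A minimal standalone sketch (the run file name below is hypothetical):

from mantid.simpleapi import Load

# MetaDataOnly=True loads only the sample logs/metadata and skips the event data
meta = Load(Filename='SNS_12345_event.nxs',  # hypothetical run file
            OutputWorkspace='SNS_12345_meta',
            MetaDataOnly=True)
# list the float time-series logs with more than one entry,
# as the combo-box population above does
for p in meta.getRun().getProperties():
    if p.__class__.__name__ == 'FloatTimeSeriesProperty' and p.size() > 1:
        print(p.name, p.size())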
Example #5
def reduceOneKeepingEvents(nxsfile, angle, eiguess, eaxis, outfile, t0guess=0.):
    """reduce nxs from one angle of a single crystal scan, keeping events
    (only do tof->E conversion)

    nxsfile: input path
    angle: psi in degrees
    eiguess: Ei in meV
    eaxis: Emin, Emax, dE
    outfile: output path
    """
    from mantid.simpleapi import DgsReduction, SofQW3, SaveNexus, SaveNXSPE, LoadInstrument, Load, MoveInstrumentComponent, AddSampleLog
    import os
    outfile = os.path.abspath(outfile)
    print("* working on reducing %s to %s" % (nxsfile, outfile))
    # load workspace from input nexus file
    workspace = Load(nxsfile)

    # workspace names have to be unique
    unique_name = os.path.dirname(nxsfile).split('/')[-1]
    wsname = 'reduced-%s' % unique_name

    # Ei
    if eiguess == 0.:
        # If user does not supply Ei, we try to get it from the samplelog,
        # because mcvine-generated SEQUOIA data files are mantid-processed nexus file
        # with sample logs of Ei and t0.
        # If we don't have them from sample logs, we just set Ei and T0 to None
        run = workspace.getRun()
        UseIncidentEnergyGuess = False
        try:
            Ei = run.getLogData('mcvine-Ei').value
            UseIncidentEnergyGuess = True
        except Exception:
            Ei = None
        try:
            T0 = run.getLogData('mcvine-t0').value
        except Exception:
            T0 = None
    else:
        # user specified Ei, just use that
        Ei = eiguess
        T0 = t0guess
        UseIncidentEnergyGuess = True
    # keep events (need to then run RebinToWorkspace and ConvertToDistribution)
    Emin, Emax, dE = eaxis
    eaxis = '%s,%s,%s' % (Emin, dE, Emax)  # Mantid binning format: min,step,max
    DgsReduction(
        SampleInputWorkspace=workspace,
        IncidentEnergyGuess=Ei,
        TimeZeroGuess=T0,
        UseIncidentEnergyGuess=UseIncidentEnergyGuess,
        OutputWorkspace=wsname,
        SofPhiEIsDistribution='0',
        EnergyTransferRange=eaxis,
    )

    AddSampleLog(Workspace=wsname, LogName="psi", LogText=str(angle), LogType="Number")
    SaveNexus(
        InputWorkspace=wsname,
        Filename=outfile,
        Title='reduced',
    )
    return
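
A hedged usage sketch; the paths and binning below are hypothetical:

# reduce one angle of a single crystal scan, taking Ei/t0 from the mcvine sample logs
reduceOneKeepingEvents(
    nxsfile='/path/to/psi_30/sim.nxs',  # hypothetical input
    angle=30.0,                         # psi in degrees
    eiguess=0.,                         # 0. => try the mcvine-Ei/mcvine-t0 logs
    eaxis=(-50., 50., 1.),              # Emin, Emax, dE in meV
    outfile='reduced_psi_30.nxs')       # hypothetical output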
Example #6
File: nxs.py Project: mcvine/instruments
def reduce(nxsfile,
           qaxis,
           outfile,
           use_ei_guess=False,
           ei_guess=None,
           eaxis=None,
           tof2E=True,
           ibnorm='ByCurrent'):
    from mantid.simpleapi import DgsReduction, SofQW3, SaveNexus, LoadInstrument, Load, MoveInstrumentComponent, \
        MaskBTP, ConvertToMD, BinMD, ConvertMDHistoToMatrixWorkspace, GetEiT0atSNS, GetEi
    from mantid import mtd
    import numpy as np  # used below for the histogram axes
    ws = Load(nxsfile)

    if tof2E == 'guess':
        axis = ws.getAxis(0).getUnit().caption().lower()
        # axis name should be "Time-of-flight"
        tof2E = "time" in axis and "flight" in axis

    if tof2E:
        # mask packs around beam
        # MaskBTP(ws, Bank="98-102")
        if not use_ei_guess:
            run = ws.getRun()
            Efixed = run.getLogData('mcvine-Ei').value
            T0 = run.getLogData('mcvine-t0').value
        else:
            Efixed, T0 = ei_guess, 0

        DgsReduction(
            SampleInputWorkspace=ws,
            IncidentEnergyGuess=Efixed,
            UseIncidentEnergyGuess=True,
            TimeZeroGuess=T0,
            OutputWorkspace='reduced',
            EnergyTransferRange=eaxis,
            IncidentBeamNormalisation=ibnorm,
        )
        reduced = mtd['reduced']
    else:
        reduced = Load(nxsfile)

    # if eaxis is not specified, use the data in reduced workspace
    if eaxis is None:
        Edim = reduced.getXDimension()
        emin = Edim.getMinimum()
        emax = Edim.getMaximum()
        de = Edim.getX(1) - Edim.getX(0)
        eaxis = emin, de, emax

    qmin, dq, qmax = qaxis
    nq = int(round((qmax - qmin) / dq))
    emin, de, emax = eaxis
    ne = int(round((emax - emin) / de))
    md = ConvertToMD(
        InputWorkspace='reduced',
        QDimensions='|Q|',
        dEAnalysisMode='Direct',
        MinValues="%s,%s" % (qmin, emin),
        MaxValues="%s,%s" % (qmax, emax),
        SplitInto="%s,%s" % (nq, ne),
    )
    binned = BinMD(
        InputWorkspace=md,
        AxisAligned=1,
        AlignedDim0="|Q|,%s,%s,%s" % (qmin, qmax, nq),
        AlignedDim1="DeltaE,%s,%s,%s" % (emin, emax, ne),
    )
    # convert to histogram
    import histogram as H, histogram.hdf as hh
    data = binned.getSignalArray().copy()
    err2 = binned.getErrorSquaredArray().copy()
    nev = binned.getNumEventsArray()
    data /= nev
    err2 /= (nev * nev)
    qaxis = H.axis('Q',
                   boundaries=np.arange(qmin, qmax + dq / 2., dq),
                   unit='1./angstrom')
    eaxis = H.axis('E',
                   boundaries=np.arange(emin, emax + de / 2., de),
                   unit='meV')
    hist = H.histogram('IQE', (qaxis, eaxis), data=data, errors=err2)
    if outfile.endswith('.nxs'):
        import warnings
        warnings.warn(
            "The reduce function no longer writes an iqe.nxs nexus file; "
            "it only writes an iqe.h5 histogram file"
        )
        outfile = outfile[:-4] + '.h5'
    hh.dump(hist, outfile)
    return
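
A hedged usage sketch with hypothetical paths and binning; qaxis is (qmin, dq, qmax) and eaxis is (emin, de, emax), as unpacked above:

# reduce a mcvine-simulated run to an I(Q,E) histogram
reduce('/path/to/sim.nxs',          # hypothetical input
       qaxis=(0., 0.1, 10.),        # 1./angstrom
       outfile='iqe.h5',
       eaxis=(-45., 0.5, 45.))      # meV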
Example #7
def process_json(json_filename):
    """This will read a json file, process the data and save the calibration.

    Only ``Calibrant`` and ``Groups`` are required.

    An example input showing every possible option is:

    .. code-block:: JSON

      {
        "Calibrant": "12345",
        "Groups": "/path/to/groups.xml",
        "Mask": "/path/to/mask.xml",
        "Instrument": "NOM",
        "Date" : "2019_09_04",
        "SampleEnvironment": "shifter",
        "PreviousCalibration": "/path/to/cal.h5",
        "CalDirectory": "/path/to/output_directory",
        "CrossCorrelate": {"Step": 0.001,
                           "DReference: 1.5,
                           "Xmin": 1.0,
                           "Xmax": 3.0,
                           "MaxDSpaceShift": 0.25},
        "PDCalibration": {"PeakPositions": [1, 2, 3],
                          "TofBinning": (300,0.001,16666),
                          "PeakFunction": 'Gaussian',
                          "PeakWindow": 0.1,
                          "PeakWidthPercent": 0.001}
      }
    """
    with open(json_filename) as json_file:
        args = json.load(json_file)

    calibrant_file = args.get('CalibrantFile', None)
    if calibrant_file is None:
        calibrant = args['Calibrant']
    groups = args['Groups']
    out_groups_by = args.get('OutputGroupsBy', 'Group')
    sample_env = args.get('SampleEnvironment', 'UnknownSampleEnvironment')
    mask = args.get('Mask')
    instrument = args.get('Instrument', 'NOM')
    cc_kwargs = args.get('CrossCorrelate', {})
    pdcal_kwargs = args.get('PDCalibration', {})
    previous_calibration = args.get('PreviousCalibration')

    date = str(args.get('Date', datetime.datetime.now().strftime('%Y_%m_%d')))
    caldirectory = str(args.get('CalDirectory', os.path.abspath('.')))

    if calibrant_file is not None:
        ws = Load(calibrant_file)
        calibrant = ws.getRun().getProperty('run_number').value
    else:
        filename = f'{instrument}_{calibrant}'
        ws = Load(filename)

    calfilename = f'{caldirectory}/{instrument}_{calibrant}_{date}_{sample_env}.h5'
    logger.notice(f'going to create calibration file: {calfilename}')

    groups = LoadDetectorsGroupingFile(groups, InputWorkspace=ws)

    if mask:
        mask = LoadMask(instrument, mask)
        MaskDetectors(ws, MaskedWorkspace=mask)

    if previous_calibration:
        previous_calibration = LoadDiffCal(previous_calibration,
                                           MakeGroupingWorkspace=False,
                                           MakeMaskWorkspace=False)

    diffcal = do_group_calibration(ws,
                                   groups,
                                   previous_calibration,
                                   cc_kwargs=cc_kwargs,
                                   pdcal_kwargs=pdcal_kwargs)
    mask = mtd['group_calibration_pd_diffcal_mask']

    CreateGroupingWorkspace(InputWorkspace=ws,
                            GroupDetectorsBy=out_groups_by,
                            OutputWorkspace='out_groups')
    SaveDiffCal(CalibrationWorkspace=diffcal,
                MaskWorkspace=mask,
                GroupingWorkspace=mtd['out_groups'],
                Filename=calfilename)
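
A hedged usage sketch: a minimal input with only the required keys, written to a hypothetical temporary file and processed:

import json

minimal = {"Calibrant": "12345", "Groups": "/path/to/groups.xml"}
with open('/tmp/calibration.json', 'w') as f:  # hypothetical path
    json.dump(minimal, f)
process_json('/tmp/calibration.json')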
Example #8
def _load_ws(entry,
             ext,
             inst,
             ws_name,
             raw_types,
             period=_NO_INDIVIDUAL_PERIODS):
    filename, ext = _make_filename(entry, ext, inst)
    sanslog.notice('reading file:\t{}'.format(filename))

    is_data_set_event = False
    workspace_type = get_workspace_type(filename)
    if workspace_type is WorkspaceType.MultiperiodHistogram:
        if period != _NO_INDIVIDUAL_PERIODS:
            outWs = Load(Filename=filename,
                         OutputWorkspace=ws_name,
                         EntryNumber=period)
        else:
            outWs = Load(Filename=filename, OutputWorkspace=ws_name)
    elif workspace_type is WorkspaceType.Histogram:
        outWs = Load(Filename=filename, OutputWorkspace=ws_name)
    elif workspace_type is WorkspaceType.Event or workspace_type is WorkspaceType.MultiperiodEvent:
        is_data_set_event = True
        temp_ws_name = ws_name + "_event_temp"
        temp_ws_name_monitors = temp_ws_name + "_monitors"
        ws_name_monitors = ws_name + "_monitors"

        LoadEventNexus(Filename=filename,
                       OutputWorkspace=temp_ws_name,
                       LoadMonitors=True)
        outWs = mtd[temp_ws_name]
        # If we are dealing with a multiperiod workspace then we can only use a single
        # period at a time, hence we reload the whole data set from disk every time.
        # This is very slow and should be cached in the future.
        if isinstance(outWs, WorkspaceGroup):
            remove_unwanted_workspaces(ws_name, temp_ws_name, period)
            remove_unwanted_workspaces(ws_name_monitors, temp_ws_name_monitors,
                                       period)
        else:
            RenameWorkspace(InputWorkspace=temp_ws_name,
                            OutputWorkspace=ws_name)
            RenameWorkspace(InputWorkspace=temp_ws_name_monitors,
                            OutputWorkspace=ws_name_monitors)

        run_details = mtd[ws_name].getRun()
        time_array = run_details.getLogData("proton_charge").times

        # There should never be a time increment in the proton charge log larger than,
        # say, two weeks. SANS2D currently runs at 10 frames per second; this may change
        # to 5 Hz (a step of 0.2 sec). The time between frames may be larger when the
        # SMP veto is switched on, but hopefully not longer than two weeks!
        for i in range(len(time_array) - 1):
            # Calculate the time difference in seconds
            time_diff = (time_array[i + 1] - time_array[i]) / np.timedelta64(
                1, 's')
            if time_diff > 172800:
                sanslog.warning(
                    "Time increments in the proton charge log of {} are suspiciously large. "
                    "For example, a time difference of {} seconds has "
                    "been observed.".format(filename, str(time_diff)))
                break
    else:
        outWs = Load(Filename=filename, OutputWorkspace=ws_name)

    full_path, __ = getFileAndName(filename)
    path, f_name = os.path.split(full_path)
    if path.find('/') == -1:
        # Looks like we're on a windows system, convert the directory separators
        path = path.replace('\\', '/')

    if _is_type(ext, raw_types):
        LoadSampleDetailsFromRaw(InputWorkspace=ws_name,
                                 Filename=path + '/' + f_name)

    # Change below when logs in Nexus files work: file types of .raw need their
    # log files to be copied too
    # if _is_type(ext, raw_types):
    log_file = os.path.splitext(f_name)[0] + '.log'
    try:
        outWs = mtd[ws_name]
        run = outWs.getRun()
        num_periods = run.getLogData('nperiods').value
    except Exception:
        # Assume the run file didn't support multi-period data and so there is only one period
        num_periods = 1

    return path, f_name, log_file, num_periods, is_data_set_event
Example #9
File: md.py Project: rosswhitfield/corelli
#s2=bin2.getSignalArray().copy()

x = np.linspace(-1, 1, 1000)
X, Y = np.meshgrid(x, x)
mask = (X**2 + Y**2 > 1) + (X**2 + Y**2 < 0.25)

s1[mask] = 0

s1_mask = s1 < np.percentile(s1, 99.5)

mask[s1_mask[:, :, 0]] = True
#mask[s2[:,:,0]==0] = True

s1[mask] = np.nan

start = ws2.getRun().getLogData('BL9:Mot:Sample:Axis2.RBV').value.mean()

max_corr = 0
max_angle = 0

for angle in np.arange(-0.03, 0.03, 0.001):
    SetGoniometer(ws2, Axis0=str(start + angle) + ',0,1,0,1')
    md2 = ConvertToMD(ws2,
                      QDimensions='Q3D',
                      dEAnalysisMode='Elastic',
                      Q3DFrames='Q_sample',
                      MinValues=[-10, -10, -10],
                      MaxValues=[10, 10, 10])
    bin2 = BinMD(md2,
                 AlignedDim0='Q_sample_x,-10,10,1000',
                 AlignedDim1='Q_sample_z,-10,10,1000',
                 # the source snippet is truncated here; the remainder below is a
                 # plausible completion, consistent with the commented-out s2 line
                 # and the s1[:, :, 0] slicing above
                 AlignedDim2='Q_sample_y,-10,10,1')
    s2 = bin2.getSignalArray().copy()
    # correlate the new slice with the masked reference s1 and keep the best angle
    corr = np.nansum(s1 * s2[:, :, 0])
    if corr > max_corr:
        max_corr = corr
        max_angle = angle
Example #10
File: nxs.py Project: mcvine/instruments
def reduce(nxsfile,
           qaxis,
           outfile,
           use_ei_guess=False,
           ei_guess=None,
           eaxis=None,
           tof2E=True,
           ibnorm='ByCurrent',
           t0_guess=None,
           use_monitors=False,
           n_monitors_to_remove_from_workspace=None):
    from mantid.simpleapi import DgsReduction, LoadInstrument, Load, MoveInstrumentComponent, GetEiT0atSNS, GetEi, CropWorkspace
    from mantid import mtd
    import sys
    if sys.version_info < (3, 0) and isinstance(nxsfile, unicode):
        nxsfile = nxsfile.encode('utf-8')

    if tof2E == 'guess':
        # XXX: this is a simple guess. all raw data files seem to have root "entry"
        cmd = 'h5ls %s' % nxsfile
        import subprocess as sp, shlex
        o = sp.check_output(shlex.split(cmd)).decode().strip().split()[0]
        tof2E = o == 'entry'

    if tof2E:
        if not use_ei_guess and use_monitors:
            # use monitors
            ws, mons = Load(nxsfile, LoadMonitors=True)
            Eguess = ws.getRun()['EnergyRequest'].getStatistics().mean
            try:
                Efixed, _p, _i, T0 = GetEi(InputWorkspace=mons,
                                           Monitor1Spec=1,
                                           Monitor2Spec=2,
                                           EnergyEstimate=Eguess,
                                           FixEi=False)
            except Exception:
                import warnings
                warnings.warn(
                    "Failed to determine Ei from monitors. Using EnergyRequest log %s"
                    % Eguess)
                Efixed, T0 = Eguess, 0
        else:
            ws = Load(nxsfile)
            if not use_ei_guess:
                # use Ei T0 saved from beam simulation
                run = ws.getRun()
                Efixed = run.getLogData('mcvine-Ei').value
                T0 = run.getLogData('mcvine-t0').value
            else:
                # use Ei guess from function parameters
                Efixed, T0 = ei_guess, t0_guess or 0.
        # van = SolidAngle(ws) # for solid angle normalization
        if n_monitors_to_remove_from_workspace:
            ws = CropWorkspace(
                ws, StartWorkspaceIndex=n_monitors_to_remove_from_workspace)
        DgsReduction(
            SampleInputWorkspace=ws,
            IncidentEnergyGuess=Efixed,
            UseIncidentEnergyGuess=True,
            TimeZeroGuess=T0,
            OutputWorkspace='reduced',
            EnergyTransferRange=eaxis,
            IncidentBeamNormalisation=ibnorm,
            # DetectorVanadiumInputWorkspace=van,
            # UseProcessedDetVan=True
        )
        reduced = mtd['reduced']
    else:
        reduced = Load(nxsfile)

    getSqeHistogramFromMantidWS(reduced, outfile, qaxis, eaxis)
    return
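
A hedged usage sketch (hypothetical path and binning), letting the function guess whether TOF->E conversion is needed and taking Ei/T0 from the beam monitors:

reduce('/path/to/raw_events.nxs',   # hypothetical input
       qaxis=(0., 0.05, 8.),        # qmin, dq, qmax in 1./angstrom
       outfile='iqe.h5',
       tof2E='guess',
       use_monitors=True)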
Example #11
def calculate_ei(input_file):
    # Auto-find Eis by finding the maximum data point in m2 (excluding end points)
    # and looking for neighbouring reps according to the Fermi speed.
    # Doesn't deal with cases where the peaks enter the 2nd frame (e.g. 2 meV on MARI)
    print(input_file)
    w1 = Load(input_file)
    mon = LoadNexusMonitors(input_file)
    run = w1.getRun()

    # set up ==================================================
    monitor_spectra_2  = 41475
    monitor_spectra_3  = 41476
    monitor_index_2 = 2
    monitor_index_3 = 3
    log = 'Fermi_Speed'

    # Get instrument parameters ===============================
    inst = w1.getInstrument()
    source = inst.getSource()
    L_m2 = mon.getDetector(monitor_index_2).getDistance(source)
    L_m3 = mon.getDetector(monitor_index_3).getDistance(source)
    L_Fermi = inst.getComponentByName("chopper-position").getDistance(source)
    freq = run.getLogData(log).value[-1]
    period = L_m2 / L_Fermi * 1.e6 / freq / 2. # include pi-pulses

    # Find maximum value and identify strongest rep ===========
    m2spec = ExtractSingleSpectrum(mon, monitor_index_2)
    m2spec = Rebin(m2spec, "200,2,18000")
    maxm2 = Max(m2spec)
    TOF = maxm2.readX(0)[0]

    # Generate list of possible reps in m2 ====================
    irep = -5
    while True:
        t = TOF + irep*period
        if t > 0:
            ireps = numpy.arange(irep, irep + 20)
            reps = TOF + period * ireps
            break
        else:
            irep += 1

    # exclude all reps that go past the frame in m3 ===========       
    reps_m3 = reps * L_m3 / L_m2
    reps_m3 = [x for x in reps_m3 if x < 19999.]
    reps = reps[0:len(reps_m3)]
    # exclude all reps at short times
    reps = [x for x in reps if x > 200.]

    # try GetEi for the reps ==================================
    Ei = []
    TOF = []
    for t in reps:
        v_i = L_m2 / t                    # m/mus
        Ei_guess = 5.227e6 * v_i**2       # meV
        try:
            (En, TOF2, dummy, tzero) = GetEi(mon, monitor_spectra_2, monitor_spectra_3, Ei_guess)
        except Exception:
            continue
        if abs(t - TOF2) > 20. or abs(tzero) > 100.:
            continue
        Ei.append(Ei_guess)
        TOF.append(TOF2)

    #=========================================================
    for ii in range(len(Ei)):
        print("%f meV at TOF = %f mus" % (Ei[ii],TOF[ii]))
    return Ei
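
A hedged usage sketch; the MARI run file name below is hypothetical:

eis = calculate_ei('MAR28237_event.nxs')  # hypothetical file
print('candidate incident energies (meV):', eis)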
Example #12
from mantid.simpleapi import Load, mtd, CloneWorkspace, Integration, SaveNexus, RemoveLogs
import numpy as np

ws_list = np.genfromtxt('/SNS/users/rwp/corelli/tube_calibration/list',
                        delimiter=',',
                        dtype=[('runs', '|S11'), ('banks', '5i8'),
                               ('height', 'i8')])

for run, banks, height in ws_list:
    banks = np.asarray(banks)
    banks = banks[np.nonzero(banks)]
    bank_names = ','.join('bank' + str(b) for b in banks)
    data = Load(Filename='CORELLI_' + run,
                BankName=bank_names,
                SingleBankPixelsOnly=False)
    pc = sum(data.getRun()['proton_charge'].value)
    data = Integration(data)
    data /= pc
    RemoveLogs(data)
    if 'accum' in mtd:
        accum += data
    else:
        accum = CloneWorkspace(data)

SaveNexus(accum, '/SNS/users/rwp/corelli/tube_calibration/all_banks.nxs')