Example #1
def handle_saving_event_workspace_when_saving_as_histogram(binning, runs, def_type, inst):
    ws_in_monitor = mtd[ADD_FILES_SUM_TEMPORARY_MONITORS]
    if binning == 'Monitors':
        mon_x = ws_in_monitor.dataX(0)
        binning = str(mon_x[0])
        bin_gap = mon_x[1] - mon_x[0]
        binning = binning + "," + str(bin_gap)
        for j in range(2, len(mon_x)):
            next_bin_gap = mon_x[j] - mon_x[j-1]
            if next_bin_gap != bin_gap:
                bin_gap = next_bin_gap
                binning = binning + "," + str(mon_x[j-1]) + "," + str(bin_gap)
        binning = binning + "," + str(mon_x[len(mon_x)-1])

    sanslog.notice(binning)
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY, OutputWorkspace='AddFilesSumTemporary_Rebin', Params=binning,
          PreserveEvents=False)

    # Loading the nexus file with LoadNexus is necessary because it carries some
    # metadata that LoadEventNexus does not. This must be fixed.
    filename, ext = _make_filename(runs[0], def_type, inst)
    workspace_type = get_workspace_type(filename)
    if workspace_type is WorkspaceType.MultiperiodEvent:
        # If we are dealing with multi-period event workspaces then there is no way of getting any other
        # sample log information, hence we make a copy of the monitor workspace and use that instead
        # of reloading the first file again
        CloneWorkspace(InputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS, OutputWorkspace=ADD_FILES_SUM_TEMPORARY)
    else:
        LoadNexus(Filename=filename, OutputWorkspace=ADD_FILES_SUM_TEMPORARY,
                  SpectrumMax=ws_in_monitor.getNumberHistograms())
    # User may have selected a binning which is different from the default
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY, OutputWorkspace=ADD_FILES_SUM_TEMPORARY, Params=binning)
    # For now the monitor binning must be the same as the detector binning
    # since otherwise both cannot exist in the same output histogram file
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS, OutputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS,
          Params=binning)

    ws_in_monitor = mtd[ADD_FILES_SUM_TEMPORARY_MONITORS]
    wsOut = mtd[ADD_FILES_SUM_TEMPORARY]
    ws_in_detector = mtd['AddFilesSumTemporary_Rebin']

    # We lose the added sample log information since we reload a single run workspace
    # and conjoin it with the added workspace. In order to preserve some added sample
    # logs we need to transfer them at this point
    transfer_special_sample_logs(from_ws=ws_in_detector, to_ws=wsOut)

    mon_n = ws_in_monitor.getNumberHistograms()
    for i in range(mon_n):
        wsOut.setY(i, ws_in_monitor.dataY(i))
        wsOut.setE(i, ws_in_monitor.dataE(i))
    ConjoinWorkspaces(wsOut, ws_in_detector, CheckOverlapping=True)

    if 'AddFilesSumTemporary_Rebin' in mtd:
        DeleteWorkspace('AddFilesSumTemporary_Rebin')
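The binning == 'Monitors' branch above turns the monitor bin edges into a Rebin-style Params string, emitting a new boundary,step pair whenever the bin width changes. A minimal standalone sketch of that logic in plain Python (binning_from_edges is a hypothetical name, not part of the original module):

def binning_from_edges(edges):
    """Build a Rebin-style Params string ("start,step,boundary,step,...,end")
    from a monotonically increasing sequence of bin edges."""
    params = str(edges[0])
    step = edges[1] - edges[0]
    params += "," + str(step)
    for j in range(2, len(edges)):
        next_step = edges[j] - edges[j - 1]
        if next_step != step:
            # a new boundary/step pair marks the point where the bin width changes
            step = next_step
            params += "," + str(edges[j - 1]) + "," + str(step)
    return params + "," + str(edges[-1])

# binning_from_edges([0.0, 1.0, 2.0, 3.0]) -> '0.0,1.0,3.0'
# binning_from_edges([0.0, 1.0, 2.0, 4.0]) -> '0.0,1.0,2.0,2.0,4.0'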
Example #2
def handle_saving_event_workspace_when_saving_as_histogram(binning, is_first_data_set_group_workspace, runs, defType, inst):
    wsInMonitor = mtd[ADD_FILES_SUM_TEMPORARY_MONITORS]
    if binning == 'Monitors':
        monX = wsInMonitor.dataX(0)
        binning = str(monX[0])
        binGap = monX[1] - monX[0]
        binning = binning + "," + str(binGap)
        for j in range(2, len(monX)):
            nextBinGap = monX[j] - monX[j-1]
            if nextBinGap != binGap:
                binGap = nextBinGap
                binning = binning + "," + str(monX[j-1]) + "," + str(binGap)
        binning = binning + "," + str(monX[len(monX)-1])

    sanslog.notice(binning)
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY, OutputWorkspace='AddFilesSumTempory_Rebin', Params=binning,
          PreserveEvents=False)

    # Loading the nexus file with LoadNexus is necessary because it carries some
    # metadata that LoadEventNexus does not. This must be fixed.
    filename, ext = _makeFilename(runs[0], defType, inst)
    workspace_type = get_workspace_type(filename)
    if workspace_type is WorkspaceType.MultiperiodEvent:
        # If we are dealing with multi-period event workspaces then there is no way of getting any other
        # sample log information, hence we make a copy of the monitor workspace and use that instead
        # of reloading the first file again
        CloneWorkspace(InputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS, OutputWorkspace=ADD_FILES_SUM_TEMPORARY)
    else:
        LoadNexus(Filename=filename, OutputWorkspace=ADD_FILES_SUM_TEMPORARY, SpectrumMax=wsInMonitor.getNumberHistograms())
    # User may have selected a binning which is different from the default
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY, OutputWorkspace=ADD_FILES_SUM_TEMPORARY, Params=binning)
    # For now the monitor binning must be the same as the detector binning
    # since otherwise both cannot exist in the same output histogram file
    Rebin(InputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS, OutputWorkspace=ADD_FILES_SUM_TEMPORARY_MONITORS,
          Params=binning)

    wsInMonitor = mtd[ADD_FILES_SUM_TEMPORARY_MONITORS]
    wsOut = mtd[ADD_FILES_SUM_TEMPORARY]
    wsInDetector = mtd['AddFilesSumTempory_Rebin']

    # We lose the added sample log information since we reload a single run workspace
    # and conjoin it with the added workspace. In order to preserve some added sample
    # logs we need to transfer them at this point
    transfer_special_sample_logs(from_ws=wsInDetector, to_ws=wsOut)

    mon_n = wsInMonitor.getNumberHistograms()
    for i in range(mon_n):
        wsOut.setY(i, wsInMonitor.dataY(i))
        wsOut.setE(i, wsInMonitor.dataE(i))
    ConjoinWorkspaces(wsOut, wsInDetector, CheckOverlapping=True)

    if 'AddFilesSumTempory_Rebin' in mtd:
        DeleteWorkspace('AddFilesSumTempory_Rebin')
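Example #2 is a variant of the same routine, with camelCase names and the 'AddFilesSumTempory' spelling in its workspace names. The Params string both versions build follows Mantid's Rebin convention of alternating bin boundaries and positive, linear step widths; the sketch below expands such a string back into bin edges to make the encoding explicit (edges_from_params is a hypothetical helper and assumes positive steps that divide each interval exactly):

def edges_from_params(params):
    """Expand a Rebin-style "start,step,boundary,step,...,end" string with
    positive, linear steps into the list of bin edges it encodes."""
    values = [float(v) for v in params.split(",")]
    edges = [values[0]]
    # values alternate: boundary, step, boundary, step, ..., final boundary
    for k in range(1, len(values) - 1, 2):
        step, boundary = values[k], values[k + 1]
        # small tolerance guards against floating-point accumulation
        while edges[-1] + step <= boundary + 1e-9:
            edges.append(edges[-1] + step)
    return edges

# edges_from_params('0.0,1.0,2.0,2.0,4.0') -> [0.0, 1.0, 2.0, 4.0]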
Example #3
def add_runs(runs, inst='sans2d', defType='.nxs', rawTypes=('.raw', '.s*', 'add','.RAW'), lowMem=False, binning='Monitors', saveAsEvent=False, isOverlay = False, time_shifts = []):
    if inst.upper() == "SANS2DTUBES":
        inst = "SANS2D"
    # check if there is at least one file in the list
    if len(runs) < 1: return

    if not defType.startswith('.'): defType = '.' + defType

    # Create the correct format of adding files
    adder = AddOperation(isOverlay, time_shifts)

    # these input arguments need to be arrays of strings, enforce this
    if type(runs) == str: runs = (runs, )
    if type(rawTypes) == str: rawTypes = (rawTypes, )

    if lowMem:
        lowMem = _can_load_periods(runs, defType, rawTypes)
    if lowMem:
        period = 1
    else:
        period = _NO_INDIVIDUAL_PERIODS

    userEntry = runs[0]

    counter_run = 0

    while True:

        isFirstDataSetEvent = False
        # we need to catch all exceptions to ensure that a dialog box is raised with the error
        try :
            lastPath, lastFile, logFile, num_periods, isFirstDataSetEvent = _loadWS(
                userEntry, defType, inst, 'AddFilesSumTempory', rawTypes, period)

            # if the data is event data, a loop over periods makes no sense
            if isFirstDataSetEvent:
                period = _NO_INDIVIDUAL_PERIODS

            if inst.upper() != 'SANS2D' and isFirstDataSetEvent:
                error = 'Adding event data not supported for ' + inst + ' for now'
                print(error)
                logger.notice(error)
                for workspaceName in ('AddFilesSumTempory','AddFilesSumTempory_monitors'):
                    if workspaceName in mtd:
                        DeleteWorkspace(workspaceName)
                return ""

            for i in range(len(runs)-1):
                userEntry = runs[i+1]
                lastPath, lastFile, logFile, dummy, isDataSetEvent = _loadWS(
                    userEntry, defType, inst, 'AddFilesNewTempory', rawTypes, period)

                if isDataSetEvent != isFirstDataSetEvent:
                    error = 'Datasets added must be either ALL histogram data or ALL event data'
                    print(error)
                    logger.notice(error)
                    for workspaceName in ('AddFilesSumTempory','AddFilesNewTempory'):
                        if workspaceName in mtd:
                            DeleteWorkspace(workspaceName)
                    return ""

                adder.add(LHS_workspace='AddFilesSumTempory', RHS_workspace='AddFilesNewTempory',
                          output_workspace='AddFilesSumTempory', run_to_add=counter_run)
                if isFirstDataSetEvent:
                    adder.add(LHS_workspace='AddFilesSumTempory_monitors', RHS_workspace='AddFilesNewTempory_monitors',
                              output_workspace='AddFilesSumTempory_monitors', run_to_add=counter_run)
                DeleteWorkspace("AddFilesNewTempory")
                if isFirstDataSetEvent:
                    DeleteWorkspace("AddFilesNewTempory_monitors")
                # Increment the run number
                counter_run +=1
        except ValueError as e:
            error = 'Error opening file ' + userEntry+': ' + str(e)
            print(error)
            logger.notice(error)
            if 'AddFilesSumTempory' in mtd:
                DeleteWorkspace('AddFilesSumTempory')
            return ""
        except Exception as e:
            error = 'Error finding files: ' + str(e)
            print(error)
            logger.notice(error)
            for workspaceName in ('AddFilesSumTempory','AddFilesNewTempory'):
                if workspaceName in mtd:
                    DeleteWorkspace(workspaceName)
            return ""

        # in the case of an event file, force it into a histogram workspace
        if isFirstDataSetEvent and not saveAsEvent:
            wsInMonitor = mtd['AddFilesSumTempory_monitors']
            if binning == 'Monitors':
                monX = wsInMonitor.dataX(0)
                binning = str(monX[0])
                binGap = monX[1] - monX[0]
                binning = binning + "," + str(binGap)
                for j in range(2, len(monX)):
                    nextBinGap = monX[j] - monX[j-1]
                    if nextBinGap != binGap:
                        binGap = nextBinGap
                        binning = binning + "," + str(monX[j-1]) + "," + str(binGap)
                binning = binning + "," + str(monX[len(monX)-1])

            logger.notice(binning)
            Rebin(InputWorkspace='AddFilesSumTempory', OutputWorkspace='AddFilesSumTempory_Rebin', Params=binning,
                  PreserveEvents=False)

            # Loading the nexus file with LoadNexus is necessary because it carries some
            # metadata that LoadEventNexus does not. This must be fixed.
            filename, ext = _makeFilename(runs[0], defType, inst)
            LoadNexus(Filename=filename, OutputWorkspace='AddFilesSumTempory',
                      SpectrumMax=wsInMonitor.getNumberHistograms())
            # User may have selected a binning which is different from the default
            Rebin(InputWorkspace='AddFilesSumTempory', OutputWorkspace='AddFilesSumTempory', Params=binning)
            # For now the monitor binning must be the same as the detector binning
            # since otherwise both cannot exist in the same output histogram file
            Rebin(InputWorkspace='AddFilesSumTempory_monitors', OutputWorkspace='AddFilesSumTempory_monitors',
                  Params=binning)

            wsInMonitor = mtd['AddFilesSumTempory_monitors']
            wsOut = mtd['AddFilesSumTempory']
            wsInDetector = mtd['AddFilesSumTempory_Rebin']

            # We lose the added sample log information since we reload a single run workspace
            # and conjoin it with the added workspace. In order to preserve some added sample
            # logs we need to transfer them at this point
            transfer_special_sample_logs(from_ws=wsInDetector, to_ws=wsOut)

            mon_n = wsInMonitor.getNumberHistograms()
            for i in range(mon_n):
                wsOut.setY(i, wsInMonitor.dataY(i))
                wsOut.setE(i, wsInMonitor.dataE(i))
            ConjoinWorkspaces(wsOut, wsInDetector, CheckOverlapping=True)

            if 'AddFilesSumTempory_Rebin' in mtd:
                DeleteWorkspace('AddFilesSumTempory_Rebin')


        lastFile = os.path.splitext(lastFile)[0]
        # now save the added file
        outFile = lastFile + '-add.' + 'nxs'
        outFile_monitors = lastFile + '-add_monitors.' + 'nxs'
        logger.notice('writing file:   ' + outFile)

        if period == 1 or period == _NO_INDIVIDUAL_PERIODS:
            # replace the file the first time around
            SaveNexusProcessed(InputWorkspace="AddFilesSumTempory",
                               Filename=outFile, Append=False)
            # If we are saving event data, then we need to save also the monitor file
            if isFirstDataSetEvent and saveAsEvent:
                SaveNexusProcessed(InputWorkspace="AddFilesSumTempory_monitors",
                                   Filename=outFile_monitors , Append=False)

        else:
            # then append
            SaveNexusProcessed("AddFilesSumTempory", outFile, Append=True)
            if isFirstDataSetEvent and saveAsEvent:
                SaveNexusProcessed("AddFilesSumTempory_monitors", outFile_monitors , Append=True)

        DeleteWorkspace("AddFilesSumTempory")
        if isFirstDataSetEvent:
            DeleteWorkspace("AddFilesSumTempory_monitors")

        if period == num_periods:
            break

        if period == _NO_INDIVIDUAL_PERIODS:
            break
        else:
            period += 1

    if isFirstDataSetEvent and saveAsEvent:
        outFile = bundle_added_event_data_as_group(outFile, outFile_monitors)

    # this adds the path to the filename
    path,base = os.path.split(outFile)
    if path == '' or base not in os.listdir(path):
        # Try the default save directory
        path = config['defaultsave.directory'] + path
        # If the path is still an empty string check in the current working directory
        if path == '':
            path = os.getcwd()
        assert base in os.listdir(path)
    pathout = path
    if logFile:
        _copyLog(lastPath, logFile, pathout)

    return 'The following file has been created:\n'+outFile
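At the end of add_runs the output location is resolved: the directory embedded in outFile is preferred, falling back to Mantid's defaultsave.directory and finally to the current working directory. The same lookup as a standalone sketch, with the config access replaced by a plain argument (resolve_output_dir is a hypothetical name, for illustration only):

import os

def resolve_output_dir(out_file, default_save_dir=''):
    """Mirror the path lookup at the end of add_runs: prefer the directory
    part of out_file, then default_save_dir, then the working directory."""
    path, base = os.path.split(out_file)
    if path == '' or base not in os.listdir(path):
        path = default_save_dir + path
        if path == '':
            path = os.getcwd()
        assert base in os.listdir(path), 'added file not found in ' + path
    return path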
Example #4
def add_runs(runs,
             inst='sans2d',
             defType='.nxs',
             rawTypes=('.raw', '.s*', 'add', '.RAW'),
             lowMem=False,
             binning='Monitors',
             saveAsEvent=False,
             isOverlay=False,
             time_shifts=[]):
    if inst.upper() == "SANS2DTUBES":
        inst = "SANS2D"
    # check if there is at least one file in the list
    if len(runs) < 1: return

    if not defType.startswith('.'): defType = '.' + defType

    # Create the correct format of adding files
    adder = AddOperation(isOverlay, time_shifts)

    #these input arguments need to be arrays of strings, enforce this
    if type(runs) == str: runs = (runs, )
    if type(rawTypes) == str: rawTypes = (rawTypes, )

    if lowMem:
        lowMem = _can_load_periods(runs, defType, rawTypes)
    if lowMem:
        period = 1
    else:
        period = _NO_INDIVIDUAL_PERIODS

    userEntry = runs[0]

    counter_run = 0

    while True:

        isFirstDataSetEvent = False
        #we need to catch all exceptions to ensure that a dialog box is raised with the error
        try:
            lastPath, lastFile, logFile, num_periods, isFirstDataSetEvent = _loadWS(
                userEntry, defType, inst, 'AddFilesSumTempory', rawTypes, period)

            # if the data is event data, a loop over periods makes no sense
            if isFirstDataSetEvent:
                period = _NO_INDIVIDUAL_PERIODS

            if inst.upper() != 'SANS2D' and isFirstDataSetEvent:
                error = 'Adding event data not supported for ' + inst + ' for now'
                print(error)
                logger.notice(error)
                for workspaceName in ('AddFilesSumTempory',
                                      'AddFilesSumTempory_monitors'):
                    if workspaceName in mtd:
                        DeleteWorkspace(workspaceName)
                return ""

            for i in range(len(runs) - 1):
                userEntry = runs[i + 1]
                lastPath, lastFile, logFile, dummy, isDataSetEvent = _loadWS(
                    userEntry, defType, inst, 'AddFilesNewTempory', rawTypes, period)

                if isDataSetEvent != isFirstDataSetEvent:
                    error = 'Datasets added must be either ALL histogram data or ALL event data'
                    print(error)
                    logger.notice(error)
                    for workspaceName in ('AddFilesSumTempory',
                                          'AddFilesNewTempory'):
                        if workspaceName in mtd:
                            DeleteWorkspace(workspaceName)
                    return ""

                adder.add(LHS_workspace='AddFilesSumTempory',
                          RHS_workspace='AddFilesNewTempory',
                          output_workspace='AddFilesSumTempory',
                          run_to_add=counter_run)
                if isFirstDataSetEvent:
                    adder.add(LHS_workspace='AddFilesSumTempory_monitors',
                              RHS_workspace='AddFilesNewTempory_monitors',
                              output_workspace='AddFilesSumTempory_monitors',
                              run_to_add=counter_run)
                DeleteWorkspace("AddFilesNewTempory")
                if isFirstDataSetEvent:
                    DeleteWorkspace("AddFilesNewTempory_monitors")
                # Increment the run number
                counter_run += 1
        except ValueError as e:
            error = 'Error opening file ' + userEntry + ': ' + str(e)
            print(error)
            logger.notice(error)
            if 'AddFilesSumTempory' in mtd:
                DeleteWorkspace('AddFilesSumTempory')
            return ""
        except Exception as e:
            error = 'Error finding files: ' + str(e)
            print(error)
            logger.notice(error)
            for workspaceName in ('AddFilesSumTempory', 'AddFilesNewTempory'):
                if workspaceName in mtd:
                    DeleteWorkspace(workspaceName)
            return ""

        # in the case of an event file, force it into a histogram workspace
        if isFirstDataSetEvent and not saveAsEvent:
            wsInMonitor = mtd['AddFilesSumTempory_monitors']
            if binning == 'Monitors':
                monX = wsInMonitor.dataX(0)
                binning = str(monX[0])
                binGap = monX[1] - monX[0]
                binning = binning + "," + str(binGap)
                for j in range(2, len(monX)):
                    nextBinGap = monX[j] - monX[j - 1]
                    if nextBinGap != binGap:
                        binGap = nextBinGap
                        binning = binning + "," + str(
                            monX[j - 1]) + "," + str(binGap)
                binning = binning + "," + str(monX[len(monX) - 1])

            logger.notice(binning)
            Rebin(InputWorkspace='AddFilesSumTempory',
                  OutputWorkspace='AddFilesSumTempory_Rebin',
                  Params=binning,
                  PreserveEvents=False)

            # Loading the nexus file with LoadNexus is necessary because it carries some
            # metadata that LoadEventNexus does not. This must be fixed.
            filename, ext = _makeFilename(runs[0], defType, inst)
            LoadNexus(Filename=filename,
                      OutputWorkspace='AddFilesSumTempory',
                      SpectrumMax=wsInMonitor.getNumberHistograms())
            # User may have selected a binning which is different from the default
            Rebin(InputWorkspace='AddFilesSumTempory',
                  OutputWorkspace='AddFilesSumTempory',
                  Params=binning)
            # For now the monitor binning must be the same as the detector binning
            # since otherwise both cannot exist in the same output histogram file
            Rebin(InputWorkspace='AddFilesSumTempory_monitors',
                  OutputWorkspace='AddFilesSumTempory_monitors',
                  Params=binning)

            wsInMonitor = mtd['AddFilesSumTempory_monitors']
            wsOut = mtd['AddFilesSumTempory']
            wsInDetector = mtd['AddFilesSumTempory_Rebin']

            # We lose the added sample log information since we reload a single run workspace
            # and conjoin it with the added workspace. In order to preserve some added sample
            # logs we need to transfer them at this point
            transfer_special_sample_logs(from_ws=wsInDetector, to_ws=wsOut)

            mon_n = wsInMonitor.getNumberHistograms()
            for i in range(mon_n):
                wsOut.setY(i, wsInMonitor.dataY(i))
                wsOut.setE(i, wsInMonitor.dataE(i))
            ConjoinWorkspaces(wsOut, wsInDetector, CheckOverlapping=True)

            if 'AddFilesSumTempory_Rebin' in mtd:
                DeleteWorkspace('AddFilesSumTempory_Rebin')

        lastFile = os.path.splitext(lastFile)[0]
        # now save the added file
        outFile = lastFile + '-add.' + 'nxs'
        outFile_monitors = lastFile + '-add_monitors.' + 'nxs'
        logger.notice('writing file:   ' + outFile)

        if period == 1 or period == _NO_INDIVIDUAL_PERIODS:
            #replace the file the first time around
            SaveNexusProcessed(InputWorkspace="AddFilesSumTempory",
                               Filename=outFile,
                               Append=False)
            # If we are saving event data, then we need to save also the monitor file
            if isFirstDataSetEvent and saveAsEvent:
                SaveNexusProcessed(
                    InputWorkspace="AddFilesSumTempory_monitors",
                    Filename=outFile_monitors,
                    Append=False)

        else:
            #then append
            SaveNexusProcessed("AddFilesSumTempory", outFile, Append=True)
            if isFirstDataSetEvent and saveAsEvent:
                SaveNexusProcessed("AddFilesSumTempory_monitors",
                                   outFile_monitors,
                                   Append=True)

        DeleteWorkspace("AddFilesSumTempory")
        if isFirstDataSetEvent:
            DeleteWorkspace("AddFilesSumTempory_monitors")

        if period == num_periods:
            break

        if period == _NO_INDIVIDUAL_PERIODS:
            break
        else:
            period += 1

    if isFirstDataSetEvent and saveAsEvent:
        outFile = bundle_added_event_data_as_group(outFile, outFile_monitors)

    # this adds the path to the filename
    path, base = os.path.split(outFile)
    if path == '' or base not in os.listdir(path):
        # Try the default save directory
        path = config['defaultsave.directory'] + path
        # If the path is still an empty string check in the current working directory
        if path == '':
            path = os.getcwd()
        assert base in os.listdir(path)
    pathout = path
    if logFile:
        _copyLog(lastPath, logFile, pathout)

    return 'The following file has been created:\n' + outFile
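Example #4 is the same add_runs after automatic reformatting. Its saving branch follows a replace-then-append pattern: the first pass (period 1, or the single pass when periods are not looped over) overwrites any existing file, and every later period is appended to the same .nxs file. A bare-bones sketch of that control flow, with the SaveNexusProcessed call abstracted into a callback (save_periods is a hypothetical name):

def save_periods(num_periods, save, out_file):
    """Write period 1 with append=False (replacing any stale file), then
    append each subsequent period to the same output file."""
    for period in range(1, num_periods + 1):
        save(out_file, append=(period > 1))

# usage sketch, mirroring the call in the examples:
# save_periods(num_periods,
#              lambda f, append: SaveNexusProcessed(InputWorkspace='AddFilesSumTempory',
#                                                   Filename=f, Append=append),
#              outFile)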