Example #1
    def _save(self, runnumber, basename, norm):
        if not self.getProperty("SaveData").value:
            return

        saveDir = self.getProperty("OutputDirectory").value.strip()
        if len(saveDir) <= 0:
            self.log().notice('Using default save location')
            saveDir = os.path.join(
                self.get_IPTS_Local(runnumber), 'shared', 'data')
        self.log().notice('Writing to \'' + saveDir + '\'')

        if norm == 'None':
            SaveNexusProcessed(InputWorkspace='WS_red',
                               Filename=os.path.join(saveDir, 'nexus', basename + '.nxs'))
            SaveAscii(InputWorkspace='WS_red',
                      Filename=os.path.join(saveDir, 'd_spacing', basename + '.dat'))
            ConvertUnits(InputWorkspace='WS_red', OutputWorkspace='WS_tof',
                         Target="TOF", AlignBins=False)
        else:
            SaveNexusProcessed(InputWorkspace='WS_nor',
                               Filename=os.path.join(saveDir, 'nexus', basename + '.nxs'))
            SaveAscii(InputWorkspace='WS_nor',
                      Filename=os.path.join(saveDir, 'd_spacing', basename + '.dat'))
            ConvertUnits(InputWorkspace='WS_nor', OutputWorkspace='WS_tof',
                         Target="TOF", AlignBins=False)

        SaveGSS(InputWorkspace='WS_tof',
                Filename=os.path.join(saveDir, 'gsas', basename + '.gsa'),
                Format='SLOG', SplitFiles=False, Append=False, ExtendedHeader=True)
        SaveFocusedXYE(InputWorkspace='WS_tof',
                       Filename=os.path.join(
                           saveDir, 'fullprof', basename + '.dat'),
                       SplitFiles=True, Append=False)
        DeleteWorkspace(Workspace='WS_tof')
Example #2
    def _save(self, runnumber, basename, outputWksp):
        if not self.getProperty("SaveData").value:
            return

        # determine where to save the data
        saveDir = self.getPropertyValue("OutputDirectory").strip()
        if len(saveDir) <= 0:
            self.log().notice('Using default save location')
            saveDir = os.path.join(self.get_IPTS_Local(runnumber), 'shared', 'data')

        self.log().notice('Writing to \'' + saveDir + '\'')

        SaveNexusProcessed(InputWorkspace=outputWksp,
                           Filename=os.path.join(saveDir, 'nexus', basename + '.nxs'))
        SaveAscii(InputWorkspace=outputWksp,
                  Filename=os.path.join(saveDir, 'd_spacing', basename + '.dat'))
        ConvertUnits(InputWorkspace=outputWksp, OutputWorkspace='WS_tof',
                     Target="TOF", AlignBins=False)

        # GSAS and FullProf require data in time-of-flight
        SaveGSS(InputWorkspace='WS_tof',
                Filename=os.path.join(saveDir, 'gsas', basename + '.gsa'),
                Format='SLOG', SplitFiles=False, Append=False, ExtendedHeader=True)
        SaveFocusedXYE(InputWorkspace='WS_tof',
                       Filename=os.path.join(
                           saveDir, 'fullprof', basename + '.dat'),
                       SplitFiles=True, Append=False)
        DeleteWorkspace(Workspace='WS_tof')
Example #3
    def _save(self, saveDir, basename, outputWksp):
        if not self.getProperty("SaveData").value:
            return

        self.log().notice('Writing to \'' + saveDir + '\'')

        SaveNexusProcessed(InputWorkspace=outputWksp,
                           Filename=os.path.join(saveDir, 'nexus',
                                                 basename + '.nxs'))
        SaveAscii(InputWorkspace=outputWksp,
                  Filename=os.path.join(saveDir, 'd_spacing',
                                        basename + '.dat'))
        ConvertUnits(InputWorkspace=outputWksp,
                     OutputWorkspace='WS_tof',
                     Target="TOF",
                     AlignBins=False)

        # GSAS and FullProf require data in time-of-flight
        SaveGSS(InputWorkspace='WS_tof',
                Filename=os.path.join(saveDir, 'gsas', basename + '.gsa'),
                Format='SLOG',
                SplitFiles=False,
                Append=False,
                ExtendedHeader=True)
        SaveFocusedXYE(InputWorkspace='WS_tof',
                       Filename=os.path.join(saveDir, 'fullprof',
                                             basename + '.dat'),
                       SplitFiles=True,
                       Append=False)
        DeleteWorkspace(Workspace='WS_tof')
Example #4
def save_reduction(workspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param workspace_names List of workspace names to save
    @param formats List of formats to save in
    @param x_units Output X units
    """
    from mantid.api import AlgorithmManager
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)

    for workspace_name in workspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')

        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')

        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')

        if 'ascii' in formats:
            # Version 1 of SaveAscii produces output that works better with Excel/Origin.
            # For some reason this has to be done with an algorithm object; using the
            # function wrapper with Version did not change the version that was run.
            saveAsciiAlg = AlgorithmManager.createUnmanaged('SaveAscii', 1)
            saveAsciiAlg.initialize()
            saveAsciiAlg.setProperty('InputWorkspace', workspace_name)
            saveAsciiAlg.setProperty('Filename', workspace_name + '.dat')
            saveAsciiAlg.execute()

        if 'aclimax' in formats:
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000' #cm-1
            else:
                bins = '3, -0.005, 500' #meV

            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')

        if 'davegrp' in formats:
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name + '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
Example #5
def save_reduction(workspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param workspace_names List of workspace names to save
    @param formats List of formats to save in
    @param x_units X units
    """
    from mantid.api import AlgorithmManager
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)

    for workspace_name in workspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')

        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')

        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')

        if 'ascii' in formats:

            # Changed to version 2 to enable re-loading of files into mantid
            saveAsciiAlg = AlgorithmManager.createUnmanaged('SaveAscii', 2)
            saveAsciiAlg.initialize()
            saveAsciiAlg.setProperty('InputWorkspace', workspace_name)
            saveAsciiAlg.setProperty('Filename', workspace_name + '.dat')
            saveAsciiAlg.execute()

        if 'aclimax' in formats:
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000'  #cm-1
            else:
                bins = '3, -0.005, 500'  #meV

            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')

        if 'davegrp' in formats:
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name +
                                '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
Example #6
    def write_workspaces_to_ascii(scale: float = 1., *, ws_name: str) -> None:
        """Write all with given root name to ascii files

        :param ws_name: Workspace name (to be searched for in Mantid context)
        :param scale: Scale factor to apply to data (typically 1 / bin_width)
        """
        num_workspaces = mtd[ws_name].getNumberOfEntries()
        for wrk_num in range(num_workspaces):
            wrk = mtd[ws_name].getItem(wrk_num)
            SaveAscii(InputWorkspace=Scale(wrk, scale, "Multiply"),
                      Filename=wrk.name() + ".dat", Separator="Space", WriteSpectrumID=False)
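
A brief usage sketch, assuming write_workspaces_to_ascii is available as a free function (the group-workspace name and bin width below are hypothetical):

# Hypothetical usage: scale every member of the 'abins_output' group workspace by
# 1/bin_width and write each member to '<workspace name>.dat'
bin_width = 1.0
write_workspaces_to_ascii(scale=1.0 / bin_width, ws_name='abins_output')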
Example #7
    def _output_det_pos_file(filename, tbl):
        """
        Writes a text (TSV) file with the detector positions information that also goes into the
        output DetectorPositions table workspace.

        @param filename :: name of the file to write. If it is empty nothing is saved/written.
        @param tbl :: detector positions table workspace
        """
        if not filename:
            return

        if filename.strip():
            SaveAscii(InputWorkspace=tbl, Filename=filename, WriteXError=True, WriteSpectrumID=False, Separator="Tab")
Example #8
def save_reduction(workspace_names, formats, x_units='DeltaE'):
    """
    Saves the workspaces to the default save directory.

    @param workspace_names List of workspace names to save
    @param formats List of formats to save in
    @param x_units X units
    """
    from mantid.simpleapi import (SaveSPE, SaveNexusProcessed, SaveNXSPE,
                                  SaveAscii, Rebin, DeleteWorkspace,
                                  ConvertSpectrumAxis, SaveDaveGrp)

    for workspace_name in workspace_names:
        if 'spe' in formats:
            SaveSPE(InputWorkspace=workspace_name,
                    Filename=workspace_name + '.spe')

        if 'nxs' in formats:
            SaveNexusProcessed(InputWorkspace=workspace_name,
                               Filename=workspace_name + '.nxs')

        if 'nxspe' in formats:
            SaveNXSPE(InputWorkspace=workspace_name,
                      Filename=workspace_name + '.nxspe')

        if 'ascii' in formats:
            _save_ascii(workspace_name, workspace_name + ".dat")

        if 'aclimax' in formats:
            if x_units == 'DeltaE_inWavenumber':
                bins = '24, -0.005, 4000'  # cm-1
            else:
                bins = '3, -0.005, 500'  # meV

            Rebin(InputWorkspace=workspace_name,
                  OutputWorkspace=workspace_name + '_aclimax_save_temp',
                  Params=bins)
            SaveAscii(InputWorkspace=workspace_name + '_aclimax_save_temp',
                      Filename=workspace_name + '_aclimax.dat',
                      Separator='Tab')
            DeleteWorkspace(Workspace=workspace_name + '_aclimax_save_temp')

        if 'davegrp' in formats:
            ConvertSpectrumAxis(InputWorkspace=workspace_name,
                                OutputWorkspace=workspace_name +
                                '_davegrp_save_temp',
                                Target='ElasticQ',
                                EMode='Indirect')
            SaveDaveGrp(InputWorkspace=workspace_name + '_davegrp_save_temp',
                        Filename=workspace_name + '.grp')
            DeleteWorkspace(Workspace=workspace_name + '_davegrp_save_temp')
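
Example #8 delegates the plain ASCII save to a _save_ascii helper that is not shown here. A minimal sketch of what such a helper might look like, assuming it wraps version 2 of SaveAscii through an algorithm object as in Example #5 (the helper below is hypothetical, not the project's actual implementation):

from mantid.api import AlgorithmManager

def _save_ascii(workspace_name, filename):
    # Use version 2 of SaveAscii so the resulting file can be re-loaded into Mantid
    save_ascii_alg = AlgorithmManager.createUnmanaged('SaveAscii', 2)
    save_ascii_alg.initialize()
    save_ascii_alg.setProperty('InputWorkspace', workspace_name)
    save_ascii_alg.setProperty('Filename', filename)
    save_ascii_alg.execute()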
Example #9
def save_file(ws, filename, header=None):
    """
    Small wrapper around the Mantid `SaveAscii` algorithm that adds header lines.

    :param ws: Mantid workspace to save out
    :type ws: MatrixWorkspace
    :param filename: Filename to save output
    :type filename: str
    :param header: A list of header comments
    :type header: list
    """
    with open(filename, 'w') as f:
        if header:
            for line in header:
                f.write('# %s \n' % line)
    SaveAscii(InputWorkspace=ws,
              Filename=filename,
              Separator='Space',
              ColumnHeader=False,
              AppendToFile=True)
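
A brief usage sketch for save_file (the workspace and header text below are hypothetical):

from mantid.simpleapi import CreateSampleWorkspace

# Hypothetical usage: write the workspace to 'reduced.dat' with two comment lines at the top
ws = CreateSampleWorkspace()
save_file(ws, 'reduced.dat', header=['first comment line', 'second comment line'])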
Example #10
import os
import time

# NOTE: `outdir` (the output directory used below) is assumed to be defined elsewhere in this script.
def applyPowder(ipts_list):
    from mantid.simpleapi import BASISPowderDiffraction, SaveAscii, DeleteWorkspace
    minimal_size = int(10e6)  # 10 MB: skip event files smaller than this
    for ipt in ipts_list:
        print('ipts =', ipt)
        for root, dirs, files in os.walk('/SNS/BSS/IPTS-{}/0'.format(ipt)):
            print('nfiles =', len(files))
            time.sleep(3)
            for file in files:
                if file.endswith('event.nxs'):
                    full_path = os.path.join(root, file)
                    if os.stat(full_path).st_size > minimal_size:
                        print(full_path)
                        run_number = full_path.split('BSS_')[1].split(
                            '_event')[0]
                        print(run_number)
                        out_name = 'ipts_{}_run_{}'.format(ipt, run_number)
                        out_name = os.path.join(outdir, out_name)
                        title = 'IPTS {} RUN {}'.format(ipt, run_number)
                        print(out_name)
                        print(title)
                        try:
                            BASISPowderDiffraction(
                                RunNumbers=run_number,
                                MomentumTransferBins=[0.1, 0.0025, 3.0],
                                OutputWorkspace='w',
                                MonitorNormalization=0)
                            SaveAscii(InputWorkspace='w_angle',
                                      Filename=out_name + '_angle.dat',
                                      WriteSpectrumID=False,
                                      Separator='Space',
                                      ColumnHeader=False)
                            DeleteWorkspace(Workspace='w_angle')
                        except Exception:
                            # skip runs that fail to reduce and move on
                            pass
                        finally:
                            print('\n\n\n**********************************')
                            print('\n   {}   '.format(title))
                            print('\n**********************************\n\n')
                            time.sleep(2)
Example #11
File: Abins.py  Project: liquidmet/mantid
    def PyExec(self):

        # 0) Create reporter to report progress
        steps = 9
        begin = 0
        end = 1.0
        prog_reporter = Progress(self, begin, end, steps)

        # 1) get input parameters from a user
        self._get_properties()
        prog_reporter.report("Input data from the user has been collected.")

        # 2) read ab initio data
        ab_initio_loaders = {"CASTEP": AbinsModules.LoadCASTEP, "CRYSTAL": AbinsModules.LoadCRYSTAL,
                             "DMOL3": AbinsModules.LoadDMOL3, "GAUSSIAN": AbinsModules.LoadGAUSSIAN}
        rdr = ab_initio_loaders[self._ab_initio_program](input_ab_initio_filename=self._vibrational_or_phonon_data_file)
        ab_initio_data = rdr.get_formatted_data()
        prog_reporter.report("Vibrational/phonon data has been read.")

        # 3) calculate S
        s_calculator = AbinsModules.CalculateS.init(filename=self._vibrational_or_phonon_data_file,
                                                    temperature=self._temperature,
                                                    sample_form=self._sample_form, abins_data=ab_initio_data,
                                                    instrument=self._instrument,
                                                    quantum_order_num=self._num_quantum_order_events,
                                                    bin_width=self._bin_width)
        s_data = s_calculator.get_formatted_data()
        prog_reporter.report("Dynamical structure factors have been determined.")

        # 4) get atoms for which S should be plotted
        self._extracted_ab_initio_data = ab_initio_data.get_atoms_data().extract()
        num_atoms = len(self._extracted_ab_initio_data)
        all_atms_smbls = list(set([self._extracted_ab_initio_data["atom_%s" % atom]["symbol"]
                                   for atom in range(num_atoms)]))
        all_atms_smbls.sort()

        if len(self._atoms) == 0:  # case: all atoms
            atoms_symbol = all_atms_smbls
        else:  # case selected atoms
            if len(self._atoms) != len(set(self._atoms)):  # only different types
                raise ValueError("Not all user defined atoms are unique.")

            for atom_symbol in self._atoms:
                if atom_symbol not in all_atms_smbls:
                    raise ValueError("User defined atom not present in the system.")
            atoms_symbol = self._atoms
        prog_reporter.report("Atoms, for which dynamical structure factors should be plotted, have been determined.")

        # at the moment only types of atom, e.g, for  benzene three options -> 1) C, H;  2) C; 3) H
        # 5) create workspaces for atoms in interest
        workspaces = []
        if self._sample_form == "Powder":
            workspaces.extend(self._create_partial_s_per_type_workspaces(atoms_symbols=atoms_symbol, s_data=s_data))
        prog_reporter.report("Workspaces with partial dynamical structure factors have been constructed.")

        # 6) Create a workspace with sum of all atoms if required
        if self._sum_contributions:
            total_atom_workspaces = []
            for ws in workspaces:
                if "total" in ws:
                    total_atom_workspaces.append(ws)
            total_workspace = self._create_total_workspace(partial_workspaces=total_atom_workspaces)
            workspaces.insert(0, total_workspace)
            prog_reporter.report("Workspace with total S  has been constructed.")

        # 7) add experimental data if available to the collection of workspaces
        if self._experimental_file != "":
            workspaces.insert(0, self._create_experimental_data_workspace().name())
            prog_reporter.report("Workspace with the experimental data has been constructed.")

        GroupWorkspaces(InputWorkspaces=workspaces, OutputWorkspace=self._out_ws_name)

        # 8) save workspaces to ascii_file
        num_workspaces = mtd[self._out_ws_name].getNumberOfEntries()
        for wrk_num in range(num_workspaces):
            wrk = mtd[self._out_ws_name].getItem(wrk_num)
            SaveAscii(InputWorkspace=Scale(wrk, 1.0/self._bin_width, "Multiply"),
                      Filename=wrk.name() + ".dat", Separator="Space", WriteSpectrumID=False)
        prog_reporter.report("All workspaces have been saved to ASCII files.")

        # 9) set  OutputWorkspace
        self.setProperty('OutputWorkspace', self._out_ws_name)
        prog_reporter.report("Group workspace with all required  dynamical structure factors has been constructed.")
Example #12
    def PyExec(self):

        # 0) Create reporter to report progress
        steps = 9
        begin = 0
        end = 1.0
        prog_reporter = Progress(self, begin, end, steps)

        # 1) get input parameters from a user
        self._get_properties()
        prog_reporter.report("Input data from the user has been collected.")

        # 2) read ab initio data
        ab_initio_data = abins.AbinsData.from_calculation_data(
            self._vibrational_or_phonon_data_file, self._ab_initio_program)
        prog_reporter.report("Vibrational/phonon data has been read.")

        # 3) calculate S
        s_calculator = abins.SCalculatorFactory.init(
            filename=self._vibrational_or_phonon_data_file,
            temperature=self._temperature,
            sample_form=self._sample_form,
            abins_data=ab_initio_data,
            instrument=self._instrument,
            quantum_order_num=self._num_quantum_order_events,
            bin_width=self._bin_width)
        s_data = s_calculator.get_formatted_data()
        prog_reporter.report(
            "Dynamical structure factors have been determined.")

        # 4) get atoms for which S should be plotted
        self._extracted_ab_initio_data = ab_initio_data.get_atoms_data(
        ).extract()
        num_atoms = len(self._extracted_ab_initio_data)
        all_atms_smbls = list(
            set([
                self._extracted_ab_initio_data["atom_%s" % atom]["symbol"]
                for atom in range(num_atoms)
            ]))
        all_atms_smbls.sort()

        if len(self._atoms) == 0:  # case: all atoms
            atom_symbols = all_atms_smbls
            atom_numbers = []
        else:  # case selected atoms
            # Specific atoms are identified with prefix and integer index, e.g 'atom_5'. Other items are element symbols
            # A regular expression match is used to make the underscore separator optional and check the index format
            prefix = abins.constants.ATOM_PREFIX
            atom_symbols = [
                item for item in self._atoms if item[:len(prefix)] != prefix
            ]
            if len(atom_symbols) != len(
                    set(atom_symbols)):  # only different types
                raise ValueError(
                    "User atom selection (by symbol) contains repeated species. This is not permitted as "
                    "Abins cannot create multiple workspaces with the same name."
                )

            numbered_atom_test = re.compile('^' + prefix + r'_?(\d+)$')
            atom_numbers = [
                numbered_atom_test.findall(item) for item in self._atoms
            ]  # Matches will be lists of str
            atom_numbers = [int(match[0]) for match in atom_numbers
                            if match]  # Remove empty matches, cast rest to int

            if len(atom_numbers) != len(set(atom_numbers)):
                raise ValueError(
                    "User atom selection (by number) contains repeated atom. This is not permitted as Abins"
                    " cannot create multiple workspaces with the same name.")

            for atom_symbol in atom_symbols:
                if atom_symbol not in all_atms_smbls:
                    raise ValueError(
                        "User defined atom selection (by element) '%s': not present in the system."
                        % atom_symbol)

            for atom_number in atom_numbers:
                if atom_number < 1 or atom_number > num_atoms:
                    raise ValueError(
                        "Invalid user atom selection (by number) '%s%s': out of range (%s - %s)"
                        % (prefix, atom_number, 1, num_atoms))

            # Final sanity check that everything in "atoms" field was understood
            if len(atom_symbols) + len(atom_numbers) < len(self._atoms):
                elements_report = " Symbols: " + ", ".join(
                    atom_symbols) if len(atom_symbols) else ""
                numbers_report = " Numbers: " + ", ".join(atom_numbers) if len(
                    atom_numbers) else ""
                raise ValueError(
                    "Not all user atom selections ('atoms' option) were understood."
                    + elements_report + numbers_report)

        prog_reporter.report(
            "Atoms, for which dynamical structure factors should be plotted, have been determined."
        )

        # at the moment only types of atom, e.g, for  benzene three options -> 1) C, H;  2) C; 3) H
        # 5) create workspaces for atoms in interest
        workspaces = []
        if self._sample_form == "Powder":
            workspaces.extend(
                self._create_partial_s_per_type_workspaces(
                    atoms_symbols=atom_symbols, s_data=s_data))
            workspaces.extend(
                self._create_partial_s_per_type_workspaces(
                    atom_numbers=atom_numbers, s_data=s_data))
        prog_reporter.report(
            "Workspaces with partial dynamical structure factors have been constructed."
        )

        # 6) Create a workspace with sum of all atoms if required
        if self._sum_contributions:
            total_atom_workspaces = []
            for ws in workspaces:
                if "total" in ws:
                    total_atom_workspaces.append(ws)
            total_workspace = self._create_total_workspace(
                partial_workspaces=total_atom_workspaces)
            workspaces.insert(0, total_workspace)
            prog_reporter.report(
                "Workspace with total S has been constructed.")

        # 7) add experimental data if available to the collection of workspaces
        if self._experimental_file != "":
            workspaces.insert(
                0,
                self._create_experimental_data_workspace().name())
            prog_reporter.report(
                "Workspace with the experimental data has been constructed.")

        GroupWorkspaces(InputWorkspaces=workspaces,
                        OutputWorkspace=self._out_ws_name)

        # 8) save workspaces to ascii_file
        num_workspaces = mtd[self._out_ws_name].getNumberOfEntries()
        for wrk_num in range(num_workspaces):
            wrk = mtd[self._out_ws_name].getItem(wrk_num)
            SaveAscii(InputWorkspace=Scale(wrk, 1.0 / self._bin_width,
                                           "Multiply"),
                      Filename=wrk.name() + ".dat",
                      Separator="Space",
                      WriteSpectrumID=False)
        prog_reporter.report("All workspaces have been saved to ASCII files.")

        # 9) set  OutputWorkspace
        self.setProperty('OutputWorkspace', self._out_ws_name)
        prog_reporter.report(
            "Group workspace with all required  dynamical structure factors has been constructed."
        )
Example #13
# This fragment assumes `filename`, `outdir`, and `output_file` are defined earlier in the
# reduction script, and that os, numpy (as np), and the Mantid simpleapi are already imported.
ws = HB2AReduce(filename, Scale=20000)

def_y = ws.getRun().getLogData('def_y').value
def_x = ws.getRun().getLogData('def_x').value

anode = None
if 'anode' in def_y:  # Plot anode intensity instead
    try:
        anode = int(def_y.replace('anode', ''))
    except ValueError:
        pass

if anode:  # Re-reduce data for anode plot
    ws = HB2AReduce(filename, IndividualDetectors=True, Scale=20000)
    SaveAscii(ws, Filename=os.path.join(outdir, output_file), SpectrumList=anode-1, Separator='Space', ColumnHeader=False, WriteSpectrumID=False)
    div = SavePlot1D(ws, OutputType='plotly', SpectraList=anode)
else:
    # Check binning is correct, if not re-reduce
    if ws.getRun().hasProperty(def_x):
        x = ws.getRun().getLogData(def_x).value
        if len(x) > 1:
            step_size = (x[-1]-x[0])/(len(x)-1)
            if not np.isclose(step_size, 0.05, atol=0.001):
                ws = HB2AReduce(filename, BinWidth=step_size, Scale=20000)
    SaveFocusedXYE(ws, Filename=os.path.join(outdir, output_file), SplitFiles=False, IncludeHeader=False)
    div = SavePlot1D(ws, OutputType='plotly')

################################################################################
# login to oncat