def cleanup(self):
    """Remove every workspace from the ADS and delete the generated add-files."""
    # Wipe the analysis data service
    for name in AnalysisDataService.getObjectNames():
        DeleteWorkspace(Workspace=name)
    # Remove the '-add' files the test wrote into the default save directory
    save_dir = config['defaultsave.directory']
    for filename in ('SANS2D00028793-add.nxs', 'SANS2D00028797-add.nxs'):
        os.remove(os.path.join(save_dir, filename))
def _save_workspaces(self, directory):
    """
    Save all workspaces present in the ADS to the given directory
    :param directory: String; Path to where to save the workspaces
    """
    workspace_names = ADS.getObjectNames()
    if not workspace_names:
        # Nothing on the ADS, so nothing to write out
        return

    start_time = UsageService.getStartTime().toISO8601String()

    # One unmanaged, silent child algorithm, reused for every workspace
    alg = AlgorithmManager.createUnmanaged("GeneratePythonScript", 1)
    alg.setChild(True)
    alg.setLogging(False)

    for index, workspace_name in enumerate(workspace_names):
        # Empty group workspaces cannot produce a useful script
        if self._empty_group_workspace(workspace_name):
            continue

        output_file = os.path.join(directory, "{}.py".format(index))

        alg.initialize()
        alg.setProperty("AppendTimestamp", True)
        alg.setProperty("AppendExecCount", True)
        alg.setProperty("InputWorkspace", workspace_name)
        alg.setPropertyValue("Filename", output_file)
        alg.setPropertyValue("StartTimestamp", start_time)
        alg.setProperty("IgnoreTheseAlgs", ALGS_TO_IGNORE)
        alg.setProperty("IgnoreTheseAlgProperties", ALG_PROPERTIES_TO_IGNORE)
        alg.execute()
def _do_test_output(self, load_alg, expected_number_of_workspaces, expected_number_on_ads, workspace_type):
    """
    Verify the outputs of a SANS load algorithm.

    :param load_alg: the executed load algorithm whose output properties are checked
    :param expected_number_of_workspaces: list of six ints, one per workspace category
    :param expected_number_on_ads: int; how many workspaces should remain on the ADS
    :param workspace_type: expected workspace type(s) forwarded to _evaluate_workspace_type
    """
    # Check the number of workspaces reported by the algorithm.
    # assertEqual gives a useful failure message, unlike assertTrue(a == b).
    tags_numbers = ["NumberOfSampleScatterWorkspaces", "NumberOfSampleTransmissionWorkspaces",
                    "NumberOfSampleDirectWorkspaces", "NumberOfCanScatterWorkspaces",
                    "NumberOfCanTransmissionWorkspaces", "NumberOfCanDirectWorkspaces"]
    for num_workspaces, num_name in zip(expected_number_of_workspaces, tags_numbers):
        self.assertEqual(load_alg.getProperty(num_name).value, num_workspaces)

    # Check that workspaces were loaded with the expected type
    tags_workspaces = ["SampleScatterWorkspace", "SampleTransmissionWorkspace", "SampleDirectWorkspace",
                       "CanScatterWorkspace", "CanTransmissionWorkspace", "CanDirectWorkspace"]
    for index, (num_workspaces, workspace_name) in enumerate(zip(expected_number_of_workspaces, tags_workspaces)):
        self._evaluate_workspace_type(load_alg, num_workspaces, workspace_name, workspace_type, index)

    # Check the monitor workspaces (only the scatter entries carry monitors)
    num_monitor_workspaces = [expected_number_of_workspaces[0], expected_number_of_workspaces[3]]
    tags_monitors = ["SampleScatterMonitorWorkspace", "CanScatterMonitorWorkspace"]
    workspace_type_monitor = [Workspace2D, Workspace2D]
    for index, (num_workspaces, workspace_name) in enumerate(zip(num_monitor_workspaces, tags_monitors)):
        self._evaluate_workspace_type(load_alg, num_workspaces, workspace_name, workspace_type_monitor, index)

    # Confirm how many workspaces remain on the ADS
    self.assertEqual(len(AnalysisDataService.getObjectNames()), expected_number_on_ads)
def get_current_workspaces(self):
    """
    Get the names of all workspaces currently registered on the ADS.

    Returns
    -------
    a list of strings
    """
    current_names = AnalysisDataService.getObjectNames()
    return current_names
def _save(self):
    """Save the full session (workspaces, plots, interfaces) to the last project location."""
    ws_names = AnalysisDataService.getObjectNames()
    figures = self.plot_gfm.figs
    interfaces = self.interface_populating_function()

    saver = ProjectSaver(self.project_file_ext)
    saver.save_project(file_name=self.last_project_location,
                       workspace_to_save=ws_names,
                       plots_to_save=figures,
                       interfaces_to_save=interfaces)
    # Mark the session as saved so close prompts are suppressed
    self.__saved = True
def populate_combobox(self, combo):
    """
    Add to the combo box every visible ADS workspace with exactly one histogram.

    :param combo: Qt combo box to populate; names already present are skipped
    """
    for name in AnalysisDataService.getObjectNames():
        # Do the cheap name checks first so hidden ('__'-prefixed) and
        # already-listed workspaces never pay for an ADS retrieval
        if name.startswith("__") or combo.findText(name) >= 0:
            continue
        workspace = AnalysisDataService.retrieve(name)
        # Only single-spectrum matrix workspaces are eligible
        if hasattr(workspace, "getNumberHistograms") and workspace.getNumberHistograms() == 1:
            combo.addItem(name)
def test_that_show_all_calculates_and_shows_all_pairs_with_rebin(self):
    """show_all_pairs with rebinning enabled puts raw and rebinned pair asymmetries on the ADS."""
    self.gui_context['RebinType'] = 'Fixed'
    self.gui_context['RebinFixed'] = 2

    self.context.show_all_pairs()

    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(AnalysisDataService.getObjectNames(),
                     ['EMU19489', 'EMU19489 Pairs',
                      'EMU19489; Pair Asym; long; #1',
                      'EMU19489; Pair Asym; long; Rebin; #1',
                      'Muon Data'])
def get_ads_workspace_references():
    """
    Gets a list of handles of available workspaces on the ADS
    @return: the workspaces on the ADS.
    """
    names = AnalysisDataService.getObjectNames()
    for name in names:
        yield AnalysisDataService.retrieve(name)
def test_that_show_all_calculates_and_shows_all_groups_with_rebin(self):
    """show_all_groups with rebinning enabled puts raw and rebinned group workspaces on the ADS."""
    self.gui_context['RebinType'] = 'Fixed'
    self.gui_context['RebinFixed'] = 2

    self.context.show_all_groups()

    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(AnalysisDataService.getObjectNames(),
                     ['EMU19489', 'EMU19489 Groups',
                      'EMU19489; Group; bwd; Asymmetry; #1',
                      'EMU19489; Group; bwd; Asymmetry; Rebin; #1',
                      'EMU19489; Group; bwd; Counts; #1',
                      'EMU19489; Group; bwd; Counts; Rebin; #1',
                      'EMU19489; Group; fwd; Asymmetry; #1',
                      'EMU19489; Group; fwd; Asymmetry; Rebin; #1',
                      'EMU19489; Group; fwd; Counts; #1',
                      'EMU19489; Group; fwd; Counts; Rebin; #1',
                      'Muon Data'])
def test_load_calls_loads_successfully(self):
    """Saving a project and loading it back should restore the saved workspace."""
    working_directory = tempfile.mkdtemp()
    project_file = os.path.join(working_directory,
                                os.path.basename(working_directory) + ".mtdproj")

    # Save a project containing one sample workspace
    self.project._save_file_dialog = mock.MagicMock(return_value=working_directory)
    CreateSampleWorkspace(OutputWorkspace="ws1")
    self.project.save_as()
    self.assertEqual(self.project._save_file_dialog.call_count, 1)

    # Clear the ADS, then load the project back in
    ADS.clear()
    self.project._load_file_dialog = mock.MagicMock(return_value=project_file)
    self.project.load()

    self.assertEqual(self.project._load_file_dialog.call_count, 1)
    self.assertEqual(["ws1"], ADS.getObjectNames())
def recovery_save(self): """ The function to save a recovery checkpoint """ # Set that recovery thread is not running anymore self.pr.thread_on = False try: # Get the interfaces_list interfaces_list = find_all_windows_that_are_savable() # Check if there is anything to be saved or not if len(ADS.getObjectNames()) == 0 and len(interfaces_list) == 0: logger.debug("Project Recovery: Nothing to save") self._spin_off_another_time_thread() return logger.debug("Project Recovery: Saving started") # Create directory for save location recovery_dir = os.path.join(self.pr.recovery_directory_pid, datetime.datetime.now().strftime('%d-%m-%YT%H-%M-%S')) if not os.path.exists(recovery_dir): os.makedirs(recovery_dir) self._add_lock_file(directory=recovery_dir) # Save workspaces self._save_workspaces(directory=recovery_dir) # Save project self._save_project(directory=recovery_dir, interfaces_list=interfaces_list) self._remove_lock_file(directory=recovery_dir) # Clear the oldest checkpoints self.pr.remove_oldest_checkpoints() logger.debug("Project Recovery: Saving finished") except Exception as e: if isinstance(e, KeyboardInterrupt): raise # Fail and print to debugger logger.debug("Project Recovery: Failed to save error msg: " + str(e)) # Spin off another timer thread if not self.pr.closing_workbench: self._spin_off_another_time_thread()
def save_project(self, file_name, workspace_to_save=None, plots_to_save=None, interfaces_to_save=None, project_recovery=True): """ The method that will actually save the project and call relevant savers for workspaces, plots, interfaces etc. :param file_name: String; The file_name of the :param workspace_to_save: List; of Strings that will have workspace names in it, if None will save all :param plots_to_save: List; of matplotlib.figure objects to save to the project file. :param interfaces_to_save: List of Lists of Window and Encoder; the interfaces to save and the encoders to use :param project_recovery: Bool; If the behaviour of Project Save should be altered to function correctly inside of project recovery :return: None; If the method cannot be completed. """ # Check if the file_name doesn't exist if file_name is None: logger.warning("Please select a valid file name") return # Check this isn't saving a blank project file if (workspace_to_save is None and plots_to_save is None and interfaces_to_save is None) and project_recovery: logger.warning("Can not save an empty project") return directory = os.path.dirname(file_name) # Save workspaces to that location if project_recovery: workspace_saver = WorkspaceSaver(directory=directory) workspace_saver.save_workspaces(workspaces_to_save=workspace_to_save) saved_workspaces = workspace_saver.get_output_list() else: # Assume that this is project recovery so pass a list of workspace names saved_workspaces = ADS.getObjectNames() # Generate plots plots_to_save_list = PlotsSaver().save_plots(plots_to_save) # Save interfaces if interfaces_to_save is None: interfaces_to_save = [] interfaces = self._return_interfaces_dicts(directory=directory, interfaces_to_save=interfaces_to_save) # Pass dicts to Project Writer writer = ProjectWriter(workspace_names=saved_workspaces, plots_to_save=plots_to_save_list, interfaces_to_save=interfaces, save_location=file_name, project_file_ext=self.project_file_ext) writer.write_out()
def get_workspaces_from_ads_if_exist(file_tags, full_calibration_file_path, workspaces):
    """
    Retrieves workspaces from the ADS depending on their file tags and calibration file tags which would have
    been set by the sans loading mechanism when they were loaded the first time.

    :param file_tags: a list of file tags which we look for on the workspaces on the ADS
    :param full_calibration_file_path: the calibration file name which we look for on the workspaces on the ADS
    :param workspaces: a list of workspaces which is being updated in this function.
    """
    for name in AnalysisDataService.getObjectNames():
        candidate = AnalysisDataService.retrieve(name)
        try:
            if not has_tag(SANS_FILE_TAG, candidate):
                continue
            tag_value = get_tag(SANS_FILE_TAG, candidate)
            if tag_value in file_tags and is_calibration_correct(candidate, full_calibration_file_path):
                workspaces.append(candidate)
        except RuntimeError:
            # Tag inspection failed for this workspace -- skip it
            continue
def pyexec_setup(new_options):
    """
    Backup keys of mantid.config and clean up temporary files and workspaces
    upon algorithm completion or exception raised. Workspaces with names
    beginning with '_t_' are assumed temporary.

    Parameters
    ----------
    new_options: dict
        Dictionary of mantid configuration options to be modified.
    """
    # Hold in this tuple all temporary objects to be removed after completion
    temp_objects = namedtuple('temp_objects', 'files workspaces')
    temps = temp_objects(list(), list())

    # Override the requested config options, remembering previous values
    previous_config = dict()
    for key, value in new_options.items():
        previous_config[key] = mantid_config[key]
        mantid_config[key] = value

    try:
        yield temps
    finally:
        # reinstate the mantid options
        for key, value in previous_config.items():
            mantid_config[key] = value
        # Truthiness test instead of `is True`, so any truthy debug flag
        # keeps the temporaries for inspection
        if debug_flag:
            return
        # delete temporary files
        for file_name in temps.files:
            os.remove(file_name)
        # remove any workspace added to temps.workspaces or whose name begins
        # with "_t_"
        to_be_removed = {name for name in AnalysisDataService.getObjectNames()
                         if name.startswith('_t_')}
        for workspace in temps.workspaces:
            if isinstance(workspace, str):
                to_be_removed.add(workspace)
            else:
                to_be_removed.add(workspace.name())
        for name in to_be_removed:
            DeleteWorkspace(name)
def runTest(self):
    """Run a LARMOR added-run reduction in compatibility mode, then tidy up."""
    UseCompatibilityMode()
    LARMOR()
    Set1D()
    Detector("DetectorBench")
    MaskFile('USER_LARMOR_151B_LarmorTeam_80tubes_BenchRot1p4_M4_r3699.txt')
    Gravity(True)
    AddRuns(('13065', '13065'), 'LARMOR', 'nxs', lowMem=True)
    AssignSample('13065-add.nxs')
    WavRangeReduction(2, 4, DefaultTrans)

    # Remove everything from the ADS except the reduced result
    kept_workspace = "13065p1rear_1D_2.0_4.0"
    for name in AnalysisDataService.getObjectNames():
        if AnalysisDataService.doesExist(name) and name != kept_workspace:
            AnalysisDataService.remove(name)

    # Delete the files the reduction left in the save directory
    save_dir = config['defaultsave.directory']
    for path in (os.path.join(save_dir, 'LARMOR00013065-add.nxs'),
                 os.path.join(save_dir, 'SANS2D00013065.log')):
        if os.path.exists(path):
            os.remove(path)
def _assert_list_in_ADS(self, workspace_name_list):
    """
    Assert that every name in workspace_name_list is present on the ADS.

    :param workspace_name_list: list of workspace name strings to check
    """
    ads_list = AnalysisDataService.getObjectNames()
    for item in workspace_name_list:
        # assertIn reports the missing name and the ADS contents on failure,
        # unlike assertTrue(x in y)
        self.assertIn(item, ads_list)
def test_workspace_loading(self):
    """Loading a saved workspace should place it (and nothing else) on the ADS."""
    loader = workspaceloader.WorkspaceLoader()
    loader.load_workspaces(self.working_directory,
                           workspaces_to_load=[self.ws1_name])

    self.assertEqual(ADS.getObjectNames(), [self.ws1_name])
def test_show_raw_data_puts_raw_data_into_the_ADS(self):
    """show_raw_data should register the run, its raw-data group and the top-level groups."""
    self.context.show_raw_data()

    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(AnalysisDataService.getObjectNames(),
                     ['EMU19489', 'EMU19489 Raw Data',
                      'EMU19489_raw_data', 'Muon Data'])
def _remove_workspaces():
    """Delete every workspace currently registered on the ADS."""
    for workspace_name in AnalysisDataService.getObjectNames():
        AnalysisDataService.remove(workspace_name)
def test_that_instantiated_WorkspaceGroup_can_be_added_to_the_ADS(self):
    """Adding a freshly constructed WorkspaceGroup via mtd.add registers it on the ADS."""
    group = WorkspaceGroup()
    mtd.add("group1", group)

    self.assertEqual(AnalysisDataService.getObjectNames(), ["group1"])
    self.assertIsInstance(mtd["group1"], WorkspaceGroup)
def test_that_instantiated_WorkspaceGroup_is_not_added_to_the_ADS(self):
    """Constructing a WorkspaceGroup by itself must not register it on the ADS."""
    group = WorkspaceGroup()  # keep a live reference; it must still not appear on the ADS

    self.assertEqual(len(AnalysisDataService.getObjectNames()), 0)
def remove_all_workspaces_from_ads():
    """Clear the ADS by removing every registered workspace."""
    names_on_ads = AnalysisDataService.getObjectNames()
    for workspace_name in names_on_ads:
        AnalysisDataService.remove(workspace_name)
def test_show_all_groups_calculates_and_shows_all_groups(self):
    """show_all_groups should put asymmetry and counts workspaces for every group on the ADS."""
    self.context.show_all_groups()

    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(AnalysisDataService.getObjectNames(),
                     ['EMU19489', 'EMU19489 Groups',
                      'EMU19489; Group; bwd; Asymmetry; #1',
                      'EMU19489; Group; bwd; Counts; #1',
                      'EMU19489; Group; fwd; Asymmetry; #1',
                      'EMU19489; Group; fwd; Counts; #1',
                      'Muon Data'])
def test_project_loading(self):
    """A saved project file should load successfully and restore ws1 on the ADS."""
    loader = projectloader.ProjectLoader(project_file_ext)

    self.assertTrue(loader.load_project(working_project_file))
    self.assertEqual(ADS.getObjectNames(), ["ws1"])
def remove_all_workspaces_from_ads():
    """Remove every workspace from the ADS, leaving it empty."""
    for name in AnalysisDataService.getObjectNames():
        AnalysisDataService.remove(name)
def test_that_instantiated_WorkspaceGroup_can_be_added_to_the_ADS(self):
    """mtd.add should register a freshly constructed WorkspaceGroup on the ADS."""
    new_group = WorkspaceGroup()
    mtd.add("group1", new_group)

    self.assertEqual(AnalysisDataService.getObjectNames(), ["group1"])
    self.assertIsInstance(mtd["group1"], WorkspaceGroup)
def test_that_instantiated_WorkspaceGroup_is_not_added_to_the_ADS(self):
    """A WorkspaceGroup constructed directly must not be registered on the ADS."""
    unregistered_group = WorkspaceGroup()  # reference held; ADS must stay empty

    self.assertEqual(len(AnalysisDataService.getObjectNames()), 0)
def _assert_workspaces_exist(self, workspace_names: list):
    """
    Assert that each of the given workspace names is present on the ADS.

    :param workspace_names: list of workspace name strings to check
    """
    ads_list = AnalysisDataService.getObjectNames()
    for workspace_name in workspace_names:
        # assertIn reports the missing name and the ADS contents on failure,
        # unlike assertTrue(x in y)
        self.assertIn(workspace_name, ads_list)
def test_show_all_pairs_calculates_and_shows_all_pairs(self):
    """show_all_pairs should put the pair asymmetry workspace on the ADS."""
    self.context.show_all_pairs()

    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(AnalysisDataService.getObjectNames(),
                     ['EMU19489', 'EMU19489 Pairs',
                      'EMU19489; Pair Asym; long; #1',
                      'Muon Data'])
def test_project_loading(self):
    """Loading the working project file should succeed and restore ws1."""
    project_loader = projectloader.ProjectLoader(project_file_ext)
    load_succeeded = project_loader.load_project(working_project_file)

    self.assertTrue(load_succeeded)
    self.assertEqual(ADS.getObjectNames(), ["ws1"])
def get_all_workspace_history_from_ads():
    """Build a de-duplicated history script from every workspace on the ADS."""
    histories = [get_workspace_history_list(name) for name in ADS.getObjectNames()]
    script = convert_list_to_string(histories)
    return guarantee_unique_lines(script)
def _remove_workspaces():
    """Remove all workspaces from the ADS."""
    names = AnalysisDataService.getObjectNames()
    for name in names:
        AnalysisDataService.remove(name)
def test_workspace_loading(self):
    """WorkspaceLoader.load_workspaces should restore exactly the requested workspace."""
    ws_loader = workspaceloader.WorkspaceLoader()
    ws_loader.load_workspaces(self.working_directory,
                              workspaces_to_load=[self.ws1_name])

    self.assertEqual(ADS.getObjectNames(), [self.ws1_name])