def showRegions(self, visibility, item):
    isregion, par = self.isRegion(item)
    if isregion:
        par.opts['expanded'] = visibility
        if not self.isInitializing:
            try:
                self.parent.plotwidget.draw_regions()
            except Exception as e:
                scp.error_(e)
def addDataset(self, dataset=None):
    scp.debug_('Add a dataset')
    # Read the dataset
    try:
        if not dataset:
            dataset = scp.read(Qt_parent=self.parent, default_filter='omnic')
        if dataset is None:
            # still not determined.
            return
    except Exception as e:
        scp.error_(e)
        return
    # Create a subproject with this dataset
    subproj = scp.Project()
    self.project.add_project(subproj, dataset.name)
    subproj.add_dataset(dataset, f'{dataset.name}/original')
    # Signal
    self.dirty = True
    self.sigProjectChanged.emit('dataset added')
def _loadProject(self, *args, **kwargs):
    """
    Load a project.
    """
    if len(args) < 1:
        return
    fname = args[0]
    proj = None
    if fname is None or fname in ['', 'untitled']:
        # create a void project
        proj = scp.Project(name='untitled')
    else:
        try:
            proj = scp.Project.load(fname, **kwargs)
            proj.meta['project_file'] = fname
        except Exception as e:
            scp.error_(e)
            self.closeProject()
    return proj
nd = NDDataset()

# %% [markdown]
# If something goes wrong during a cell execution, a ``traceback`` is displayed.
#
# For instance, the object or method ``toto`` does not exist in the API, so an error (**ImportError**) is generated
# when trying to import it from the API.
#
# Here we catch the error with a conventional `try-except` structure.

# %%
try:
    from spectrochempy import toto
except ImportError as e:
    scp.error_("OOPS, THAT'S AN IMPORT ERROR! : %s" % e)

# %% [markdown]
# The error will stop the execution if not caught.
#
# This is a basic behavior of Python: one way to avoid stopping the execution, without displaying a message, is:

# %%
try:
    from spectrochempy import toto  # noqa: F811, F401
except Exception:
    pass

# %% [markdown]
# ## API Configuration
#
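# %% [markdown]
# A minimal sketch of such a configuration (assuming `set_loglevel` and the logging helpers `debug_`,
# `info_` and `warning_` are exposed on the `scp` namespace, just like `scp.error_` used above):
# the visibility of the messages emitted by the API is controlled by the log level.

# %%
scp.set_loglevel("WARNING")  # only warnings and errors are displayed
scp.debug_("not displayed at the WARNING level")
scp.info_("not displayed at the WARNING level")
scp.warning_("this warning is displayed")

scp.set_loglevel("DEBUG")  # most verbose level
scp.debug_("now debug messages are displayed too")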
d3D

# %% [markdown]
# Programmatically, we can use the attributes `is_empty` or `has_data` to check this

# %%
d3D.v.has_data, d3D.v.is_empty

# %% [markdown]
# An error is raised when a coordinate doesn't exist

# %%
try:
    d3D.x
except KeyError as e:
    scp.error_(e)

# %% [markdown]
# In some cases it can also be useful to get a coordinate from its title instead of its name (the limitation is that if
# several coordinates have the same title, only the first one found in the coordinate list will be
# returned - this can be ambiguous)

# %%
d3D["time"]

# %%
d3D.time

# %% [markdown]
# ## Labels
colormap='viridis', nrow=2, ncol=2, sharex=True, sharey=True, dpi=100)

# %% [markdown]
# The four datasets `nd1` to `nd4` have some overlapping in both dimensions. But if we want, for example, to add `nd2`
# to `nd4`, this will fail because the dimensions are not aligned.

# %%
try:
    nd2 + nd4
except Exception as e:
    scp.error_(str(e) + ' Cannot add unaligned datasets.')

# %% [markdown]
# Let's try to align them along the `y` dimension (*i.e.* the first one) as this is the one which differs in size.
# (NOTE: to find the actual names of the dimensions, just get the `dims` attribute of the datasets.)

# %%
nd2.dims, nd4.dims

# %% [markdown]
# To align them we can use different methods, depending on the expected result (missing values in the aligned datasets
# will be masked)

# %%
# `outer` method => union of the coordinates
nd2a, nd4a = scp.align(nd2, nd4, dim='y', method='outer')
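# %% [markdown]
# As a sketch of an alternative (assuming an `inner` option is supported by `align` as the counterpart of
# `outer`), the intersection of the coordinates can be used instead of their union, so that only the
# overlapping part of the two datasets is kept:

# %%
# `inner` method => intersection of the coordinates (assumed counterpart of `outer`)
nd2i, nd4i = scp.align(nd2, nd4, dim='y', method='inner')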
# ##### exp
# Exponential of all elements in the input array, element-wise.

# %%
out = np.exp(dataset)
_ = out.plot(figsize=(6, 2.5))

# %% [markdown]
# Obviously, numpy exponential functions apply only to dimensionless arrays; otherwise an error is generated.

# %%
x = scp.NDDataset(np.arange(5), units="m")
try:
    np.exp(x)  # A dimensionality error will be generated
except DimensionalityError as e:
    error_(e)

# %% [markdown]
# ##### exp2
# Calculate 2\*\*p for all p in the input array.

# %%
out = np.exp2(dataset)
_ = out.plot(figsize=(6, 2.5))

# %% [markdown]
# ##### expm1
# Calculate `exp(x) - 1` for all elements in the array.

# %%
out = np.expm1(dataset)
    max_relative_error = max(error[nonzero] / abs(y.data[nonzero]))
    return max(error), max_relative_error


# %%
max_error, max_rel_error = difference(S0, newS0)
print(f"Max absolute difference in absorbance: {max_error:.3g}")
print(f"Max relative difference in absorbance: {max_rel_error:.3g}")

# %%
max_error, max_rel_error = difference(S0.x, newS0.x)
print(f"Max absolute difference in wavenumber: {max_error:.3g}")
print(f"Max relative difference in wavenumber: {max_rel_error:.3g}")

# %% [markdown]
# This is well below the experimental accuracy, but it can still lead to undesirable effects.
#
# For instance:

# %%
try:
    S0 - newS0
except Exception as e:
    scp.error_(e)

# %% [markdown]
# returns an error because of the small shift of coordinates. We will see in another tutorial how to re-align datasets
# and deal with these small problems. It is worth noticing that similar distortions arise in commercial software...
# except that the user is not notified.
#
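# %% [markdown]
# As a hedged preview of that tutorial (a minimal sketch only): the `align` method shown elsewhere in this
# documentation can put both datasets on a common set of `x` coordinates, after which the subtraction no
# longer raises. Note that with a union-type alignment the result may contain masked values where the
# coordinates do not match exactly, so the dedicated re-alignment tools remain the proper route.

# %%
S0a, newS0a = scp.align(S0, newS0, dim="x")  # default alignment method assumed here
_ = S0a - newS0a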
def test_logger(caplog):
    logger = logging.getLogger('SpectroChemPy')
    logger.propagate = True
    caplog.set_level(logging.DEBUG)

    # We can set the level using strings
    set_loglevel("DEBUG")
    assert logger.level == logging.DEBUG

    set_loglevel(WARNING)
    assert logger.level == logging.WARNING

    error_('\n' + '*' * 80 + '\n')
    debug_('debug in WARNING level - should not appear')
    info_('info in WARNING level - should not appear')
    warning_('OK this is a Warning')
    error_('OK This is an Error')

    error_('\n' + '*' * 80 + '\n')

    set_loglevel(INFO)
    assert logger.level == logging.INFO

    debug_('debug in INFO level - should not appear')
    info_('OK - info in INFO level')
    warning_('OK this is a Warning')
    error_('OK This is an Error')

    error_('\n' + '*' * 80 + '\n')

    set_loglevel('DEBUG')
    assert logger.level == logging.DEBUG

    debug_('OK - debug in DEBUG level')
    info_('OK - info in DEBUG level')
    assert caplog.records[-1].levelname == 'INFO'
    assert caplog.records[-1].message == 'OK - info in DEBUG level'
    warning_('OK this is a Warning')
    assert caplog.records[-1].levelname == 'WARNING'
    assert caplog.records[-1].message == 'OK this is a Warning'
    error_('OK This is an Error')
    assert caplog.records[-1].levelname == 'ERROR'
    assert caplog.records[-1].message == 'OK This is an Error'
np.sqrt(x)

# %%
x = 10 * ur.radians
np.sin(x)

# %% [markdown]
# The consistency of the units is checked, and an error is generated if the quantities do not have units
# appropriate to the math operation...

# %%
x = 10 * ur.meters
try:
    np.cos(x)
except scp.DimensionalityError as e:
    # catch the error
    scp.error_(e)  # generate the error message (see API configuration)

# %% [markdown]
# ## Stripping the units
#
# If for any reason - including quick and dirty checks -
# unitless numbers are needed, the `magnitude` field can be used:

# %%
x = 10 * ur.meters
np.cos(x.magnitude)

# %% [markdown]
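# %% [markdown]
# A related sketch (assuming the pint-style `to()` conversion is available on quantities, which is not
# shown above): when the magnitude is needed in a specific unit rather than in the current one,
# converting first avoids silently mixing units.

# %%
x = 10 * ur.meters
x.to("cm").magnitude  # convert to centimeters first, then strip the units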
    nrow=2,
    ncol=2,
    sharex=True,
    sharey=True,
    dpi=100,
)

# %% [markdown]
# The four datasets `nd1` to `nd4` have some overlapping in both dimensions. But if we want, for example, to add `nd2`
# to `nd4`, this will fail because the dimensions are not aligned.

# %%
try:
    nd2 + nd4
except Exception as e:
    scp.error_(str(e) + " Cannot add unaligned datasets.")

# %% [markdown]
# Let's try to align them along the `y` dimension (*i.e.* the first one) as this is the one which differs in size.
# (NOTE: to find the actual names of the dimensions, just get the `dims` attribute of the datasets).

# %%
nd2.dims, nd4.dims

# %% [markdown]
# To align them we can use different methods, depending on the expected result (missing values in the aligned datasets
# will be masked)

# %%
# `outer` method => union of the coordinates
nd2a, nd4a = scp.align(nd2, nd4, dim="y", method="outer")
def test_logger(caplog):
    logger = logging.getLogger("SpectroChemPy")
    logger.propagate = True
    caplog.set_level(DEBUG)

    # We can set the level using strings
    set_loglevel("DEBUG")
    assert logger.handlers[0].level == INFO  # DEBUG only on the file
    assert logger.handlers[1].level == DEBUG

    set_loglevel(WARNING)
    assert logger.handlers[0].level == WARNING
    assert logger.handlers[1].level == WARNING

    error_("\n" + "*" * 80 + "\n")
    debug_("debug in WARNING level - should not appear")
    info_("info in WARNING level - should not appear")
    warning_("OK this is a Warning")
    error_("OK This is an Error")

    error_("\n" + "*" * 80 + "\n")

    set_loglevel(INFO)
    assert logger.handlers[0].level == INFO
    assert logger.handlers[1].level == INFO

    debug_("debug in INFO level - should not appear on stdout")
    info_("OK - info in INFO level")
    warning_("OK this is a Warning")
    error_("OK This is an Error")

    error_("\n" + "*" * 80 + "\n")

    set_loglevel("DEBUG")
    assert logger.handlers[0].level == INFO
    assert logger.handlers[1].level == DEBUG

    debug_("OK - debug in DEBUG level")
    info_("OK - info in DEBUG level")
    assert caplog.records[-1].levelname == "INFO"
    assert caplog.records[-1].message.endswith("OK - info in DEBUG level")
    warning_("OK this is a Warning")
    assert caplog.records[-1].levelname == "WARNING"
    assert caplog.records[-1].message.endswith("OK this is a Warning")
    error_("OK This is an Error")
    assert caplog.records[-1].levelname == "ERROR"
    assert caplog.records[-1].message.endswith("OK This is an Error")