def __init__(self, dataset, *args, **kwargs):
    self.dataset = dataset
    self.corrected = self.dataset.copy()
    if args or kwargs:
        warning_(
            "DEPRECATION WARNING: Pass all arguments, such as the ranges and the method "
            "definition, to the ``compute`` method, not to the initialisation of the "
            "BaselineCorrection instance.\n"
            "Here they are ignored."
        )
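# Illustrative usage of the pattern above (the dataset, range values and
# keyword values are hypothetical, not taken from the source): configuration
# now goes to ``compute``, not to the constructor.
#
#     blc = BaselineCorrection(dataset)
#     corrected = blc.compute([5999.0, 5400.0], [1800.0, 1500.0],
#                             method="multivariate", interpolation="pchip")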
def __init__(self, data=None, coordset=None, coordunits=None, coordtitles=None, **kwargs):
    super().__init__(data, **kwargs)

    self._parent = None

    # optionally set the coordinates with units and title
    if isinstance(coordset, CoordSet):
        self.set_coordset(**coordset)
    else:
        if coordset is None:
            coordset = [None] * self.ndim
        if coordunits is None:
            coordunits = [None] * self.ndim
        if coordtitles is None:
            coordtitles = [None] * self.ndim

        _coordset = []
        for c, u, t in zip(coordset, coordunits, coordtitles):
            if not isinstance(c, CoordSet):
                if isinstance(c, LinearCoord):
                    coord = LinearCoord(c)
                else:
                    coord = Coord(c)
                if u is not None:
                    coord.units = u
                if t is not None:
                    coord.title = t
            else:
                if u:  # pragma: no cover
                    warning_(
                        "units have been set for a CoordSet, but they will be ignored "
                        "(units are only defined at the coordinate level)"
                    )
                if t:  # pragma: no cover
                    warning_(
                        "titles will be ignored as they are only defined at the coordinate level"
                    )
                coord = c

            _coordset.append(coord)

        if _coordset and set(_coordset) != {Coord()}:
            # if there are no coordinates, do nothing
            self.set_coordset(*_coordset)
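# Usage sketch for this constructor (shapes, units and titles are made up
# for illustration): one coordinate, one unit and one title per dimension.
#
#     import numpy as np
#     nd = NDDataset(np.random.rand(10, 100),
#                    coordset=[np.arange(10.), np.linspace(4000., 1000., 100)],
#                    coordunits=["hour", "cm^-1"],
#                    coordtitles=["time", "wavenumber"])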
def _switch_protocol(self, key, files, **kwargs):
    protocol = kwargs.get("protocol", None)
    if protocol is not None and protocol != "ALL":
        if not isinstance(protocol, list):
            protocol = [protocol]
        if key and key[1:] not in protocol and self.alias[key[1:]] not in protocol:
            return

    datasets = []
    for filename in files[key]:
        filename = pathclean(filename)
        read_ = getattr(self, f"_read_{key[1:]}")
        try:
            # sometimes read_ can return None (e.g. non labspec text file)
            res = read_(self.objtype(), filename, **kwargs)
        except FileNotFoundError as e:
            # try to get the file from github
            kwargs["read_method"] = read_
            try:
                res = _read_remote(self.objtype(), filename, **kwargs)
            except OSError:
                raise e
            except IOError as e:
                warning_(str(e))
                res = None
            except NotImplementedError as e:
                warning_(str(e))
                res = None
            except Exception:
                raise e
        except IOError as e:
            warning_(str(e))
            res = None
        except NotImplementedError as e:
            warning_(str(e))
            res = None

        if res is not None:
            if not isinstance(res, list):
                datasets.append(res)
            else:
                datasets.extend(res)

    if len(datasets) > 1:
        datasets = self._do_merge(datasets, **kwargs)
        if kwargs.get("merge", False):
            datasets[0].name = pathclean(filename).stem
            datasets[0].filename = pathclean(filename)

    self.datasets.extend(datasets)
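# Minimal standalone sketch of the protocol filter used above (function and
# argument names are illustrative, not part of the class): a key such as
# ".spg" is accepted when its extension or its registered alias appears in
# the requested protocols.
def _protocol_accepts(key, protocol, alias):
    # accept everything when no filter is requested
    if protocol is None or protocol == "ALL":
        return True
    # normalize a single protocol into a list
    if not isinstance(protocol, list):
        protocol = [protocol]
    # match on the bare extension or on its alias
    return key[1:] in protocol or alias.get(key[1:]) in protocol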
def _load_clicked(self, b=None):
    # read data and reset defaults
    ds = read()
    if ds is not None:
        if isinstance(ds, NDDataset):
            self._X = ds
            self._method_selector.value = "sequential"
            self._interpolation_selector.value = "polynomial"
            self._order_slider.value = 1
            self._npc_slider.value = 1
            self._done = False
            self._enabled_process(True)
            self._process_clicked()
        else:
            raise IOError("Could not read or merge uploaded files")
    else:
        self._enabled_process(False)
        warning_("process canceled because X is None")
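# Hypothetical refactoring sketch (not in the source): the widget resets
# above could be grouped in a helper so the callback reads as
# load -> reset -> process. Widget names mirror those in ``_load_clicked``.
#
#     def _reset_defaults(self):
#         self._method_selector.value = "sequential"
#         self._interpolation_selector.value = "polynomial"
#         self._order_slider.value = 1
#         self._npc_slider.value = 1
#         self._done = False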
def _switch_protocol(self, key, files, **kwargs):
    protocol = kwargs.get('protocol', None)
    if protocol is not None and protocol != 'ALL':
        if not isinstance(protocol, list):
            protocol = [protocol]
        if key and key[1:] not in protocol and self.alias[key[1:]] not in protocol:
            return

    datasets = []
    for filename in files[key]:
        try:
            read_ = getattr(self, f"_read_{key[1:]}")
        except AttributeError:
            warning_(f'a file with extension {key} was found in this directory but will be ignored')
            # no reader for this extension: skip these files
            continue
        try:
            res = read_(self.objtype(), filename, **kwargs)
            if not isinstance(res, list):
                datasets.append(res)
            else:
                datasets.extend(res)
        except FileNotFoundError:
            warning_(f'No file with name `{filename}` could be found. Sorry!')
        except IOError as e:
            warning_(str(e))
        except Exception:
            warning_(f'The file `{filename}` has a known extension but it could not be read. It is ignored!')

    if len(datasets) > 1:
        datasets = self._do_merge(datasets, **kwargs)
        if kwargs.get('merge', False):
            datasets[0].name = pathclean(filename).stem
            datasets[0].filename = pathclean(filename)

    self.datasets.extend(datasets)
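# Sketch of the expected ``files`` mapping (paths and the protocol value are
# hypothetical): each extension key lists the files to be handled by the
# matching ``_read_<ext>`` method.
#
#     files = {'.spg': [Path('data/sample.spg')],
#              '.txt': [Path('data/spectrum.txt')]}
#     importer._switch_protocol('.spg', files, protocol='omnic', merge=False)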
def align(dataset, *others, **kwargs):
    """
    Align individual |NDDataset| along given dimensions using various methods.

    Parameters
    ----------
    dataset : |NDDataset|
        Dataset on which we want to align other objects.
    *others : |NDDataset|
        Objects to align.
    dim : str, optional, default='x'
        Along which axis to perform the alignment.
    dims : list of str, optional, default=None
        Align along all dims defined in dims (if dim is also defined, then
        dims have higher priority).
    method : enum ['outer', 'inner', 'first', 'last', 'interpolate'], optional, default='outer'
        Which method to use for the alignment:

        * 'outer' means that a union of the different coordinates is achieved
          (missing values are masked).
        * 'inner' means that the intersection of the coordinates is used.
        * 'first' means that the first dataset is used as reference.
        * 'last' means that the last dataset is used as reference.
        * 'interpolate' means that interpolation is performed relative to the
          first dataset.
    interpolate_method : enum ['linear', 'pchip'], optional, default='linear'
        Method of interpolation to perform for the alignment.
    interpolate_sampling : 'auto', int or float, optional, default='auto'
        * 'auto' : sampling is determined automatically from the existing data.
        * int : if an integer value is specified, the sampling interval for
          the interpolated data will be split into this number of points.
        * float : if a float value is provided, it determines the interval
          between the interpolated data points.
    coord : |Coord|, optional, default=None
        Coordinates to use for alignment. Ignore those corresponding to the
        dimensions to align.
    copy : bool, optional, default=True
        If False then the returned objects will share memory with the
        original objects, whenever it is possible: in principle only if
        reindexing is not necessary.

    Returns
    -------
    aligned_datasets : tuple of |NDDataset|
        Same objects as datasets with dimensions aligned.

    Raises
    ------
    ValueError
        Issued when the dimensions given in the `dim` or `dims` argument are
        not compatible (units, titles, etc.).
    """
    # DEVELOPER NOTE
    # There are probably better methods but, to simplify dealing with
    # LinearCoord, we transform them into Coord before treatment (going back
    # to linear if possible at the end of the process).
    # TODO: Perform an alignment along numeric labels
    # TODO: add example in docs

    # copy objects?
    copy = kwargs.pop('copy', True)

    # make a single list with dataset and the remaining objects
    objects = [dataset] + list(others)

    # should we align on given external coordinates
    extern_coord = kwargs.pop('coord', None)
    if extern_coord and extern_coord.implements('LinearCoord'):
        extern_coord = Coord(extern_coord, linear=False, copy=True)

    # what's the method to use (by default 'outer')
    method = kwargs.pop('method', 'outer')

    # trivial cases where alignment is not possible or unnecessary
    if not objects:
        warning_('No object provided for alignment!')
        return None

    if len(objects) == 1 and objects[0].implements('NDDataset') and extern_coord is None:
        # no alignment necessary
        return objects

    # evaluate on which axis we align
    axis, dims = dataset.get_axis(only_first=False, **kwargs)

    # check compatibility of the dims and prepare the dimension for alignment
    for axis, dim in zip(axis, dims):

        # get all objects to align
        _objects = {}
        _nobj = 0

        for idx, object in enumerate(objects):
            if not object.implements('NDDataset'):
                error_(f'Bad object(s) found: {object}. Note that only NDDataset '
                       f'objects are accepted for alignment')
                return None
            _objects[_nobj] = {'obj': object.copy(), 'idx': idx}
            _nobj += 1

        _last = _nobj - 1

        # get the reference object (by default the first, except if method is
        # set to 'last')
        ref_obj_index = 0
        if method == 'last':
            ref_obj_index = _last

        ref_obj = _objects[ref_obj_index]['obj']

        # as we will sort the coordinates at some point, we need to know
        # if they have to be reversed at the end of the alignment process
        reversed = ref_obj.coordset[dim].reversed
        if reversed:
            ref_obj.sort(descend=False, dim=dim, inplace=True)

        # get the coordset corresponding to the reference object
        ref_obj_coordset = ref_obj.coordset

        # get the coordinate for the reference dimension
        ref_coord = ref_obj_coordset[dim]
        reversed = ref_coord.reversed

        # prepare a new Coord object to store the final new dimension
        new_coord = ref_coord.copy()

        ndec = get_n_decimals(new_coord.data.max(), 1.e-5)

        if new_coord.implements('LinearCoord'):
            new_coord = Coord(new_coord, linear=False, copy=True)

        # loop on all objects
        for index, object in _objects.items():

            obj = object['obj']

            if obj is ref_obj:
                # not necessary to compare with itself!
                continue

            if reversed:
                obj.sort(descend=False, dim=dim, inplace=True)

            # get the current object coordinates and check compatibility
            coord = obj.coordset[dim]
            if coord.implements('LinearCoord') or coord.linear:
                coord = Coord(coord, linear=False, copy=True)

            if not coord.is_units_compatible(ref_coord):
                # not compatible, stop everything
                raise UnitsCompatibilityError(
                    'NDDataset to align must have compatible units!')

            # do a units transform if necessary so coords can be compared
            if coord.units != ref_coord.units:
                coord.ito(ref_coord)

            # adjust the new_coord depending on the method of alignment
            new_coord_data = set(np.around(new_coord.data, ndec))
            coord_data = set(np.around(coord.data, ndec))

            if method in ['outer', 'interpolate']:
                # we do a union of the coords (masking the missing values);
                # for method='interpolate', the interpolation will be
                # performed in a second step
                new_coord._data = sorted(coord_data | new_coord_data)
            elif method == 'inner':
                # take only the intersection of the coordinates
                # (TODO: generate a warning if the result is empty)
                new_coord._data = sorted(coord_data & new_coord_data)
            elif method in ['first', 'last']:
                # we take the reference coordinates already determined as
                # basis (masking the missing values)
                continue
            else:
                raise NotImplementedError(f'The method {method} is unknown!')

        # now perform the alignment of all objects on the new coordinates
        for index, object in _objects.items():

            obj = object['obj']

            # get the dim index for the given object
            dim_index = obj.dims.index(dim)

            # prepare slicing keys ; set slice(None) for the untouched
            # dimensions preceding the dimension of interest
            prepend_keys = [slice(None)] * dim_index

            # a new object must be created with the new coordinates:
            # change the data shape accordingly
            new_obj_shape = list(obj.shape)
            new_obj_shape[dim_index] = len(new_coord)
            new_obj_data = np.full(new_obj_shape, np.NaN)

            # create the new dataset
            if copy:
                new_obj = obj.copy()
            else:
                new_obj = obj

            # update the data and mask
            coord = obj.coordset[dim]
            coord_data = set(np.around(coord.data, ndec))

            dim_loc = new_coord._loc2index(sorted(coord_data))
            loc = tuple(prepend_keys + [dim_loc])

            new_obj._data = new_obj_data

            # mask all the data, then unmask the relevant data in the next
            # step
            if not new_obj.is_masked:
                new_obj.mask = MASKED
                new_obj.mask[loc] = False
            else:
                mask = new_obj.mask.copy()
                new_obj.mask = MASKED
                new_obj.mask[loc] = mask

            # set the data for the loc
            new_obj._data[loc] = obj.data

            # update the coordinates
            new_coordset = obj.coordset.copy()
            if coord.is_labeled:
                label_shape = list(coord.labels.shape)
                label_shape[0] = new_coord.size
                new_coord._labels = np.zeros(tuple(label_shape)).astype(coord.labels.dtype)
                new_coord._labels[:] = '--'
                new_coord._labels[dim_loc] = coord.labels
            setattr(new_coordset, dim, new_coord)
            new_obj._coordset = new_coordset

            # reversed?
            if reversed:
                # we must reverse the given coordinates
                new_obj.sort(descend=reversed, dim=dim, inplace=True)

            # update the _objects
            _objects[index]['obj'] = new_obj

        if method == 'interpolate':
            warning_('Interpolation not yet implemented - for now equivalent '
                     'to `outer`')

    # the new transformed objects must be in the same order as the passed
    # objects, and the missing values must be masked (for the moment they are
    # set to NaN)
    for index, object in _objects.items():
        obj = object['obj']
        # obj[np.where(np.isnan(obj))] = MASKED  # mask NaN values
        obj[np.where(np.isnan(obj))] = 99999999999999.  # replace NaN values
        # (to simplify comparisons)
        idx = int(object['idx'])
        objects[idx] = obj

        # we could also transform back into linear coord if possible?
        pass  # TODO:

    # now return
    return tuple(objects)
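# Usage sketch for ``align`` (the datasets ds1 and ds2 are hypothetical):
# take the union of the 'x' coordinates of both datasets, masking the
# missing values.
#
#     ds1a, ds2a = align(ds1, ds2, dim='x', method='outer')
#
# With method='inner' only the shared coordinates are kept, while
# method='first' or 'last' reindexes everything on one reference dataset.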
def optimize(func, fp0, args=(), constraints={}, method="SIMPLEX",
             maxfun=None, maxiter=1000, ftol=1e-8, xtol=1e-8, callback=None):
    """
    Minimize `func` with respect to a set of fit parameters.

    Parameters
    ----------
    func : callable
        Function to minimize. It is called as ``func(fp, dat[, constraints])``,
        where ``dat`` is the object passed in `args` and ``fp`` holds the
        current external parameter values.
    fp0 : FitParameters
        Initial fit parameters.
    args : tuple, optional
        Extra argument passed to `func` (typically the dataset to fit).
    constraints : dict, optional
        Constraints, appended to the arguments passed to `func`.
    method : str, optional, default="SIMPLEX"
        Optimization algorithm: "SIMPLEX" (Nelder-Mead simplex) or
        "HOPPING" (basin-hopping).
    maxfun : int, optional
        Maximum number of function evaluations (default: ``4 * maxiter``).
    maxiter : int, optional, default=1000
        Maximum number of iterations.
    ftol : float, optional, default=1e-8
        Function value tolerance for convergence.
    xtol : float, optional, default=1e-8
        Parameter tolerance for convergence.
    callback : callable, optional
        Function called after each iteration.

    Returns
    -------
    fpe : FitParameters
        The optimized (external) fit parameters.
    fopt : float
        The final value of the objective function.
    """
    global keys

    def restore_external(fp, p, keys):
        # restore external parameters
        for key in list(fp.keys()):
            keysp = key.split('_')
            if keysp[0] in fp.expvars:
                ps = []
                for i in range(fp.expnumber):
                    ks = "%s_exp%d" % (key, i)
                    if ks not in keys:
                        break
                    k = keys.index(ks)
                    ps.append(p[k])
                if len(ps) > 0:
                    fp.to_external(key, ps)
            else:
                if key not in keys:
                    continue
                k = keys.index(key)
                fp.to_external(key, p[k])
        return fp

    def internal_func(p, dat, fp, keys, *args):
        fp = restore_external(fp, p, keys)
        return func(fp, dat, *args)

    def internal_callback(*args):
        if callback is None:
            return
        return callback(*args)

    if not isinstance(fp0, FitParameters):
        raise TypeError('fp0 is not of FitParameters type')

    # make internal parameters
    par = []
    keys = []

    for key in sorted(fp0.keys()):
        if not fp0.fixed[key]:
            # we make internal parameters in case of bounding;
            # we also take care of the multiple experiments
            keysp = key.split('_')[0]
            if keysp in fp0.expvars:
                for i in range(fp0.expnumber):
                    par.append(fp0.to_internal(key, i))
                    keys.append("%s_exp%d" % (key, i))
            else:
                par.append(fp0.to_internal(key))
                keys.append(key)

    args = list(args)
    args.append(fp0)
    args.append(keys)
    if constraints:
        args.append(constraints)

    if not maxfun:
        maxfun = 4 * maxiter

    if method.upper() == "SIMPLEX":
        result = scipy.optimize.fmin(internal_func, par, args=tuple(args),
                                     maxfun=maxfun, maxiter=maxiter,
                                     ftol=ftol, xtol=xtol, full_output=True,
                                     disp=False, callback=internal_callback)
        res, fopt, iterations, funcalls, warnmess = result

    elif method.upper() == "HOPPING":
        result = scipy.optimize.basinhopping(
            internal_func, par, niter=100, T=1.0, stepsize=0.5,
            minimizer_kwargs={'args': tuple(args)}, take_step=None,
            accept_test=None, callback=internal_callback, interval=50,
            disp=False, niter_success=None)
        res, fopt, warnmess = result.x, result.fun, result.message

    else:
        # TODO: implement other algorithms
        raise NotImplementedError("method: %s" % method)

    # restore the external parameters
    fpe = restore_external(fp0, res, keys)

    if warnmess == 1:
        warning_("Maximum number of function evaluations made.")
    if warnmess == 2:
        warning_("Maximum number of iterations reached.")

    return fpe, fopt
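# Usage sketch for ``optimize`` (the model, data and parameter names are
# hypothetical): the objective receives the restored FitParameters and the
# dataset passed through ``args``, and returns the scalar to minimize.
#
#     def chi2(fp, dat):
#         model = fp['amplitude'] * np.exp(-dat.x.data / fp['tau'])
#         return np.sum((dat.data - model) ** 2)
#
#     fpe, fopt = optimize(chi2, fp0, args=(dat,), method="SIMPLEX",
#                          maxiter=500)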