def __randbelow(self, n, _int=int, maxsize=1 << BPF, _type=type,
                Method=_MethodType, BuiltinMethod=_BuiltinMethodType):
    random = self.random
    getrandbits = self.getrandbits
    if _type(random) is BuiltinMethod or _type(getrandbits) is Method:
        k = n.bit_length()  # don't use (n-1) here because n can be 1
        r = getrandbits(k)  # 0 <= r < 2**k
        while r >= n:
            r = getrandbits(k)
        return r
    # There's an overridden random() method but no new getrandbits() method,
    # so we can only use random() from here.
    if n >= maxsize:
        _warn("Underlying random() generator does not supply \n"
              "enough bits to choose from a population range this large.\n"
              "To remove the range limitation, add a getrandbits() method.")
        return _int(random() * n)
    rem = maxsize % n
    limit = (maxsize - rem) / maxsize  # int(limit * maxsize) % n == 0
    r = random()
    while r >= limit:
        r = random()
    return _int(r * maxsize) % n
def __init__(self, config=None, identity_cache=None, state_cache=None,
             virtual_organization="", config_file="", msg_cb=None):
    """
    :param config: A saml2.config.Config instance
    :param identity_cache: Where the class should store identity information
    :param state_cache: Where the class should keep state information
    :param virtual_organization: A specific virtual organization
    """
    Entity.__init__(self, "sp", config, config_file, virtual_organization,
                    msg_cb=msg_cb)

    self.users = Population(identity_cache)
    self.lock = threading.Lock()

    # for server state storage
    if state_cache is None:
        self.state = {}  # in memory storage
    else:
        self.state = state_cache

    attribute_defaults = {
        "logout_requests_signed": False,
        "logout_responses_signed": False,
        "allow_unsolicited": False,
        "authn_requests_signed": False,
        "want_assertions_signed": False,
        "want_response_signed": True,
        "want_assertions_or_response_signed": False,
    }
    for attr, val_default in attribute_defaults.items():
        val_config = self.config.getattr(attr, "sp")
        val = val_config if val_config is not None else val_default
        if val == 'true':
            val = True
        setattr(self, attr, val)

    if self.entity_type == "sp" and not any([
        self.want_assertions_signed,
        self.want_response_signed,
        self.want_assertions_or_response_signed,
    ]):
        warn_msg = (
            "The SAML service provider accepts "
            "unsigned SAML Responses and Assertions. "
            "This configuration is insecure. "
            "Consider setting want_assertions_signed, want_response_signed "
            "or want_assertions_or_response_signed configuration options.")
        logger.warning(warn_msg)
        _warn(warn_msg)

    self.artifact2response = {}
def print_progress(iteration, total, prefix='', suffix='', decimals=2, bar_length=30):
    """
    Call in a loop to create terminal progress bar
    @params:
        iteration   - Required  : current iteration (Int)
        total       - Required  : total iterations (Int)
        prefix      - Optional  : prefix string (Str)
        suffix      - Optional  : suffix string (Str)
        decimals    - Optional  : number of decimals in percent complete (Int)
        bar_length  - Optional  : character length of progbar (Int)
    """
    if total == 0:
        _warn('Total iterations was set to zero.')
        return
    filled_length = int(round(bar_length * iteration / float(total))) if total > 0 else 0
    if iteration / float(total) > 1:
        total = iteration
    percents = round(100.00 * (iteration / float(total)), decimals)
    if bar_length > 0:
        progbar = '#' * filled_length + '-' * (bar_length - filled_length)
    else:
        progbar = ''
    _sys.stdout.write('%s [%s] %s%s %s\r' % (prefix, progbar, percents, '%', suffix))
    _sys.stdout.flush()
    if iteration == total:
        print("\n")
def _randbelow(self, n, _log=_log, _int=int, _maxwidth=1<<BPF,
               _Method=_MethodType, _BuiltinMethod=_BuiltinMethodType):
    """Return a random int in the range [0,n)

    Handles the case where n has more bits than returned
    by a single call to the underlying generator.
    """
    try:
        getrandbits = self.getrandbits
    except AttributeError:
        pass
    else:
        # Only call self.getrandbits if the original random() builtin method
        # has not been overridden or if a new getrandbits() was supplied.
        # This assures that the two methods correspond.
        if type(self.random) is _BuiltinMethod or type(getrandbits) is _Method:
            k = _int(1.00001 + _log(n-1, 2.0))  # 2**k > n-1 > 2**(k-2)
            r = getrandbits(k)
            while r >= n:
                r = getrandbits(k)
            return r
    if n >= _maxwidth:
        _warn("Underlying random() generator does not supply \n"
              "enough bits to choose from a population range this large")
    return _int(self.random() * n)
def _randbelow(self, n, int=int, maxsize=1 << BPF, type=type,
               Method=_MethodType, BuiltinMethod=_BuiltinMethodType):
    "Return a random int in the range [0,n). Raises ValueError if n==0."

    random = self.random
    getrandbits = self.getrandbits
    # Only call self.getrandbits if the original random() builtin method
    # has not been overridden or if a new getrandbits() was supplied.
    if type(random) is BuiltinMethod or type(getrandbits) is Method:
        k = n.bit_length()  # don't use (n-1) here because n can be 1
        r = getrandbits(k)  # 0 <= r < 2**k
        while r >= n:
            r = getrandbits(k)
        return r
    # There's an overridden random() method but no new getrandbits() method,
    # so we can only use random() from here.
    if n >= maxsize:
        _warn("Underlying random() generator does not supply \n"
              "enough bits to choose from a population range this large.\n"
              "To remove the range limitation, add a getrandbits() method.")
        return int(random() * n)
    rem = maxsize % n
    limit = (maxsize - rem) / maxsize  # int(limit * maxsize) % n == 0
    r = random()
    while r >= limit:
        r = random()
    return int(r * maxsize) % n
def __eq__(self, other):
    try:
        r = isinstance(self, other)
        _warn("Do not use == to check for event type. That is hacky. Use isinstance instead.")
        return r
    except TypeError:
        return False
def discontinued(_type, name, version, reason=None, stack_level=2):
    """convenience func to warn about discontinued attributes

    Parameters
    ----------
    _type
        should be one of class, method, function, argument
    name
        the attributes name
    version
        the version by which support for the old name will be discontinued
    reason
        why, and what choices users have
    stack_level
        as per warnings.warn
    """
    msg = f"{_type} {name} is discontinued, support will be stopped in version {version}"
    if reason is not None:
        msg = f"{msg}\n{reason}"

    with catch_warnings():
        simplefilter("always")
        _warn(msg, DeprecationWarning, stack_level)
def build_cipher(self, *args, **kwargs):
    _deprecation_msg = (
        "The 'Fernet' class does not need a build_cipher method. "
        "Remove any calls to this method. "
        "In the next version, this method will be removed.")
    logger.warning(_deprecation_msg)
    _warn(_deprecation_msg, DeprecationWarning)
def catch(code=None, caught=None, always=None):
    """Catch a certain error code.

    Note: This function is deprecated and remains only for backwards
    compatibility.

    ``code`` (either a string or an object with a __contains__ method) is the
    error code(s) to catch. If it is a string, it is directly compared.
    Otherwise, it is checked for membership. If it is None, all exceptions
    are caught.

    ``caught`` is a callback for the ``except`` part of try/except/finally.
    It must take a single argument for the exception object.

    ``always`` is a callback for the ``finally`` part of the
    try/except/finally. Use functools.partial for arguments.

    If ``caught`` or ``always`` are None, behavior is to pass.
    """
    _warn('``catch`` is deprecated in favor of normal try...except blocks with'
          ' arbitrary attributes of WikiError. It may be removed in future'
          ' releases.', DeprecationWarning)
    try:
        yield
    except WikiError as exc:
        if (code is not None) and isinstance(code, str):
            if type(exc).__name__ != code:
                raise
        elif code is not None:
            if type(exc).__name__ not in code:
                raise
        if caught is not None:
            caught(exc)
    finally:
        if always is not None:
            always()
def shuffle(self, x, random=None):
    """Shuffle list x in place, and return None.

    Optional argument random is a 0-argument function returning a
    random float in [0.0, 1.0); if it is the default None, the
    standard random.random will be used.
    """
    if random is None:
        randbelow = self._randbelow
        for i in reversed(range(1, len(x))):
            # pick an element in x[:i+1] with which to exchange x[i]
            j = randbelow(i + 1)
            x[i], x[j] = x[j], x[i]
    else:
        _warn('The *random* parameter to shuffle() has been deprecated\n'
              'since Python 3.9 and will be removed in a subsequent '
              'version.',
              DeprecationWarning, 2)
        floor = _floor
        for i in reversed(range(1, len(x))):
            # pick an element in x[:i+1] with which to exchange x[i]
            j = floor(random() * (i + 1))
            x[i], x[j] = x[j], x[i]
def __init__(self, E, lconst, langle, unitcell, N, repeats=None,
             rotangles=(0, 0, 0), fwhm=None, rho=2):
    if fwhm is not None:
        cosa, cosb, cosc = _np.cos(_np.array(langle))
        sina, sinb, sinc = _np.sin(_np.array(langle))
        neededrepeats = _np.ceil(
            2 * _np.array(fwhm)
            / (_np.array([1, sinc, _np.sqrt(sinb ** 2 - ((cosa - cosb * cosc) / sinc) ** 2)])
               * _np.array(lconst))
        ).astype(int)
        if repeats is not None:
            if _np.any(neededrepeats > repeats):
                _warn('Number of repeats small for chosen fwhm')
        else:
            repeats = neededrepeats
    if repeats is None:
        repeats = 3 * [int(_np.rint((N / len(unitcell)) ** (1 / 3.0)))]
    if _np.prod(repeats) * len(unitcell) < N:
        _warn('Number of atoms high for atoms in focus')
    allpos = crystal._lattice(lconst, langle, unitcell, repeats, rng=self.rng)
    if _np.any(rotangles):
        self._rotmatrix = _rotation(*rotangles)
        allpos = _np.matmul(self._rotmatrix, allpos.T, order='F').T
    else:
        self._rotmatrix = False
    self._allpos = allpos
    self.rndOrientation = False
    self._p = None
    self.fwhm = None if fwhm is None else _np.array(fwhm)
    self.rho = _np.array(2) if rho is None else _np.array(rho)
    super().__init__(E, N)
    self._resetproperties = ['rho', 'fwhm']
def update(self, *arg, **kw):
    if arg:
        if len(arg) > 1:
            raise TypeError("at most one non-keyword argument permitted")
        arg = arg[0]
        if hasattr(arg, "keys"):
            if not self.is_ordered(arg):
                _warn(_WRNnoOrderArg, RuntimeWarning, stacklevel=2)
            super(StableDict, self).update(arg)
            ksl = self.__ksl
            for k in arg.keys():
                if k not in ksl:
                    ksl.append(k)
            self.__ksl = ksl
        else:
            # Must be a sequence of 2-tuples.
            for pair in arg:
                if len(pair) != 2:
                    raise ValueError("not a 2-tuple", pair)
                self.__setitem__(pair[0], pair[1])
    if kw:
        # There have been additional keyword arguments.
        # Since Python passes them in an (unordered) dict
        # we cannot possibly preserve their order (without
        # inspecting the source or byte code of the call).
        if len(kw) > 1:
            _warn(_WRNnoOrderKW, RuntimeWarning, stacklevel=2)
        super(StableDict, self).update(kw)
        ksl = self.__ksl
        for k in kw.iterkeys():
            if k not in ksl:
                ksl.append(k)
        self.__ksl = ksl
def _randbelow(self, n, _log=_log, int=int, _maxwidth=1 << BPF,
               _Method=_MethodType, _BuiltinMethod=_BuiltinMethodType):
    """Return a random int in the range [0,n)

    Handles the case where n has more bits than returned
    by a single call to the underlying generator.
    """
    try:
        getrandbits = self.getrandbits
    except AttributeError:
        pass
    else:
        if type(self.random) is _BuiltinMethod or type(getrandbits) is _Method:
            k = int(1.00001 + _log(n - 1, 2.0))
            r = getrandbits(k)
            while r >= n:
                r = getrandbits(k)
            return r
    if n >= _maxwidth:
        _warn('Underlying random() generator does not supply \n'
              'enough bits to choose from a population range this large')
    return int(self.random() * n)
def viewMolecules(files, idxs=None):
    """View the molecules contained in the passed file(s). Optionally supply
    a list of indices of molecules you want to view. This views the molecules
    and also returns a view object that will allow you to change the view,
    e.g. choosing different molecules to view etc.
    """
    if not _is_notebook():
        _warn("You can only view molecules from within a Jupyter notebook.")
        return None

    if isinstance(files, str):
        files = [files]

    print("Reading molecules from '%s'" % files)
    s = IO.readMolecules(files)

    print("Rendering the molecules...")
    v = Notebook.View(s)

    if idxs:
        v.molecules(idxs)
    else:
        v.molecules()

    return v
def _randbelow(self, n, int=int, maxsize=1 << BPF, type=type,
               Method=_MethodType, BuiltinMethod=_BuiltinMethodType):
    getrandbits = self.getrandbits
    if type(self.random) is BuiltinMethod or type(getrandbits) is Method:
        k = n.bit_length()
        r = getrandbits(k)
        while r >= n:
            r = getrandbits(k)
        return r
    random = self.random
    if n >= maxsize:
        _warn('Underlying random() generator does not supply \n'
              'enough bits to choose from a population range this large.\n'
              'To remove the range limitation, add a getrandbits() method.')
        return int(random() * n)
    rem = maxsize % n
    limit = (maxsize - rem) / maxsize
    r = random()
    while r >= limit:
        r = random()
    return int(r * maxsize) % n
def deprecated(_type, old, new, version, reason=None, stack_level=2):
    """a convenience function for deprecating classes, functions, arguments.

    Parameters
    ----------
    _type
        should be one of class, method, function, argument
    old, new
        the old and new names
    version
        the version by which support for the old name will be discontinued
    reason
        why, and what choices users have
    stack_level
        as per warnings.warn
    """
    msg = f"use {_type} {new} instead of {old}, support discontinued in version {version}"
    if reason is not None:
        msg = f"{msg}\n{reason}"

    with catch_warnings():
        simplefilter("always")
        _warn(msg, DeprecationWarning, stack_level)
def _randbelow(self, n, int=int, maxsize=1<<BPF, type=type,
               Method=_MethodType, BuiltinMethod=_BuiltinMethodType):
    "Return a random int in the range [0,n). Raises ValueError if n==0."

    getrandbits = self.getrandbits
    # Only call self.getrandbits if the original random() builtin method
    # has not been overridden or if a new getrandbits() was supplied.
    if type(self.random) is BuiltinMethod or type(getrandbits) is Method:
        k = n.bit_length()  # don't use (n-1) here because n can be 1
        r = getrandbits(k)  # 0 <= r < 2**k
        while r >= n:
            r = getrandbits(k)
        return r
    # There's an overridden random() method but no new getrandbits() method,
    # so we can only use random() from here.
    random = self.random
    if n >= maxsize:
        _warn("Underlying random() generator does not supply \n"
              "enough bits to choose from a population range this large.\n"
              "To remove the range limitation, add a getrandbits() method.")
        return int(random() * n)
    rem = maxsize % n
    limit = (maxsize - rem) / maxsize  # int(limit * maxsize) % n == 0
    r = random()
    while r >= limit:
        r = random()
    return int(r*maxsize) % n
def discontinued(_type, name, version, stack_level=2): """convenience func to warn about discontinued attributes Parameters ---------- _type should be one of class, method, function, argument name the attributes name version the version by which support for the old name will be discontinued stack_level as per warnings.warn """ msg = "%s %s is discontinued, support will be stopped in version %s" % ( _type, name, version, ) with catch_warnings(): simplefilter("always") _warn(msg, DeprecationWarning, stack_level)
def inner_decorator(init_or_class):
    if _isclass(init_or_class):
        func = getattr(init_or_class, '__init__')
    elif _isfunction(init_or_class):
        func = init_or_class
    else:
        raise ValueError("autoinit decorator should be applied to class or its __init__ method")
    if (func.__name__ != '__init__' or func.__code__.co_name != '__init__') and not no_warn:
        _warn(AutoinitWarning("autoinit decorator intended to be applied only to __init__"
                              " method (use autoinit(no_warn=True) to suppress this warning)"))

    args_names = func.__code__.co_varnames[1:func.__code__.co_argcount]

    @_wraps(func)
    def inner(self, *args, **kwargs):
        if reverse:
            func(self, *args, **kwargs)
        args_vals = args[:]
        if func.__defaults__:
            args_vals += func.__defaults__[len(args) - len(args_names):]
        for key, val in zip(args_names, args_vals):
            if key not in exclude:
                if (type(self.__class__).__name__ != 'classobj'
                        and hasattr(self, '__slots__')
                        and key not in self.__slots__):
                    raise AttributeError("Can not assign attribute '%s': it is not "
                                         "listed in %s.__slots__" % (key, self.__class__))
                setattr(self, key, val)
        if not reverse:
            func(self, *args, **kwargs)

    if _isclass(init_or_class):
        init_or_class.__init__ = inner
        return init_or_class
    return inner
def deprecated(_type, old, new, version, stack_level=2): """a convenience function for deprecating classes, functions, arguments. Parameters ---------- _type should be one of class, method, function, argument old, new the old and new names version the version by which support for the old name will be discontinued stack_level as per warnings.warn """ msg = "use %s %s instead of %s, support discontinued in version %s" % ( _type, new, old, version, ) # DeprecationWarnings are ignored by default in python 2.7, so temporarily # force them to be handled. with catch_warnings(): simplefilter("always") _warn(msg, DeprecationWarning, stack_level)
def _randbelow(self, n, int=int, maxsize=1 << BPF, type=type, Method=_MethodType, BuiltinMethod=_BuiltinMethodType): """Return a random int in the range [0,n). Raises ValueError if n==0.""" random = self.random getrandbits = self.getrandbits if type(random) is BuiltinMethod or type(getrandbits) is Method: k = n.bit_length() r = getrandbits(k) while 1: if r >= n: r = getrandbits(k) return r elif n >= maxsize: _warn( 'Underlying random() generator does not supply \nenough bits to choose from a population range this large.\nTo remove the range limitation, add a getrandbits() method.' ) return int(random() * n) else: if n == 0: raise ValueError('Boundary cannot be zero') rem = maxsize % n limit = (maxsize - rem) / maxsize r = random() while 1: if r >= limit: r = random() return int(r * maxsize) % n
def _randbelow(
        self,
        n,
        _log=_log,
        _int=int,
        _maxwidth=1 << BPF,
        _Method=_MethodType,
        _BuiltinMethod=_BuiltinMethodType,
):
    """Return a random int in the range [0,n)

    Handles the case where n has more bits than returned
    by a single call to the underlying generator.
    """
    try:
        getrandbits = self.getrandbits
    except AttributeError:
        pass
    else:
        # Only call self.getrandbits if the original random() builtin method
        # has not been overridden or if a new getrandbits() was supplied.
        # This assures that the two methods correspond.
        if type(self.random) is _BuiltinMethod or type(getrandbits) is _Method:
            k = _int(1.00001 + _log(n - 1, 2.0))  # 2**k > n-1 > 2**(k-2)
            r = getrandbits(k)
            while r >= n:
                r = getrandbits(k)
            return r
    if n >= _maxwidth:
        _warn("Underlying random() generator does not supply \n"
              "enough bits to choose from a population range this large")
    return _int(self.random() * n)
def pbkdf2_hmac(hash_name, password, salt, iterations, dklen=None):
    """Password based key derivation function 2 (PKCS #5 v2.0)

    This Python implementation, based on the hmac module, is about as fast
    as OpenSSL's PKCS5_PBKDF2_HMAC for short passwords and much faster for
    long passwords.
    """
    _warn("Python implementation of pbkdf2_hmac() is deprecated.",
          category=DeprecationWarning, stacklevel=2)
    if not isinstance(hash_name, str):
        raise TypeError(hash_name)

    if not isinstance(password, (bytes, bytearray)):
        password = bytes(memoryview(password))
    if not isinstance(salt, (bytes, bytearray)):
        salt = bytes(memoryview(salt))

    # Fast inline HMAC implementation
    inner = new(hash_name)
    outer = new(hash_name)
    blocksize = getattr(inner, 'block_size', 64)
    if len(password) > blocksize:
        password = new(hash_name, password).digest()
    password = password + b'\x00' * (blocksize - len(password))
    inner.update(password.translate(_trans_36))
    outer.update(password.translate(_trans_5C))

    def prf(msg, inner=inner, outer=outer):
        # PBKDF2_HMAC uses the password as key. We can re-use the same
        # digest objects and just update copies to skip initialization.
        icpy = inner.copy()
        ocpy = outer.copy()
        icpy.update(msg)
        ocpy.update(icpy.digest())
        return ocpy.digest()

    if iterations < 1:
        raise ValueError(iterations)
    if dklen is None:
        dklen = outer.digest_size
    if dklen < 1:
        raise ValueError(dklen)

    dkey = b''
    loop = 1
    from_bytes = int.from_bytes
    while len(dkey) < dklen:
        prev = prf(salt + loop.to_bytes(4))
        # endianness doesn't matter here as long to / from use the same
        rkey = from_bytes(prev)
        for i in range(iterations - 1):
            prev = prf(prev)
            # rkey = rkey ^ prev
            rkey ^= from_bytes(prev)
        loop += 1
        dkey += rkey.to_bytes(inner.digest_size)

    return dkey[:dklen]
def readPDB(id, property_map={}):
    """Read a molecular system from a Protein Data Bank (PDB) ID from the
    RCSB PDB website.

    Parameters
    ----------

    id : str
        The PDB ID string.

    property_map : dict
        A dictionary that maps system "properties" to their user defined
        values. This allows the user to refer to properties with their own
        naming scheme, e.g. { "charge" : "my-charge" }

    Returns
    -------

    system : :class:`System <BioSimSpace._SireWrappers.System>`
        A molecular system.

    Examples
    --------

    Create a molecular system from the deoxy human haemoglobin Protein
    Data Bank (PDB) record.

    >>> import BioSimSpace as BSS
    >>> system = BSS.readPDB("1a3n")
    """
    if not _has_pypdb:
        _warn("BioSimSpace.IO: PyPDB could not be imported on this system.")
        return None

    if type(id) is not str:
        raise TypeError("'id' must be of type 'str'")

    # Strip any whitespace from the PDB ID and convert to upper case.
    id = id.replace(" ", "").upper()

    # Create a temporary directory to write the PDB file.
    tmp_dir = _tempfile.TemporaryDirectory()

    # Attempt to download the PDB file. (Compression is currently broken!)
    try:
        pdb_string = _pypdb.get_pdb_file(id, filetype="pdb", compression=False)
    except:
        raise IOError("Invalid PDB ID: '%s'" % id)

    # Create the name of the PDB file.
    pdb_file = "%s/%s.pdb" % (tmp_dir.name, id)

    # Now write the PDB string to file.
    with open(pdb_file, "w") as file:
        file.write(pdb_string)

    # Read the file and return a molecular system.
    return readMolecules(pdb_file, property_map)
def code_blocks(readme: _Readme) -> None:
    """If no code blocks in file, warn.

    :param readme: Instantiated ``Readme`` object.
    """
    if not readme:
        _warn("file contains no code-blocks", RuntimeWarning)
        _sys.exit(0)
def _check_dtype_mistake(dtype):
    """
    It's a very common mistake (at least for me) to pass in a float64 when
    I really want to pass in a `floatX`, and it would go unnoticed and slow
    down the computations a lot if I didn't check it here.
    """
    if _np.issubdtype(dtype, _np.floating) and dtype != df.floatX:
        _warn("Input array of floating-point dtype {} != df.floatX detected. "
              "Is this really what you want?".format(dtype))
def rotangles(self):
    if self._rotmatrix is None:
        _warn('Rotation not used for this object')
        return [0, 0, 0]
    elif self._rotmatrix is False:
        return [0, 0, 0]
    else:
        return _angles(self._rotmatrix)
def evaluate(self):
    df.Module.evaluate(self)
    self.Winf.set_value(self.W.get_value()
                        / _np.sqrt(self.buf_var.get_value() + self.eps))
    self.binf.set_value(self.b.get_value()
                        - self.Winf.get_value() * self.buf_mean.get_value())

    # This check saved me from WTF'ing countless times!
    if self.buf_count.get_value() == 0:
        _warn("You're switching a BN-net to eval mode without having collected "
              "any statistics, that can't go well!")
def filter(self, ava, sp_entity_id, mdstore=None, required=None, optional=None):
    """
    What attributes and attribute values are returned depends on what the SP
    or the registration authority has said it wants in the request or in the
    metadata file, and what the IdP/AA wants to release. An assumption is
    that what the SP or the registration authority asks for overrides
    whatever is in the metadata. But of course the IdP never releases
    anything it doesn't want to.

    :param ava: The information about the subject as a dictionary
    :param sp_entity_id: The entity ID of the SP
    :param required: Attributes that the SP requires in the assertion
    :param optional: Attributes that the SP regards as optional
    :return: A possibly modified AVA
    """
    if mdstore is not None:
        warn_msg = (
            "The mdstore parameter for saml2.assertion.Policy.filter "
            "is deprecated; "
            "instead, initialize the Policy object setting the mds param.")
        logger.warning(warn_msg)
        _warn(warn_msg, DeprecationWarning)

    # acs MUST have a value, fall back to default.
    if not self.acs:
        self.acs = ac_factory()

    subject_ava = ava.copy()

    # entity category restrictions
    _ent_rest = self.get_entity_categories(sp_entity_id, mds=mdstore,
                                           required=required)
    if _ent_rest:
        subject_ava = filter_attribute_value_assertions(subject_ava, _ent_rest)
    elif required or optional:
        logger.debug("required: %s, optional: %s", required, optional)
        subject_ava = filter_on_attributes(
            subject_ava,
            required,
            optional,
            self.acs,
            self.get_fail_on_missing_requested(sp_entity_id),
        )

    # attribute restrictions
    _attr_rest = self.get_attribute_restrictions(sp_entity_id)
    subject_ava = filter_attribute_value_assertions(subject_ava, _attr_rest)

    return subject_ava or {}
def deprecated(func, *args, **kwargs):
    _warn(message, DeprecationWarning, stacklevel=2)
    if not warned[0]:
        _logger.warn(message)
        warned[0] = True
    if allow:
        return func(*args, **kwargs)
    else:
        raise DeprecationWarning(message)
def scatter_contour(x, y, z, ncontours=50, colorbar=True, fig=None, ax=None,
                    cmap=None, outfile=None):
    """Contour plot on scattered data (x,y,z) and plots the positions of the
    points (x,y) on top.

    Parameters
    ----------
    x : ndarray(T)
        x-coordinates
    y : ndarray(T)
        y-coordinates
    z : ndarray(T)
        z-coordinates
    ncontours : int, optional, default=50
        number of contour levels
    fig : matplotlib Figure object, optional, default=None
        the figure to plot into. When set to None the default Figure object
        will be used
    ax : matplotlib Axes object, optional, default=None
        the axes to plot to. When set to None the default Axes object will
        be used.
    cmap : matplotlib colormap, optional, default=None
        the color map to use. None will use pylab.cm.jet.
    outfile : str, optional, default=None
        output file to write the figure to. When not given, the plot will be
        displayed

    Returns
    -------
    ax : Axes object containing the plot
    """
    _warn(
        'scatter_contour is deprecated; use plot_contour instead'
        ' and manually add a scatter plot on top.',
        DeprecationWarning)
    ax = contour(x, y, z, ncontours=ncontours, colorbar=colorbar,
                 fig=fig, ax=ax, cmap=cmap)
    # scatter points
    ax.scatter(x, y, marker='o', c='b', s=5)
    # show or save
    if outfile is not None:
        ax.get_figure().savefig(outfile)
    return ax
def _fwarn(msg): """ Issue a FileWriterWarning with message `msg`. """ # count consecutive stackframes from this module to find stacklevel frame = inspect.currentframe() this_filename = frame.f_code.co_filename local_stackframes = 0 while frame and frame.f_code.co_filename == this_filename: local_stackframes += 1 frame = frame.f_back _warn(msg, FileWriterWarning, stacklevel=local_stackframes+1)
def discontinued(_type, name, version, stack_level=2): """convenience func to warn about discontinued attributes Arguments: - _type should be one of class, method, function, argument - name: the attributes name - version: the version by which support for the old name will be discontinued - stack_level: as per warnings.warn""" msg = "%s %s is discontinued, support will be stopped in version %s" % (_type, name, version) _warn(msg, DeprecationWarning, stack_level)
def _deprecation_notice(cls):
    """Warn about deprecation of this class."""
    _deprecation_msg = (
        '{name} {type} is deprecated. '
        'It will be removed in the next version. '
        'Use saml2.cryptography.symmetric.Default '
        'or saml2.cryptography.symmetric.Fernet '
        'instead.').format(name=cls.__name__, type=type(cls).__name__)
    logger.warning(_deprecation_msg)
    _warn(_deprecation_msg, DeprecationWarning)
def __init__(self, depth=0, stencil=0, antialiasing=0, major=2, minor=0, **kwargs):
    self._sfContextSettings = _ffi.new('sfContextSettings*')
    if antialiasing and not depth:
        _warn("Antialiasing may not work if depth is not set")
    self.depth_bits = depth
    self.stencil_bits = stencil
    self.antialiasing_level = antialiasing
    self.major_version = major
    self.minor_version = minor
    if kwargs:
        self._set(**kwargs)
def _fwarn(msg): """ Issue a FileWriterWarning with message `msg`. """ # count consecutive stackframes from this module to find stacklevel frame = inspect.currentframe() this_filename = frame.f_code.co_filename local_stackframes = 0 while frame and frame.f_code.co_filename == this_filename: local_stackframes += 1 frame = frame.f_back _warn(msg, FileWriterWarning, stacklevel=local_stackframes + 1)
def _process_row(self, row, **kwargs):
    """
    Stores each row in the relationship definition table as ``Relation``
    objects.
    """
    formatted = [(v.upper().strip() if isinstance(v, basestring) else v) for v in row]
    key, values = formatted[0], Relation(*formatted[1:])
    if not key:
        return
    if key in self:
        _warn("Source table '{}' participates in multiple {} relationships".format(
            key.upper(), values.relate_type), RelationWarning)
        return
    self[key] = values
def locus(self, value):
    ''' alias for name property '''
    if len(value) > 16:
        shortvalue = value[:16]
        _warn("locus property {} truncated to 16 chars {}".format(value, shortvalue),
              _PydnaWarning, stacklevel=2)
        value = shortvalue
    self.name = value
    return
def deprecated(_type, old, new, version, stack_level=2): """a convenience function for deprecating classes, functions, arguments. Arguments: - _type should be one of class, method, function, argument - old, new: the old and new names - version: the version by which support for the old name will be discontinued - stack_level: as per warnings.warn""" msg = "use %s %s instead of %s, support discontinued in version %s" % (_type, new, old, version) _warn(msg, DeprecationWarning, stack_level)
def pendingDeprecation(new_func):
    """
    Raise `PendingDeprecationWarning` and display a message.

    Uses inspect.stack() to determine the name of the item that this is
    called from.

    :param new_func: The name of the function that should be used instead.
    :type new_func: string.
    """
    warn_txt = "`{}` is pending deprecation. Please use `{}` instead."
    _warn(warn_txt.format(inspect.stack()[1][3], new_func),
          PlotPendingDeprecation)
def deprecated(_type, old, new, version, stack_level=2): """a convenience function for deprecating classes, functions, arguments. Arguments: - _type should be one of class, method, function, argument - old, new: the old and new names - version: the version by which support for the old name will be discontinued - stack_level: as per warnings.warn""" msg = "use %s %s instead of %s, support discontinued in version %s" % \ (_type, new, old, version) # DeprecationWarnings are ignored by default in python 2.7, so temporarily # force them to be handled. with catch_warnings(): simplefilter("always") _warn(msg, DeprecationWarning, stack_level)
def _randbelow(self, n, int=int, maxsize=1 << BPF, type=type,
               Method=_MethodType, BuiltinMethod=_BuiltinMethodType):
    getrandbits = self.getrandbits
    if type(self.random) is BuiltinMethod or type(getrandbits) is Method:
        k = n.bit_length()
        r = getrandbits(k)
        while r >= n:
            r = getrandbits(k)
        return r
    random = self.random
    if n >= maxsize:
        _warn('Underlying random() generator does not supply \n'
              'enough bits to choose from a population range this large.\n'
              'To remove the range limitation, add a getrandbits() method.')
        return int(random()*n)
    rem = maxsize % n
    limit = (maxsize - rem)/maxsize
    r = random()
    while r >= limit:
        r = random()
    return int(r*maxsize) % n
def _randbelow_without_getrandbits(self, n, int=int, maxsize=1<<BPF):
    """Return a random int in the range [0,n). Raises ValueError if n==0.

    The implementation does not use getrandbits, but only random.
    """
    random = self.random
    if n >= maxsize:
        _warn("Underlying random() generator does not supply \n"
              "enough bits to choose from a population range this large.\n"
              "To remove the range limitation, add a getrandbits() method.")
        return int(random() * n)
    if n == 0:
        raise ValueError("Boundary cannot be zero")
    rem = maxsize % n
    limit = (maxsize - rem) / maxsize  # int(limit * maxsize) % n == 0
    r = random()
    while r >= limit:
        r = random()
    return int(r*maxsize) % n
def __init__(self, f=None, g=None, A=None, B=None, gamma=1.0, alpha=None,
             epsilon=None, *args, **kwargs):
    super(ForwardBackward, self).__init__(*args, **kwargs)

    if (A is None) == (f is None):
        raise TypeError("must specify either A or f, but not both")
    if A is None:
        A = f.gradient

    if (B is None) == (g is None):
        raise TypeError("must specify either B or g, but not both")
    if B is None:
        B = g.gradient

    if A.shape[0] is None:
        assert B.shape[0] is not None
        self.x = _np.zeros(B.shape[0])
    elif B.shape[0] is None:
        assert A.shape[0] is not None
        self.x = _np.zeros(A.shape[0])
    else:
        assert A.shape[0] == B.shape[0]
        self.x = _np.zeros(A.shape[0])

    if not 0 < gamma < 2:
        _warn("convergence is only guaranteed for 0 < gamma < 2")

    if alpha is None:
        if epsilon is not None:
            if not 0 < epsilon < (9.0 - 4 * gamma) / (2.0 * gamma):
                _warn("convergence is only guaranteed for "
                      "0 < epsilon < (9.0 - 4 * gamma) / (2.0 * gamma)")
            alpha = 1 + (_np.sqrt(9.0 - 4 * gamma - 2 * epsilon * gamma) - 3) / gamma
        else:
            alpha = 0
    else:
        if not 0 <= alpha < 1:
            _warn("convergence is only guaranteed for 0 <= alpha < 1")
        if epsilon is not None:
            _warn("ignoring epsilon since alpha is given")

    self._A = A
    self._B = B
    self._tau = gamma / B.lipschitz
    self._alpha = alpha
    if self._alpha:
        self._last_x = self.x
def __new__(cls, name, bases, dct):
    newcls = super(CallbackMeta, cls).__new__(cls, name, bases, dct)
    try:
        argspec = _getargspec(dct["__init__"])
    except (KeyError, TypeError):
        pass
    else:
        args = argspec.args[1:]
        if argspec.varargs:
            _warn("varargs in %r" % cls)
        if argspec.keywords:
            _warn("keywords in %r" % cls)
        for arg in args:
            if arg in cls.registry:
                _warn("ambiguous constructor argument %r in %r" % (arg, cls))
            else:
                cls.registry[arg] = newcls
    return newcls
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. """Deprecated module which sets the default GLib main context as the mainloop implementation within D-Bus, as a side-effect of being imported! This API is highly non-obvious, so instead of importing this module, new programs which don't need pre-0.80 compatibility should use this equivalent code:: from dbus.mainloop.glib import DBusGMainLoop DBusGMainLoop(set_as_default=True) """ __docformat__ = 'restructuredtext' from dbus.mainloop.glib import DBusGMainLoop, threads_init from warnings import warn as _warn init_threads = threads_init DBusGMainLoop(set_as_default=True) _warn(DeprecationWarning("""\ Importing dbus.glib to use the GLib main loop with dbus-python is deprecated. Instead, use this sequence: from dbus.mainloop.glib import DBusGMainLoop DBusGMainLoop(set_as_default=True) """), DeprecationWarning, stacklevel=2)
def find_executable_linenos(filename):
    _warn("The trace.find_executable_linenos() function is deprecated",
          DeprecationWarning, 2)
    return _find_executable_linenos(filename)
def find_strings(filename, encoding=None):
    _warn("The trace.find_strings() function is deprecated",
          DeprecationWarning, 2)
    return _find_strings(filename, encoding=encoding)
def __init__(self, filename=None):
    _warn('rst7 is deprecated. Use Rst7 instead.', DeprecationWarning)
    super(rst7, self).__init__(filename=filename)
def usage(outfile):
    _warn("The trace.usage() function is deprecated",
          DeprecationWarning, 2)
    _usage(outfile)
        or i.endswith(".pyd"):
    possible_solvers.add(i.split(".")[0])

for solver in possible_solvers:
    nicer_name = solver[:-7] if solver.endswith("_solver") else solver
    try:
        add_solver(solver, nicer_name)
    except Exception:
        pass
del solver, nicer_name
del path, listdir
del i, possible_solvers

if len(solver_dict) == 0:
    _warn("No LP solvers found")


def get_solver_name(mip=False, qp=False):
    """returns a solver name"""
    if len(solver_dict) == 0:
        return None
    # glpk only does lp, not qp. Gurobi and cplex are better at mip
    mip_order = ["gurobi", "cplex", "glpk", "cglpk"]
    lp_order = ["glpk", "cglpk", "gurobi", "cplex"]
    qp_order = ["gurobi", "cplex"]
    if mip is False and qp is False:
        for solver_name in lp_order:
            if solver_name in solver_dict:
                return solver_name
    elif qp:  # mip does not yet matter for this determination
def newfunc(*args, **kwargs):
    msg = "{!r} is deprecated".format(func.func_name)
    if extra:
        msg += " ({!s})".format(extra)
    _warn(msg, DeprecationWarning, stacklevel=2)
    return func(*args, **kwargs)
from .h5 import get_config
from .h5r import Reference, RegionReference
from .h5t import special_dtype, check_dtype

from . import version
from .version import version as __version__

if version.hdf5_version_tuple[:3] >= get_config().vds_min_hdf5_version:
    from ._hl.vds import VirtualSource, VirtualLayout

if version.hdf5_version_tuple != version.hdf5_built_version_tuple:
    _warn(("h5py is running against HDF5 {0} when it was built against {1}, "
           "this may cause problems").format(
              '{0}.{1}.{2}'.format(*version.hdf5_version_tuple),
              '{0}.{1}.{2}'.format(*version.hdf5_built_version_tuple)
          ))


def run_tests(args=''):
    """Run tests with pytest and returns the exit status as an int.
    """
    # Lazy-loading of tests package to avoid strong dependency on test
    # requirements, e.g. pytest
    from .tests import run_tests
    return run_tests(args)


def enable_ipython_completer():
    """ Call this from an interactive IPython session to enable tab-completion
def __init__(self, modules=None, dirs=None):
    _warn("The class trace.Ignore is deprecated",
          DeprecationWarning, 2)
    _Ignore.__init__(self, modules, dirs)
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

import sys
from warnings import warn as _warn

_warn(DeprecationWarning("""\
dbus.gobject_service is deprecated, and is not available under Python 3.

Porting from gobject (PyGObject 2) to gi.repository.GObject (PyGObject 3),
and using dbus.gi_service instead of dbus.gobject_service, is recommended.
"""), DeprecationWarning, stacklevel=2)

if 'gi' in sys.modules:
    # this worked in dbus-python 1.0, so preserve the functionality
    from gi.repository import GObject as gobject
else:
    # this worked in dbus-python < 1.0
    import gobject

import dbus.service


class ExportedGObjectType(gobject.GObjectMeta, dbus.service.InterfaceType):
    """A metaclass which inherits from both GObjectMeta and
    `dbus.service.InterfaceType`. Used as the metaclass for `ExportedGObject`.
        try:
            getrandbits = self.getrandbits
        except AttributeError:
            pass
        else:
            # Only call self.getrandbits if the original random() builtin method
            # has not been overridden or if a new getrandbits() was supplied.
            # This assures that the two methods correspond.
            if type(self.random) is _BuiltinMethod or type(getrandbits) is _Method:
                k = _int(1.00001 + _log(n-1, 2.0))  # 2**k > n-1 > 2**(k-2)
                r = getrandbits(k)
                while r >= n:
                    r = getrandbits(k)
                return r
        if n >= _maxwidth:
            _warn("Underlying random() generator does not supply \n"
                  "enough bits to choose from a population range this large")
        return _int(self.random() * n)

## -------------------- sequence methods -------------------

    def choice(self, seq):
        """Choose a random element from a non-empty sequence."""
        return seq[int(self.random() * len(seq))]  # raises IndexError if seq is empty

    def shuffle(self, x, random=None):
        """x, random=random.random -> shuffle list x in place; return None.

        Optional arg random is a 0-argument function returning a random
        float in [0.0, 1.0); by default, the standard random.random.
        """
def fullmodname(path):
    _warn("The trace.fullmodname() function is deprecated",
          DeprecationWarning, 2)
    return _fullmodname(path)
def find_lines(code, strs):
    _warn("The trace.find_lines() function is deprecated",
          DeprecationWarning, 2)
    return _find_lines(code, strs)
def LoadParm(parmname, xyz=None, box=None, rst7_name=None):
    """
    Loads a topology file using the correct class.

    Parameters
    ----------
    parmname : ``str``
        The name of the topology file to load
    xyz : str or array, optional
        If provided, the coordinates and unit cell dimensions from the
        provided Amber inpcrd/restart file will be loaded into the molecule,
        or the coordinates will be loaded from the coordinate array
    box : array, optional
        If provided, the unit cell information will be set from the provided
        unit cell dimensions (a, b, c, alpha, beta, and gamma, respectively)

    Returns
    -------
    parm : :class:`AmberParm` (or subclass)
        This function parses the topology file, determines if it is an
        Amber-style (i.e., *traditional* Amber force field), Chamber-style
        (i.e., CHARMM force field), or Amoeba-style (i.e., Amoeba force
        field), and then returns an instance of the appropriate type.
    """
    from parmed import load_file
    from parmed.constants import IFBOX
    parm = AmberFormat(parmname)
    if "CTITLE" in parm.flag_list:
        parm = parm.view_as(ChamberParm)
    elif "AMOEBA_FORCEFIELD" in parm.flag_list:
        parm = parm.view_as(AmoebaParm)
    else:
        parm = parm.view_as(AmberParm)
    # Now read the coordinate file if applicable
    if xyz is None and rst7_name is not None:
        _warn("rst7_name keyword is deprecated. Use xyz instead",
              DeprecationWarning)
        xyz = rst7_name
    elif xyz is not None and rst7_name is not None:
        _warn("rst7_name keyword is deprecated and ignored in favor of xyz",
              DeprecationWarning)
    if isinstance(xyz, string_types):
        f = load_file(xyz)
        if not hasattr(f, "coordinates") or f.coordinates is None:
            raise TypeError("%s does not have coordinates" % xyz)
        parm.coordinates = f.coordinates
        if hasattr(f, "box") and f.box is not None and box is None:
            parm.box = f.box
    else:
        parm.coordinates = xyz
    if box is not None:
        parm.box = box
    # If all else fails, set the box from the prmtop file
    if parm.parm_data["POINTERS"][IFBOX] > 0 and parm.box is None:
        box = parm.parm_data["BOX_DIMENSIONS"]
        parm.box = list(box[1:]) + [box[0], box[0], box[0]]
    parm.hasbox = parm.box is not None
    return parm