def _is_a(self, x): """ Check if a Sage object ``x`` belongs to ``self``. This methods is a helper for :meth:`__contains__` and the constructor :meth:`_element_constructor_`. EXAMPLES:: sage: U4 = DisjointUnionEnumeratedSets( ....: Family(NonNegativeIntegers(), Compositions)) sage: U4._is_a(Composition([3,2,1,1])) doctest:...: UserWarning: Disjoint union of Lazy family (<class 'sage.combinat.composition.Compositions'>(i))_{i in Non negative integers} is an infinite union The default implementation of __contains__ can loop forever. Please overload it. True """ if self._keepkey: return (isinstance(x, tuple) and x[0] in self._family.keys() and x[1] in self._family[x[0]]) else: from warnings import warn if self._family.cardinality() == Infinity: warn("%s is an infinite union\nThe default implementation of __contains__ can loop forever. Please overload it."%(self)) return any(x in a for a in self._family)
def serialize(v):
    # Serialize ``v`` into Cassandra's 8-byte ``timestamp`` wire format
    # (milliseconds since the unix epoch, packed big-endian).
    # ``v`` may be a datetime or a raw numeric timestamp.
    global _have_warned_about_timestamps
    try:
        # datetime path: seconds since epoch (UTC) -> milliseconds,
        # plus sub-second precision from ``microsecond`` when present.
        converted = calendar.timegm(v.utctimetuple())
        converted = converted * 1e3 + getattr(v, 'microsecond', 0) / 1e3
    except AttributeError:
        # Ints and floats are valid timestamps too
        if type(v) not in _number_types:
            raise TypeError('DateType arguments must be a datetime or timestamp')
        # One-time warning: numeric values are multiplied by 1000 here,
        # which will change in driver 2.0.
        if not _have_warned_about_timestamps:
            _have_warned_about_timestamps = True
            warnings.warn("timestamp columns in Cassandra hold a number of "
                          "milliseconds since the unix epoch. Currently, when executing "
                          "prepared statements, this driver multiplies timestamp "
                          "values by 1000 so that the result of time.time() "
                          "can be used directly. However, the driver cannot "
                          "match this behavior for non-prepared statements, "
                          "so the 2.0 version of the driver will no longer multiply "
                          "timestamps by 1000. It is suggested that you simply use "
                          "datetime.datetime objects for 'timestamp' values to avoid "
                          "any ambiguity and to guarantee a smooth upgrade of the "
                          "driver.")
        converted = v * 1e3
    # NOTE: ``long`` -> this module targets Python 2.
    return int64_pack(long(converted))
def tcp_server(listensocket, server, *args, **kw):
    """
    Given a socket, accept connections forever, spawning greenlets
    and executing *server* for each new incoming connection.
    When *listensocket* is closed, the ``tcp_server()`` greenlet will end.

    listensocket
        The socket from which to accept connections.
    server
        The callable to call when a new connection is made.
    \*args
        The positional arguments to pass to *server*.
    \*\*kw
        The keyword arguments to pass to *server*.
    """
    import warnings
    warnings.warn("gevent.socket.tcp_server is deprecated", DeprecationWarning, stacklevel=2)
    try:
        try:
            # Accept loop: one spawned greenlet per incoming connection.
            while True:
                client_socket = listensocket.accept()
                spawn_raw(server, client_socket, *args, **kw)
        # NOTE: Python-2-only except syntax.
        except error, e:
            # Broken pipe means it was shutdown
            # (errno 32 == EPIPE; anything else is a real error)
            if e[0] != 32:
                raise
    finally:
        # Always release the listening socket, even on unexpected errors.
        listensocket.close()
def __init__(self, filepath, monitor='val_loss', verbose=0,
             save_best_only=False, mode='auto'):
    """Checkpoint callback: store constructor options and resolve the
    comparison operator / initial best value for the monitored metric."""
    super(Callback, self).__init__()
    self.monitor = monitor
    self.verbose = verbose
    self.filepath = filepath
    self.save_best_only = save_best_only

    if mode not in ['auto', 'min', 'max']:
        warnings.warn('ModelCheckpoint mode %s is unknown, '
                      'fallback to auto mode.' % (mode),
                      RuntimeWarning)
        mode = 'auto'

    # Resolve the improvement test once; 'auto' guesses from the metric
    # name ('acc' metrics improve upward, everything else downward).
    if mode == 'min':
        op = np.less
    elif mode == 'max':
        op = np.greater
    else:
        op = np.greater if 'acc' in self.monitor else np.less
    self.monitor_op = op
    self.best = -np.Inf if op is np.greater else np.Inf
def __init__(self, address, *, prefix=b'', RE=None, zmq=None,
             serializer=pickle.dumps):
    # Build a ZMQ PUB publisher connected to ``address`` ("host:port"
    # string or (host, port) pair), optionally auto-subscribed to ``RE``.
    if RE is not None:
        warnings.warn("The RE argument to Publisher is deprecated and "
                      "will be removed in a future release of bluesky. "
                      "Update your code to subscribe this Publisher "
                      "instance to (and, if needed, unsubscribe from) to "
                      "the RunEngine manually.")
    # The prefix is prepended to outgoing frames, so it must be bytes and
    # must not contain the b' ' separator.
    if isinstance(prefix, str):
        raise ValueError("prefix must be bytes, not string")
    if b' ' in prefix:
        raise ValueError("prefix {!r} may not contain b' '".format(prefix))
    # ``zmq`` may be injected (e.g. for testing); default to real pyzmq.
    if zmq is None:
        import zmq
    # Accept "host:port" as well as a pre-split (host, port) pair.
    if isinstance(address, str):
        address = address.split(':', maxsplit=1)
    self.address = (address[0], int(address[1]))
    self.RE = RE
    url = "tcp://%s:%d" % self.address
    self._prefix = bytes(prefix)
    self._context = zmq.Context()
    self._socket = self._context.socket(zmq.PUB)
    self._socket.connect(url)
    # Token retained so the caller (or this object) can unsubscribe later.
    if RE:
        self._subscription_token = RE.subscribe(self)
    self._serializer = serializer
def mean_scaling(Y, axis=0):
    """Scaling of the data to have percent of baseline change along the
    specified axis

    Parameters
    ----------
    Y : array of shape (n_time_points, n_voxels)
       The input data.

    axis : int, optional
        Axis along which the mean is computed. Default=0.

    Returns
    -------
    Y : array of shape (n_time_points, n_voxels),
       The data after mean-scaling, de-meaning and multiplication by 100.

    mean : array of shape (n_voxels,)
        The data mean.
    """
    mean = Y.mean(axis=axis)
    if (mean == 0).any():
        # FIX: the original message concatenated its sentences without
        # separating spaces ("observed.The data...").
        warn('Mean values of 0 observed. '
             'The data have probably been centered. '
             'Scaling might not work as expected')
        # Guard against division by zero below.
        mean = np.maximum(mean, 1)
    Y = 100 * (Y / mean - 1)
    return Y, mean
def TH2_to_FITS(hist, flipx=True):
    """Convert ROOT 2D histogram to FITS format.

    Parameters
    ----------
    hist : ROOT.TH2
        2-dim ROOT histogram

    flipx : bool, optional
        Flip the x-axis (astro images conventionally have xlow > xhi).

    Returns
    -------
    hdu : `~astropy.io.fits.ImageHDU`
        Histogram in FITS format.

    Examples
    --------
    >>> import ROOT
    >>> from gammapy.utils.root import TH2_to_FITS
    >>> root_hist = ROOT.TH2F()
    >>> fits_hdu = TH2_to_FITS(root_hist)
    >>> fits_hdu.writeto('my_image.fits')
    """
    header = TH2_to_FITS_header(hist, flipx)
    if header['CDELT1'] > 0:
        # FIX: sentences in the original message ran together without
        # separating spaces ("properly.A TH2 ... axis,i.e.").
        warnings.warn('CDELT1 > 0 might not be handled properly. '
                      'A TH2 representing an astro image should have '
                      'a reversed x-axis, i.e. xlow > xhi')
    data = TH2_to_FITS_data(hist, flipx)
    hdu = fits.ImageHDU(data=data, header=header)
    return hdu
def train(self, features, labels, normalisedlabels=False):
    """Run the configured feature selector and wrap the surviving
    feature indices in a ``filterfeatures`` model.  Falls back to
    keeping every feature when the selector returns none."""
    selected = self.selector(features, labels)
    if not len(selected):
        import warnings
        warnings.warn('milk.featureselection: No features selected! Using all features as fall-back.')
        selected = np.arange(len(features[0]))
    return filterfeatures(selected)
def Window_(self, **criteria):
    """Deprecated alias: warn, then delegate to :meth:`ChildWindow`."""
    deprecation_msg = (
        "WindowSpecification.Window() WindowSpecification.Window_(), "
        "WindowSpecification.window() and WindowSpecification.window_() "
        "are deprecated, please switch to WindowSpecification.ChildWindow()")
    warnings.warn(deprecation_msg, DeprecationWarning)
    return self.ChildWindow(**criteria)
def column_or_1d(y, warn=False):
    """ Ravel column or 1d numpy array, else raises an error

    Parameters
    ----------
    y : array-like

    warn : boolean, default False
       To control display of warnings.

    Returns
    -------
    y : array

    """
    shape = np.shape(y)
    is_1d = len(shape) == 1
    is_column = len(shape) == 2 and shape[1] == 1
    if is_1d or is_column:
        if is_column and warn:
            warnings.warn("A column-vector y was passed when a 1d array was"
                          " expected. Please change the shape of y to "
                          "(n_samples, ), for example using ravel().",
                          DataConversionWarning, stacklevel=2)
        return np.ravel(y)
    raise ValueError("bad input shape {0}".format(shape))
def __init__(self, unit="ns", tz=None):
    # Construct a timezone-aware datetime dtype.  Only 'ns' resolution is
    # supported; a tz is mandatory.
    # Copy-constructor path: another DatetimeTZDtype passed as ``unit``.
    if isinstance(unit, DatetimeTZDtype):
        unit, tz = unit.unit, unit.tz

    if unit != 'ns':
        if isinstance(unit, str) and tz is None:
            # maybe a string like datetime64[ns, tz], which we support for
            # now.
            result = type(self).construct_from_string(unit)
            unit = result.unit
            tz = result.tz
            msg = (
                "Passing a dtype alias like 'datetime64[ns, {tz}]' "
                "to DatetimeTZDtype is deprecated. Use "
                "'DatetimeTZDtype.construct_from_string()' instead."
            )
            warnings.warn(msg.format(tz=tz), FutureWarning, stacklevel=2)
        else:
            raise ValueError("DatetimeTZDtype only supports ns units")

    if tz:
        # Normalize arbitrary tz input (string, pytz, dateutil, ...) to a
        # standardized tzinfo instance.
        tz = timezones.maybe_get_tz(tz)
        tz = timezones.tz_standardize(tz)
    elif tz is not None:
        # Falsy but non-None (e.g. '') is treated as an unknown time zone.
        raise pytz.UnknownTimeZoneError(tz)
    elif tz is None:
        raise TypeError("A 'tz' is required.")

    self._unit = unit
    self._tz = tz
def variable(value, dtype=_FLOATX, name=None):
    '''Instantiates a tensor.

    # Arguments
        value: numpy array, initial value of the tensor.
        dtype: tensor type.
        name: optional name string for the tensor.

    # Returns
        Tensor variable instance.
    '''
    v = tf.Variable(value, dtype=_convert_string_dtype(dtype), name=name)
    # Manual-init mode: caller is responsible for running initializers.
    if _MANUAL_VAR_INIT:
        return v
    # Only auto-initialize when the variable lives in the graph owned by
    # the session Keras has registered; initializing through the wrong
    # session/graph pair would fail.
    if tf.get_default_graph() is get_session().graph:
        try:
            get_session().run(v.initializer)
        except tf.errors.InvalidArgumentError:
            warnings.warn('Could not automatically initialize variable, '
                          'make sure you do it manually (e.g. via '
                          '`tf.initialize_all_variables()`).')
    else:
        warnings.warn('The default TensorFlow graph is not the graph '
                      'associated with the TensorFlow session currently '
                      'registered with Keras, and as such Keras '
                      'was not able to automatically initialize a variable. '
                      'You should consider registering the proper session '
                      'with Keras via `K.set_session(sess)`.')
    return v
def load(self, path=None, force=True):
    """Load state from local file.
    If no path is specified, attempts to load from ``self.path``.

    :type path: str
    :arg path: local file to load from

    :type force: bool
    :param force:
        if ``force=False``, only load from ``self.path`` if file
        has changed since last load.

        .. deprecated:: 1.6
            This keyword will be removed in Passlib 1.8;
            Applications should use :meth:`load_if_changed` instead.

    :raises RuntimeError: if no path was given and ``self.path`` is unset.
    """
    if path is not None:
        # Explicit path: read it, but do not remember its mtime (it is
        # not self._path, so change-tracking does not apply).
        with open(path, "rb") as fh:
            self._mtime = 0
            self._load_lines(fh)
    elif not force:
        # Deprecated force=False -> delegate to load_if_changed().
        # FIX: the original message ran "1.6," and "and will" together
        # with no separating space.
        warn("%(name)s.load(force=False) is deprecated as of Passlib 1.6, "
             "and will be removed in Passlib 1.8; "
             "use %(name)s.load_if_changed() instead." %
             dict(name=self.__class__.__name__),
             DeprecationWarning, stacklevel=2)
        return self.load_if_changed()
    elif self._path:
        # Reload from the configured path, recording mtime so a later
        # load_if_changed() can detect modifications.
        with open(self._path, "rb") as fh:
            self._mtime = os.path.getmtime(self._path)
            self._load_lines(fh)
    else:
        raise RuntimeError("%s().path is not set, an explicit path is required" %
                           self.__class__.__name__)
    return True
def __init__(self, path=None, new=False, autoload=True, autosave=False,
             encoding="utf-8", return_unicode=PY3,
             ):
    # Initialize an htpasswd/htdigest-style password file wrapper.
    # set encoding
    if not encoding:
        # Deprecated encoding=None: legacy way of asking for raw bytes.
        warn("``encoding=None`` is deprecated as of Passlib 1.6, "
             "and will cause a ValueError in Passlib 1.8, "
             "use ``return_unicode=False`` instead.",
             DeprecationWarning, stacklevel=2)
        encoding = "utf-8"
        return_unicode = False
    elif not is_ascii_codec(encoding):
        # htpasswd/htdigest files assumes 1-byte chars, and use ":" separator,
        # so only ascii-compatible encodings are allowed.
        raise ValueError("encoding must be 7-bit ascii compatible")
    self.encoding = encoding
    # set other attrs
    self.return_unicode = return_unicode
    self.autosave = autosave
    self._path = path
    self._mtime = 0
    # init db
    if not autoload:
        # Deprecated autoload=False: equivalent to new=True.
        warn("``autoload=False`` is deprecated as of Passlib 1.6, "
             "and will be removed in Passlib 1.8, use ``new=True`` instead",
             DeprecationWarning, stacklevel=2)
        new = True
    if path and not new:
        # Load the existing file from disk.
        self.load()
    else:
        # Start with an empty in-memory record set.
        self._records = OrderedDict()
def form_for_instance(instance, form=BaseForm, fields=None,
                      formfield_callback=lambda f, **kwargs: f.formfield(**kwargs)):
    """
    Returns a Form class for the given Django model instance.

    Provide ``form`` if you want to use a custom BaseForm subclass.

    Provide ``formfield_callback`` if you want to define different logic for
    determining the formfield for a given database field. It's a callable that
    takes a database Field instance, plus **kwargs, and returns a form Field
    instance with the given kwargs (i.e. 'initial').
    """
    warn("form_for_instance is deprecated. Use ModelForm instead.",
         PendingDeprecationWarning, stacklevel=3)
    model = instance.__class__
    opts = model._meta
    field_list = []
    # Collect a form field for every editable model field, optionally
    # restricted to ``fields``, pre-populated from the instance's values.
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields and not f.name in fields:
            continue
        current_value = f.value_from_object(instance)
        formfield = formfield_callback(f, initial=current_value)
        if formfield:
            field_list.append((f.name, formfield))
    base_fields = SortedDict(field_list)
    # Dynamically build the Form subclass; its save() updates ``instance``.
    return type(opts.object_name + 'InstanceForm', (form,),
                {'base_fields': base_fields, '_model': model,
                 'save': make_instance_save(instance, fields, 'changed')})
def parent(self):
    """Deprecated alias; warn and forward to :meth:`parentdir`."""
    import warnings
    warnings.warn(
        "[v0.4] URL.parent has been deprecated and replaced with parentdir (which does what parent used to do) and up (which does what you probably thought parent would do ;-))",
        DeprecationWarning,
        stacklevel=2)
    result = self.parentdir()
    return result
def form_for_model(model, form=BaseForm, fields=None,
                   formfield_callback=lambda f: f.formfield()):
    """
    Returns a Form class for the given Django model class.

    Provide ``form`` if you want to use a custom BaseForm subclass.

    Provide ``formfield_callback`` if you want to define different logic for
    determining the formfield for a given database field. It's a callable that
    takes a database Field instance and returns a form Field instance.
    """
    warn("form_for_model is deprecated. Use ModelForm instead.",
         PendingDeprecationWarning, stacklevel=3)
    opts = model._meta
    field_list = []
    # Collect a form field for every editable model field, optionally
    # restricted to the ``fields`` whitelist.
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields and not f.name in fields:
            continue
        formfield = formfield_callback(f)
        if formfield:
            field_list.append((f.name, formfield))
    base_fields = SortedDict(field_list)
    # Dynamically build the Form subclass; its save() creates a new object.
    return type(opts.object_name + 'Form', (form,),
                {'base_fields': base_fields, '_model': model,
                 'save': make_model_save(model, fields, 'created')})
def here(self):
    """Deprecated alias; warn and forward to :meth:`curdir`."""
    import warnings
    deprecation_msg = "URL.here() is deprecated, please use URL.curdir() instead!"
    warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
    return self.curdir()
def load(cls, dirname=''):
    # Pick the most capable Svn*Info parser for ``dirname`` based on the
    # installed svn client version and the working-copy metadata present.
    normdir = os.path.normpath(dirname)
    code, data = _run_command(['svn', 'info', normdir])
    # Must check for some contents, as some use empty directories
    # in testcases
    svn_dir = os.path.join(normdir, '.svn')
    has_svn = (os.path.isfile(os.path.join(svn_dir, 'entries')) or
               os.path.isfile(os.path.join(svn_dir, 'dir-props')) or
               os.path.isfile(os.path.join(svn_dir, 'dir-prop-base')))
    svn_version = tuple(cls.get_svn_version().split('.'))
    try:
        # Only the major.minor components matter for dispatch below.
        base_svn_version = tuple(int(x) for x in svn_version[:2])
    except ValueError:
        # Unparseable version string -> treat as no usable svn command.
        base_svn_version = tuple()
    if has_svn and (code or not base_svn_version
                    or base_svn_version < (1, 3)):
        # .svn metadata exists but `svn info` failed or the client is too
        # old: parse the .svn directory files directly.
        warnings.warn(("No SVN 1.3+ command found: falling back "
                       "on pre 1.7 .svn parsing"), DeprecationWarning)
        return SvnFileInfo(dirname)
    elif not has_svn:
        return SvnInfo(dirname)
    elif base_svn_version < (1, 5):
        return Svn13Info(dirname)
    else:
        return Svn15Info(dirname)
def __call__(self, method_name, **method_kwargs):
    # Invoke a VK API method.  Collects errors across the (possibly
    # multiple) JSON documents in one response, handles captcha, and
    # retries once after refreshing the token on an internal error.
    response = self.method_request(method_name, **method_kwargs)
    response.raise_for_status()

    # there are may be 2 dicts in 1 json
    # for example: {'error': ...}{'response': ...}
    errors = []
    error_codes = []
    for data in json_iter_parse(response.text):
        if 'error' in data:
            error_data = data['error']
            if error_data['error_code'] == CAPTCHA_IS_NEEDED:
                # Delegate captcha solving; it re-issues the call itself.
                return self.captcha_is_needed(error_data, method_name,
                                              **method_kwargs)

            error_codes.append(error_data['error_code'])
            errors.append(error_data)

        if 'response' in data:
            # A usable response arrived; surface earlier errors as warnings.
            for error in errors:
                warnings.warn(str(error))

            # return make_handy(data['response'])
            return data['response']

    if INTERNAL_SERVER_ERROR in error_codes:  # invalid access token
        # Refresh the token and retry the same call once.
        self.get_access_token()
        return self(method_name, **method_kwargs)
    else:
        raise VkAPIMethodError(errors[0])
def _get_json(self, urlpath): """Retrieve a JSON from the HiSPARC API :param urlpath: api urlpath to retrieve (i.e. after API_BASE). :return: the data returned by the api as dictionary or integer. """ if self.force_fresh and self.force_stale: raise Exception('Can not force fresh and stale simultaneously.') try: if self.force_stale: raise Exception json_data = self._retrieve_url(urlpath) data = json.loads(json_data) except Exception: if self.force_fresh: raise Exception('Couldn\'t get requested data from server.') localpath = path.join(LOCAL_BASE, urlpath.strip('/') + extsep + 'json') try: with open(localpath) as localdata: data = json.load(localdata) except: if self.force_stale: raise Exception('Couldn\'t find requested data locally.') raise Exception('Couldn\'t get requested data from server ' 'nor find it locally.') if not self.force_stale: warnings.warn('Using local data. Possibly outdated.') return data
def __init__(self, app_id=None, user_login=None, user_password=None,
             access_token=None, user_email=None, scope='offline',
             timeout=1, api_version='5.20'):
    # VK API session: requires either login+password or an access token.
    # ``user_email`` is a deprecated alias for ``user_login``.
    user_login = user_login or user_email

    if (not user_login or not user_password) and not access_token:
        raise ValueError('Arguments user_login and user_password, or access_token are required')

    if user_email:  # deprecated at April 11, 2014
        # NOTE: simplefilter('once') mutates the process-wide warning
        # filters as a side effect.
        warnings.simplefilter('once')
        warnings.warn("Use 'user_login' instead of deprecated 'user_email'",
                      DeprecationWarning, stacklevel=2)

    self.app_id = app_id
    self.user_login = user_login
    self.user_password = user_password
    self.access_token = access_token
    self.scope = scope or ''
    self.api_version = api_version
    self._default_timeout = timeout

    self.session = requests.Session()
    self.session.headers['Accept'] = 'application/json'
    self.session.headers['Content-Type'] = 'application/x-www-form-urlencoded'

    # Obtain a token now if credentials were supplied but no token was.
    if not access_token and user_login and user_password:
        self.get_access_token()
def __init__(self, monitor='val_loss', patience=0, verbose=0, mode='auto'):
    """Early-stopping callback: record options and resolve the comparison
    operator / initial best value for the monitored metric.

    Parameters: ``monitor`` metric name, ``patience`` epochs without
    improvement before stopping, ``verbose`` verbosity flag, ``mode`` one
    of 'auto'/'min'/'max'."""
    super(Callback, self).__init__()
    self.monitor = monitor
    self.patience = patience
    self.verbose = verbose
    self.wait = 0

    if mode not in ['auto', 'min', 'max']:
        # BUG FIX: the original interpolated ``self.mode``, which is never
        # assigned, so an unknown mode raised AttributeError instead of
        # warning and falling back to 'auto'.
        warnings.warn('EarlyStopping mode %s is unknown, '
                      'fallback to auto mode.' % (mode), RuntimeWarning)
        mode = 'auto'

    if mode == 'min':
        self.monitor_op = np.less
        self.best = np.Inf
    elif mode == 'max':
        self.monitor_op = np.greater
        self.best = -np.Inf
    else:
        # 'auto': accuracy-like metrics improve upward, losses downward.
        if 'acc' in self.monitor:
            self.monitor_op = np.greater
            self.best = -np.Inf
        else:
            self.monitor_op = np.less
            self.best = np.Inf
def wrap(f):
    # Decorator body (closure over ``modules`` and ``forgive``): try to
    # import each requested module, allowing comma-separated alternatives,
    # and inject the first success into ``f``'s globals under the last
    # dotted component of the first alternative's name.
    if modules:
        # attach import to function
        setattr(f, 'imports', modules)
        for alternatives in modules:
            # alternatives are comma seperated
            alternatives = alternatives.split(',')
            # we import the part of the import X.Y.Z -> Z
            mod_name = alternatives[0].split('.')[-1]
            for mod in alternatives:
                mod = mod.strip().split('.')
                try:
                    if len(mod) == 1:
                        module = __import__(mod[0])
                    else:
                        # Dotted path: import the parent package and pull
                        # the leaf attribute off it.
                        module = getattr(__import__('.'.join(mod[:-1]), \
                                                    fromlist=[mod[-1]]), mod[-1])
                    # NOTE: ``func_globals`` is Python 2 only (``__globals__``
                    # in Python 3).
                    f.func_globals[mod_name] = module
                    break  # import only one
                except ImportError:
                    pass
            else:
                if forgive:  # no break -> no import
                    warnings.warn('Failed to import %s' % alternatives)
                else:
                    raise ImportError('Failed to import %s' % alternatives)
    return f
def __init__(self, *args, **kargs):
    # Obsolete shim: behaves exactly like a plain Canvas, but emits a
    # DeprecationWarning pointing callers at Tkinter.Canvas.
    tkinter.Canvas.__init__(self, *args, **kargs)
    warnings.warn("RO.Wdg.PatchedCanvas is obsolete; please use Tkinter.Canvas instead.",
                  category = DeprecationWarning,
                  stacklevel = 2,
                  )
def scaffold_list_columns(self):
    """
    Return a list of columns from the model.
    """
    columns = []

    for p in self._get_model_iterator():
        if hasattr(p, 'direction'):
            # Relationship property: include only many-to-one unless all
            # relation directions were requested.
            if self.column_display_all_relations or p.direction.name == 'MANYTOONE':
                columns.append(p.key)
        elif hasattr(p, 'columns'):
            if len(p.columns) > 1:
                # Composite property: drop foreign-key columns; anything
                # still spanning multiple columns cannot be displayed.
                filtered = tools.filter_foreign_columns(self.model.__table__, p.columns)

                if len(filtered) > 1:
                    warnings.warn('Can not convert multiple-column properties (%s.%s)' % (self.model, p.key))
                    continue

                column = filtered[0]
            else:
                column = p.columns[0]

            # Skip foreign keys, and primary keys unless configured in.
            if column.foreign_keys:
                continue

            if not self.column_display_pk and column.primary_key:
                continue

            columns.append(p.key)

    return columns
def set_reuse_addr(descriptor):
    """Deprecated helper: turn on SO_REUSEADDR for ``descriptor``,
    silently ignoring socket errors."""
    import warnings
    warnings.warn("gevent.socket.set_reuse_addr is deprecated",
                  DeprecationWarning, stacklevel=2)
    try:
        current = descriptor.getsockopt(SOL_SOCKET, SO_REUSEADDR)
        descriptor.setsockopt(SOL_SOCKET, SO_REUSEADDR, current | 1)
    except error:
        pass
def init_dev(dev, name=None):
    # Initialize (or reuse) a pygpu context for device ``dev``, apply the
    # configured preallocation policy, and register the context under
    # ``name`` together with some of its properties (cuDNN version/handle).
    global pygpu_activated
    if (pygpu.version.major, pygpu.version.minor) < (0, 6):
        raise ValueError("Your installed version of pygpu is too old, please upgrade to 0.6 or later")
    if dev not in init_dev.devmap:
        ctx = pygpu.init(dev,
                         disable_alloc_cache=config.gpuarray.preallocate < 0,
                         single_stream=config.gpuarray.single_stream,
                         sched=config.gpuarray.sched)
        init_dev.devmap[dev] = ctx
        if config.gpuarray.preallocate < 0:
            print("Disabling allocation cache on %s" % (dev,))
        elif config.gpuarray.preallocate > 0:
            MB = (1024 * 1024)
            if config.gpuarray.preallocate <= 1:
                # Values in (0, 1] mean a fraction of total GPU memory
                # (capped at 95%).
                gmem = min(config.gpuarray.preallocate, 0.95) * ctx.total_gmem
            else:
                # Values > 1 mean megabytes.
                gmem = config.gpuarray.preallocate * MB
            # This will allocate and immediatly free an object of size gmem
            # which will reserve that amount of memory on the GPU.
            pygpu.empty((gmem,), dtype='int8', context=ctx)
            if config.print_active_device:
                print("Preallocating %d/%d Mb (%f) on %s" %
                      (gmem // MB, ctx.total_gmem // MB,
                       gmem / ctx.total_gmem, dev),
                      file=sys.stderr)
    context = init_dev.devmap[dev]
    # This will map the context name to the real context object.
    reg_context(name, context)
    if config.print_active_device:
        try:
            pcibusid = context.pcibusid
        except pygpu.gpuarray.UnsupportedException:
            pcibusid = '(unsupported for device %s)' % dev
        except Exception:
            warnings.warn('Unable to get PCI Bus ID. Please consider updating libgpuarray and pygpu.')
            pcibusid = 'unknown'
        print("Mapped name %s to device %s: %s" %
              (name, dev, context.devname), file=sys.stderr)
        print("PCI Bus ID:", pcibusid, file=sys.stderr)
    pygpu_activated = True
    ctx_props = _get_props(name)
    ctx_props['dev'] = dev
    if dev.startswith('cuda'):
        if 'cudnn_version' not in ctx_props:
            try:
                ctx_props['cudnn_version'] = dnn.version()
                # 5200 should not print warning with cudnn 5.1 final.
                if ctx_props['cudnn_version'] >= 5200:
                    # NOTE(review): warn-string reconstructed from upstream
                    # Theano; the source text was severed mid-literal here.
                    warnings.warn("Your cuDNN version is more recent than "
                                  "Theano. If you encounter problems, try "
                                  "updating Theano or downgrading cuDNN to "
                                  "version 5.1.")
                if config.print_active_device:
                    print("Using cuDNN version %d on context %s" %
                          (ctx_props['cudnn_version'], name),
                          file=sys.stderr)
                ctx_props['cudnn_handle'] = dnn._make_handle(context)
            except Exception:
                # cuDNN is optional; silently continue without it.
                pass
def close(self):
    """Closes the NetCDF file.

    Flushes pending writes, then releases the mmap only when no live
    array still refers to the mapped buffer; otherwise warns and leaves
    the mapping open (closing it would invalidate those arrays).
    """
    if not self.fp.closed:
        try:
            self.flush()
        finally:
            self.variables = {}
            if self._mm_buf is not None:
                # Use a weakref to detect whether dropping our own
                # reference is enough to free the buffer.
                ref = weakref.ref(self._mm_buf)
                self._mm_buf = None
                if ref() is None:
                    # self._mm_buf is gc'd, and we can close the mmap
                    self._mm.close()
                else:
                    # we cannot close self._mm, since self._mm_buf is
                    # alive and there may still be arrays referring to it
                    warnings.warn((
                        "Cannot close a netcdf_file opened with mmap=True, when "
                        "netcdf_variables or arrays referring to its data still exist. "
                        "All data arrays obtained from such files refer directly to "
                        "data on disk, and must be copied before the file can be cleanly "
                        "closed. (See netcdf_file docstring for more information on mmap.)"
                    ), category=RuntimeWarning)
            self._mm = None
            self.fp.close()
def _check_response(self, res, allowed=(200,)):
    """Validate an API response.

    Warns when the server is newer than this client, and raises the
    status-specific BinstarError subclass when ``res.status_code`` is
    not in ``allowed``.

    FIXES: mutable default argument ``[200]`` replaced with a tuple
    (membership test is unchanged); ``not x in`` -> ``x not in``;
    local ``long`` no longer shadows the builtin; bare ``except:``
    narrowed to ``except Exception:``.
    """
    api_version = res.headers.get('x-binstar-api-version', '0.2.1')
    if pv(api_version) > pv(__version__):
        msg = ('The api server is running the binstar-api version %s. you are using %s\n' % (api_version, __version__)
               + 'Please update your client with pip install -U binstar or conda update binstar')
        warnings.warn(msg, stacklevel=4)

    if res.status_code not in allowed:
        short, description = STATUS_CODES.get(res.status_code, ('?', 'Undefined error'))
        msg = '%s: %s (status code: %s)' % (short, description, res.status_code)
        # Prefer the server-provided error message when the body is JSON.
        try:
            data = res.json()
        except Exception:
            pass
        else:
            msg = data.get('error', msg)
        ErrCls = BinstarError
        if res.status_code == 401:
            ErrCls = Unauthorized
        elif res.status_code == 404:
            ErrCls = NotFound
        elif res.status_code == 409:
            ErrCls = Conflict
        raise ErrCls(msg, res.status_code)