Example #1
    def _check_response(self, res, allowed=[200]):
        api_version = res.headers.get('x-binstar-api-version', '0.2.1')
        if pv(api_version) > pv(__version__):
            msg = ('The api server is running the binstar-api version %s. You are using %s\n' % (api_version, __version__)
                   + 'Please update your client with pip install -U binstar or conda update binstar')
            warnings.warn(msg, stacklevel=4)



        if res.status_code not in allowed:
            short, long = STATUS_CODES.get(res.status_code, ('?', 'Undefined error'))
            msg = '%s: %s (status code: %s)' % (short, long, res.status_code)
            try:
                data = res.json()
            except Exception:
                pass
            else:
                msg = data.get('error', msg)

            ErrCls = BinstarError
            if res.status_code == 401:
                ErrCls = Unauthorized
            elif res.status_code == 404:
                ErrCls = NotFound
            elif res.status_code == 409:
                ErrCls = Conflict
            raise ErrCls(msg, res.status_code)
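A note on the stacklevel argument used above and in many of the examples below: it controls which stack frame the warning is attributed to, so deprecation messages point at the caller rather than at library internals. A minimal, self-contained sketch:

import warnings

def _inner():
    # stacklevel=2 attributes the warning to _inner()'s caller,
    # not to this line.
    warnings.warn("use new_api() instead", DeprecationWarning, stacklevel=2)

def old_api():
    _inner()

old_api()  # the reported location is old_api()'s call into _inner()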
Example #2
 def close(self):
     """Closes the NetCDF file."""
     if not self.fp.closed:
         try:
             self.flush()
         finally:
             self.variables = {}
             if self._mm_buf is not None:
                 ref = weakref.ref(self._mm_buf)
                 self._mm_buf = None
                 if ref() is None:
                     # self._mm_buf is gc'd, and we can close the mmap
                     self._mm.close()
                 else:
                     # we cannot close self._mm, since self._mm_buf is
                     # alive and there may still be arrays referring to it
                     warnings.warn((
                         "Cannot close a netcdf_file opened with mmap=True, when "
                         "netcdf_variables or arrays referring to its data still exist. "
                         "All data arrays obtained from such files refer directly to "
                         "data on disk, and must be copied before the file can be cleanly "
                         "closed. (See netcdf_file docstring for more information on mmap.)"
                     ), category=RuntimeWarning)
             self._mm = None
             self.fp.close()
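A hedged usage sketch of the behaviour the warning above describes (scipy.io's netcdf_file; the file path and variable name are placeholders): arrays read from an mmap-backed file must be copied, and their references dropped, before close() can release the mapping.

from scipy.io import netcdf_file

f = netcdf_file('example.nc', mmap=True)   # placeholder path
temps = f.variables['temperature'][:]      # still backed by the mmap buffer
data = temps.copy()                        # detach a private copy
del temps                                  # drop the mmap-backed reference
f.close()  # without copy()/del, this emits the RuntimeWarning above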
Example #3
File: socket.py Project: strogo/pylibs
def set_reuse_addr(descriptor):
    import warnings
    warnings.warn("gevent.socket.set_reuse_addr is deprecated", DeprecationWarning, stacklevel=2)
    try:
        descriptor.setsockopt(SOL_SOCKET, SO_REUSEADDR, descriptor.getsockopt(SOL_SOCKET, SO_REUSEADDR) | 1)
    except error:
        pass
Example #4
    def _is_a(self, x):
        """
        Check if a Sage object ``x`` belongs to ``self``.

        This method is a helper for :meth:`__contains__` and the
        constructor :meth:`_element_constructor_`.

        EXAMPLES::

            sage: U4 = DisjointUnionEnumeratedSets(
            ....:          Family(NonNegativeIntegers(), Compositions))
            sage: U4._is_a(Composition([3,2,1,1]))
            doctest:...: UserWarning: Disjoint union of Lazy family (<class 'sage.combinat.composition.Compositions'>(i))_{i in Non negative integers} is an infinite union
            The default implementation of __contains__ can loop forever. Please overload it.
            True
        """
        if self._keepkey:
            return (isinstance(x, tuple) and
                    x[0] in self._family.keys() and
                    x[1] in self._family[x[0]])
        else:
            from warnings import warn
            if self._family.cardinality() == Infinity:
                warn("%s is an infinite union\nThe default implementation of __contains__ can loop forever. Please overload it."%(self))
            return any(x in a for a in self._family)
Example #5
File: socket.py Project: strogo/pylibs
def tcp_server(listensocket, server, *args, **kw):
    """
    Given a socket, accept connections forever, spawning greenlets
    and executing *server* for each new incoming connection.
    When *listensocket* is closed, the ``tcp_server()`` greenlet will end.

    listensocket
        The socket from which to accept connections.
    server
        The callable to call when a new connection is made.
    \*args
        The positional arguments to pass to *server*.
    \*\*kw
        The keyword arguments to pass to *server*.
    """
    import warnings
    warnings.warn("gevent.socket.tcp_server is deprecated", DeprecationWarning, stacklevel=2)
    try:
        try:
            while True:
                client_socket = listensocket.accept()
                spawn_raw(server, client_socket, *args, **kw)
        except error, e:
            # Broken pipe means it was shutdown
            if e[0] != 32:
                raise
    finally:
        listensocket.close()
Example #6
 def __init__(self, *args, **kargs):
     tkinter.Canvas.__init__(self, *args, **kargs)
     
     warnings.warn("RO.Wdg.PatchedCanvas is obsolete; please use Tkinter.Canvas instead.",
         category = DeprecationWarning,
         stacklevel = 2,
     )
Example #7
    def serialize(v):
        global _have_warned_about_timestamps
        try:
            converted = calendar.timegm(v.utctimetuple())
            converted = converted * 1e3 + getattr(v, 'microsecond', 0) / 1e3
        except AttributeError:
            # Ints and floats are valid timestamps too
            if type(v) not in _number_types:
                raise TypeError('DateType arguments must be a datetime or timestamp')

            if not _have_warned_about_timestamps:
                _have_warned_about_timestamps = True
                warnings.warn("timestamp columns in Cassandra hold a number of "
                    "milliseconds since the unix epoch.  Currently, when executing "
                    "prepared statements, this driver multiplies timestamp "
                    "values by 1000 so that the result of time.time() "
                    "can be used directly.  However, the driver cannot "
                    "match this behavior for non-prepared statements, "
                    "so the 2.0 version of the driver will no longer multiply "
                    "timestamps by 1000.  It is suggested that you simply use "
                    "datetime.datetime objects for 'timestamp' values to avoid "
                    "any ambiguity and to guarantee a smooth upgrade of the "
                    "driver.")
            converted = v * 1e3

        return int64_pack(long(converted))
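The millisecond arithmetic above can be checked by hand; a minimal sketch of the same conversion for a naive datetime:

import calendar
from datetime import datetime

v = datetime(2014, 1, 1, 0, 0, 0, 500000)            # 0.5 s of microseconds
converted = calendar.timegm(v.utctimetuple())        # whole seconds: 1388534400
converted = converted * 1e3 + v.microsecond / 1e3    # milliseconds
print(int(converted))                                # 1388534400500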
Example #8
    def __init__(self, monitor='val_loss', patience=0, verbose=0, mode='auto'):
        super(Callback, self).__init__()

        self.monitor = monitor
        self.patience = patience
        self.verbose = verbose
        self.wait = 0

        if mode not in ['auto', 'min', 'max']:
            warnings.warn('EarlyStopping mode %s is unknown, '
                          'fallback to auto mode.' % mode, RuntimeWarning)
            mode = 'auto'

        if mode == 'min':
            self.monitor_op = np.less
            self.best = np.Inf
        elif mode == 'max':
            self.monitor_op = np.greater
            self.best = -np.Inf
        else:
            if 'acc' in self.monitor:
                self.monitor_op = np.greater
                self.best = -np.Inf
            else:
                self.monitor_op = np.less
                self.best = np.Inf
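A hedged usage sketch of how the 'auto' mode resolution above plays out (Keras-era EarlyStopping; the monitor names are illustrative):

import numpy as np

cb = EarlyStopping(monitor='acc', patience=3)   # mode defaults to 'auto'
assert cb.monitor_op is np.greater              # 'acc' monitors maximize
assert cb.best == -np.Inf

cb = EarlyStopping(monitor='val_loss')          # loss-like monitors minimize
assert cb.monitor_op is np.less
assert cb.best == np.Inf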
Example #9
File: zmq.py Project: NSLS-II/bluesky
 def __init__(self, address, *, prefix=b'',
              RE=None, zmq=None, serializer=pickle.dumps):
     if RE is not None:
         warnings.warn("The RE argument to Publisher is deprecated and "
                       "will be removed in a future release of bluesky. "
                       "Update your code to subscribe this Publisher "
                       "instance to (and, if needed, unsubscribe from) to "
                       "the RunEngine manually.")
     if isinstance(prefix, str):
         raise ValueError("prefix must be bytes, not string")
     if b' ' in prefix:
         raise ValueError("prefix {!r} may not contain b' '".format(prefix))
     if zmq is None:
         import zmq
     if isinstance(address, str):
         address = address.split(':', maxsplit=1)
     self.address = (address[0], int(address[1]))
     self.RE = RE
     url = "tcp://%s:%d" % self.address
     self._prefix = bytes(prefix)
     self._context = zmq.Context()
     self._socket = self._context.socket(zmq.PUB)
     self._socket.connect(url)
     if RE:
         self._subscription_token = RE.subscribe(self)
     self._serializer = serializer
Example #10
    def _get_json(self, urlpath):
        """Retrieve a JSON from the HiSPARC API

        :param urlpath: api urlpath to retrieve (i.e. after API_BASE).
        :return: the data returned by the api as dictionary or integer.

        """
        if self.force_fresh and self.force_stale:
            raise Exception('Can not force fresh and stale simultaneously.')
        try:
            if self.force_stale:
                raise Exception
            json_data = self._retrieve_url(urlpath)
            data = json.loads(json_data)
        except Exception:
            if self.force_fresh:
                raise Exception('Couldn\'t get requested data from server.')
            localpath = path.join(LOCAL_BASE,
                                  urlpath.strip('/') + extsep + 'json')
            try:
                with open(localpath) as localdata:
                    data = json.load(localdata)
            except:
                if self.force_stale:
                    raise Exception('Couldn\'t find requested data locally.')
                raise Exception('Couldn\'t get requested data from server '
                                'nor find it locally.')
            if not self.force_stale:
                warnings.warn('Using local data. Possibly outdated.')

        return data
Example #11
    def __init__(self, filepath, monitor='val_loss', verbose=0,
                 save_best_only=False, mode='auto'):

        super(Callback, self).__init__()
        self.monitor = monitor
        self.verbose = verbose
        self.filepath = filepath
        self.save_best_only = save_best_only

        if mode not in ['auto', 'min', 'max']:
            warnings.warn('ModelCheckpoint mode %s is unknown, '
                          'fallback to auto mode.' % (mode),
                          RuntimeWarning)
            mode = 'auto'

        if mode == 'min':
            self.monitor_op = np.less
            self.best = np.Inf
        elif mode == 'max':
            self.monitor_op = np.greater
            self.best = -np.Inf
        else:
            if 'acc' in self.monitor:
                self.monitor_op = np.greater
                self.best = -np.Inf
            else:
                self.monitor_op = np.less
                self.best = np.Inf
Example #12
    def load(cls, dirname=''):
        normdir = os.path.normpath(dirname)
        code, data = _run_command(['svn', 'info', normdir])
        # Must check for some contents, as some use empty directories
        # in testcases
        svn_dir = os.path.join(normdir, '.svn')
        has_svn = (os.path.isfile(os.path.join(svn_dir, 'entries')) or
                   os.path.isfile(os.path.join(svn_dir, 'dir-props')) or
                   os.path.isfile(os.path.join(svn_dir, 'dir-prop-base')))

        svn_version = tuple(cls.get_svn_version().split('.'))

        try:
            base_svn_version = tuple(int(x) for x in svn_version[:2])
        except ValueError:
            base_svn_version = tuple()

        if has_svn and (code or not base_svn_version 
                             or base_svn_version < (1, 3)):
            warnings.warn(("No SVN 1.3+ command found: falling back "
                           "on pre 1.7 .svn parsing"), DeprecationWarning)
            return SvnFileInfo(dirname)
        elif not has_svn:
            return SvnInfo(dirname)
        elif base_svn_version < (1, 5):
            return Svn13Info(dirname)
        else:
            return Svn15Info(dirname)
Example #13
def TH2_to_FITS(hist, flipx=True):
    """Convert ROOT 2D histogram to FITS format.

    Parameters
    ----------
    hist : ROOT.TH2
        2-dim ROOT histogram

    Returns
    -------
    hdu : `~astropy.io.fits.ImageHDU`
        Histogram in FITS format.

    Examples
    --------
    >>> import ROOT
    >>> from gammapy.utils.root import TH2_to_FITS
    >>> root_hist = ROOT.TH2F()
    >>> fits_hdu = TH2_to_FITS(root_hist)
    >>> fits_hdu.writeto('my_image.fits')
    """
    header = TH2_to_FITS_header(hist, flipx)
    if header['CDELT1'] > 0:
        warnings.warn('CDELT1 > 0 might not be handled properly. '
                      'A TH2 representing an astro image should have '
                      'a reversed x-axis, i.e. xlow > xhi')
    data = TH2_to_FITS_data(hist, flipx)
    hdu = fits.ImageHDU(data=data, header=header)
    return hdu
Example #14
def form_for_model(model, form=BaseForm, fields=None,
                   formfield_callback=lambda f: f.formfield()):
    """
    Returns a Form class for the given Django model class.

    Provide ``form`` if you want to use a custom BaseForm subclass.

    Provide ``formfield_callback`` if you want to define different logic for
    determining the formfield for a given database field. It's a callable that
    takes a database Field instance and returns a form Field instance.
    """
    warn("form_for_model is deprecated. Use ModelForm instead.",
        PendingDeprecationWarning, stacklevel=3)
    opts = model._meta
    field_list = []
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields and f.name not in fields:
            continue
        formfield = formfield_callback(f)
        if formfield:
            field_list.append((f.name, formfield))
    base_fields = SortedDict(field_list)
    return type(opts.object_name + 'Form', (form,),
        {'base_fields': base_fields, '_model': model,
         'save': make_model_save(model, fields, 'created')})
Example #15
def mean_scaling(Y, axis=0):
    """Scaling of the data to have percent of baseline change along the
    specified axis

    Parameters
    ----------
    Y : array of shape (n_time_points, n_voxels)
       The input data.

    Returns
    -------
    Y : array of shape (n_time_points, n_voxels),
       The data after mean-scaling, de-meaning and multiplication by 100.

    mean : array of shape (n_voxels,)
        The data mean.
    """
    mean = Y.mean(axis=axis)
    if (mean == 0).any():
        warn('Mean values of 0 observed. '
             'The data have probably been centered. '
             'Scaling might not work as expected')
    mean = np.maximum(mean, 1)
    Y = 100 * (Y / mean - 1)
    return Y, mean
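A worked example of the scaling above on a toy (n_time_points, n_voxels) array:

import numpy as np

Y = np.array([[ 98., 100.],
              [100., 100.],
              [102., 100.]])
Y_scaled, m = mean_scaling(Y)
# m        -> array([100., 100.])
# Y_scaled -> array([[-2., 0.], [0., 0.], [2., 0.]])  (percent change from mean)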
Example #16
def init_dev(dev, name=None):
    global pygpu_activated
    if (pygpu.version.major, pygpu.version.minor) < (0, 6):
        raise ValueError("Your installed version of pygpu is too old, please upgrade to 0.6 or later")
    if dev not in init_dev.devmap:
        ctx = pygpu.init(dev,
                         disable_alloc_cache=config.gpuarray.preallocate < 0,
                         single_stream=config.gpuarray.single_stream,
                         sched=config.gpuarray.sched)
        init_dev.devmap[dev] = ctx
        if config.gpuarray.preallocate < 0:
            print("Disabling allocation cache on %s" % (dev,))
        elif config.gpuarray.preallocate > 0:
            MB = (1024 * 1024)
            if config.gpuarray.preallocate <= 1:
                gmem = min(config.gpuarray.preallocate, 0.95) * ctx.total_gmem
            else:
                gmem = config.gpuarray.preallocate * MB
            # This will allocate and immediately free an object of size gmem
            # which will reserve that amount of memory on the GPU.
            pygpu.empty((gmem,), dtype='int8', context=ctx)
            if config.print_active_device:
                print("Preallocating %d/%d Mb (%f) on %s" %
                      (gmem//MB, ctx.total_gmem//MB, gmem/ctx.total_gmem, dev),
                      file=sys.stderr)
    context = init_dev.devmap[dev]
    # This will map the context name to the real context object.
    reg_context(name, context)
    if config.print_active_device:
        try:
            pcibusid = context.pcibusid
        except pygpu.gpuarray.UnsupportedException:
            pcibusid = '(unsupported for device %s)' % dev
        except Exception:
            warnings.warn('Unable to get PCI Bus ID. Please consider updating libgpuarray and pygpu.')
            pcibusid = 'unknown'

        print("Mapped name %s to device %s: %s" %
              (name, dev, context.devname),
              file=sys.stderr)
        print("PCI Bus ID:", pcibusid, file=sys.stderr)
    pygpu_activated = True
    ctx_props = _get_props(name)
    ctx_props['dev'] = dev
    if dev.startswith('cuda'):
        if 'cudnn_version' not in ctx_props:
            try:
                ctx_props['cudnn_version'] = dnn.version()
                # 5200 should not print warning with cudnn 5.1 final.
                if ctx_props['cudnn_version'] >= 5200:
                    warnings.warn("Your cuDNN version is more recent than "
                                  "Theano. If you encounter problems, try "
                                  "updating Theano or downgrading cuDNN to "
                                  "version 5.1.")
                if config.print_active_device:
                    print("Using cuDNN version %d on context %s" %
                          (ctx_props['cudnn_version'], name), file=sys.stderr)
                ctx_props['cudnn_handle'] = dnn._make_handle(context)
            except Exception:
                pass
Example #17
def form_for_instance(instance, form=BaseForm, fields=None,
                      formfield_callback=lambda f, **kwargs: f.formfield(**kwargs)):
    """
    Returns a Form class for the given Django model instance.

    Provide ``form`` if you want to use a custom BaseForm subclass.

    Provide ``formfield_callback`` if you want to define different logic for
    determining the formfield for a given database field. It's a callable that
    takes a database Field instance, plus **kwargs, and returns a form Field
    instance with the given kwargs (i.e. 'initial').
    """
    warn("form_for_instance is deprecated. Use ModelForm instead.",
        PendingDeprecationWarning, stacklevel=3)
    model = instance.__class__
    opts = model._meta
    field_list = []
    for f in opts.fields + opts.many_to_many:
        if not f.editable:
            continue
        if fields and f.name not in fields:
            continue
        current_value = f.value_from_object(instance)
        formfield = formfield_callback(f, initial=current_value)
        if formfield:
            field_list.append((f.name, formfield))
    base_fields = SortedDict(field_list)
    return type(opts.object_name + 'InstanceForm', (form,),
        {'base_fields': base_fields, '_model': model,
         'save': make_instance_save(instance, fields, 'changed')})
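A hedged usage sketch (Django 0.96/1.0-era newforms API, which emits the PendingDeprecationWarning above; the Author model and primary key are hypothetical):

author = Author.objects.get(pk=1)                 # hypothetical model instance
AuthorInstanceForm = form_for_instance(author)    # build the Form class
form = AuthorInstanceForm()                       # fields initialized from author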
Example #18
    def wrap(f):
        if modules:
            # attach import to function
            setattr(f, 'imports', modules)
            for alternatives in modules:
                # alternatives are comma separated
                alternatives = alternatives.split(',')
                # we import the part of the import X.Y.Z -> Z
                mod_name = alternatives[0].split('.')[-1]
                for mod in alternatives:
                    mod = mod.strip().split('.')

                    try:
                        if len(mod) == 1:
                            module = __import__(mod[0])
                        else:
                            module = getattr(__import__('.'.join(mod[:-1]), \
                                            fromlist=[mod[-1]]), mod[-1])
                        f.func_globals[mod_name] = module
                        break # import only one
                    except ImportError:
                        pass
                else:
                    if forgive: # no break -> no import
                        warnings.warn('Failed to import %s' % alternatives)
                    else:
                        raise ImportError('Failed to import %s' % alternatives)
        return f
Example #19
    def scaffold_list_columns(self):
        """
            Return a list of columns from the model.
        """
        columns = []

        for p in self._get_model_iterator():
            if hasattr(p, 'direction'):
                if self.column_display_all_relations or p.direction.name == 'MANYTOONE':
                    columns.append(p.key)
            elif hasattr(p, 'columns'):
                if len(p.columns) > 1:
                    filtered = tools.filter_foreign_columns(self.model.__table__, p.columns)

                    if len(filtered) > 1:
                        warnings.warn('Can not convert multiple-column properties (%s.%s)' % (self.model, p.key))
                        continue

                    column = filtered[0]
                else:
                    column = p.columns[0]

                if column.foreign_keys:
                    continue

                if not self.column_display_pk and column.primary_key:
                    continue

                columns.append(p.key)

        return columns
Example #20
File: api.py Project: RinsDev/vk
    def __call__(self, method_name, **method_kwargs):
        response = self.method_request(method_name, **method_kwargs)
        response.raise_for_status()

        # there may be 2 dicts in 1 json
        # for example: {'error': ...}{'response': ...}
        errors = []
        error_codes = []
        for data in json_iter_parse(response.text):
            if 'error' in data:
                error_data = data['error']
                if error_data['error_code'] == CAPTCHA_IS_NEEDED:
                    return self.captcha_is_needed(error_data, method_name, **method_kwargs)

                error_codes.append(error_data['error_code'])
                errors.append(error_data)

            if 'response' in data:
                for error in errors:
                    warnings.warn(str(error))

                # return make_handy(data['response'])
                return data['response']
            
        if INTERNAL_SERVER_ERROR in error_codes:  # invalid access token
            self.get_access_token()
            return self(method_name, **method_kwargs)
        else:
            raise VkAPIMethodError(errors[0])
Example #21
File: api.py Project: RinsDev/vk
    def __init__(self, app_id=None, user_login=None, user_password=None, access_token=None, user_email=None,
                 scope='offline', timeout=1, api_version='5.20'):

        user_login = user_login or user_email

        if (not user_login or not user_password) and not access_token:
            raise ValueError('Arguments user_login and user_password, or access_token are required')

        if user_email:  # deprecated at April 11, 2014
            warnings.simplefilter('once')
            warnings.warn("Use 'user_login' instead of deprecated 'user_email'", DeprecationWarning, stacklevel=2)

        self.app_id = app_id

        self.user_login = user_login
        self.user_password = user_password

        self.access_token = access_token
        self.scope = scope or ''
        
        self.api_version = api_version

        self._default_timeout = timeout

        self.session = requests.Session()
        self.session.headers['Accept'] = 'application/json'
        self.session.headers['Content-Type'] = 'application/x-www-form-urlencoded'

        if not access_token and user_login and user_password:
            self.get_access_token()
Example #22
 def parent(self):
     import warnings
     warnings.warn(
         "[v0.4] URL.parent has been deprecated and replaced with parentdir (which does what parent used to do) and up (which does what you probably thought parent would do ;-))",
         DeprecationWarning,
         stacklevel=2)
     return self.parentdir()
Example #23
 def train(self, features, labels, normalisedlabels=False):
     idxs = self.selector(features, labels)
     if len(idxs) == 0:
         import warnings
         warnings.warn('milk.featureselection: No features selected! Using all features as fall-back.')
         idxs = np.arange(len(features[0]))
     return filterfeatures(idxs)
Example #24
def column_or_1d(y, warn=False):
    """ Ravel column or 1d numpy array, else raises an error

    Parameters
    ----------
    y : array-like

    warn : boolean, default False
       To control display of warnings.

    Returns
    -------
    y : array

    """
    shape = np.shape(y)
    if len(shape) == 1:
        return np.ravel(y)
    if len(shape) == 2 and shape[1] == 1:
        if warn:
            warnings.warn("A column-vector y was passed when a 1d array was"
                          " expected. Please change the shape of y to "
                          "(n_samples, ), for example using ravel().",
                          DataConversionWarning, stacklevel=2)
        return np.ravel(y)

    raise ValueError("bad input shape {0}".format(shape))
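A hedged usage sketch (in scikit-learn this helper lives in sklearn.utils.validation and DataConversionWarning in sklearn.exceptions):

import warnings
import numpy as np

y = np.array([[1], [2], [3]])                # shape (3, 1) column vector
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    flat = column_or_1d(y, warn=True)
print(flat.shape)                            # (3,)
print(caught[0].category.__name__)           # DataConversionWarning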
Example #25
    def __init__(self, unit="ns", tz=None):
        if isinstance(unit, DatetimeTZDtype):
            unit, tz = unit.unit, unit.tz

        if unit != 'ns':
            if isinstance(unit, str) and tz is None:
                # maybe a string like datetime64[ns, tz], which we support for
                # now.
                result = type(self).construct_from_string(unit)
                unit = result.unit
                tz = result.tz
                msg = (
                    "Passing a dtype alias like 'datetime64[ns, {tz}]' "
                    "to DatetimeTZDtype is deprecated. Use "
                    "'DatetimeTZDtype.construct_from_string()' instead."
                )
                warnings.warn(msg.format(tz=tz), FutureWarning, stacklevel=2)
            else:
                raise ValueError("DatetimeTZDtype only supports ns units")

        if tz:
            tz = timezones.maybe_get_tz(tz)
            tz = timezones.tz_standardize(tz)
        elif tz is not None:
            raise pytz.UnknownTimeZoneError(tz)
        elif tz is None:
            raise TypeError("A 'tz' is required.")

        self._unit = unit
        self._tz = tz
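A hedged sketch of the two construction paths the code above distinguishes (pandas API of this era):

from pandas import DatetimeTZDtype

dtype = DatetimeTZDtype(unit='ns', tz='UTC')    # explicit form, no warning
# The alias form still works but emits the FutureWarning above:
#   DatetimeTZDtype('datetime64[ns, UTC]')
# Preferred replacement for alias strings:
dtype = DatetimeTZDtype.construct_from_string('datetime64[ns, UTC]')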
Example #26
    def load(self, path=None, force=True):
        """Load state from local file.
        If no path is specified, attempts to load from ``self.path``.

        :type path: str
        :arg path: local file to load from

        :type force: bool
        :param force:
            if ``force=False``, only load from ``self.path`` if file
            has changed since last load.

            .. deprecated:: 1.6
                This keyword will be removed in Passlib 1.8;
                Applications should use :meth:`load_if_changed` instead.
        """
        if path is not None:
            with open(path, "rb") as fh:
                self._mtime = 0
                self._load_lines(fh)
        elif not force:
            warn("%(name)s.load(force=False) is deprecated as of Passlib 1.6,"
                 "and will be removed in Passlib 1.8; "
                 "use %(name)s.load_if_changed() instead." %
                 dict(name=self.__class__.__name__),
                 DeprecationWarning, stacklevel=2)
            return self.load_if_changed()
        elif self._path:
            with open(self._path, "rb") as fh:
                self._mtime = os.path.getmtime(self._path)
                self._load_lines(fh)
        else:
            raise RuntimeError("%s().path is not set, an explicit path is required" %
                               self.__class__.__name__)
        return True
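A hedged usage sketch of the deprecation path above (Passlib's HtpasswdFile mixes in this load(); the file path is a placeholder):

from passlib.apache import HtpasswdFile

ht = HtpasswdFile("test.htpasswd")    # placeholder path; loads on construction
ht.load_if_changed()                  # preferred: reload only if mtime changed
# ht.load(force=False)                # deprecated spelling; emits the warning above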
Example #27
def variable(value, dtype=_FLOATX, name=None):
    '''Instantiates a tensor.

    # Arguments
        value: numpy array, initial value of the tensor.
        dtype: tensor type.
        name: optional name string for the tensor.

    # Returns
        Tensor variable instance.
    '''
    v = tf.Variable(value, dtype=_convert_string_dtype(dtype), name=name)
    if _MANUAL_VAR_INIT:
        return v
    if tf.get_default_graph() is get_session().graph:
        try:
            get_session().run(v.initializer)
        except tf.errors.InvalidArgumentError:
            warnings.warn('Could not automatically initialize variable, '
                          'make sure you do it manually (e.g. via '
                          '`tf.initialize_all_variables()`).')
    else:
        warnings.warn('The default TensorFlow graph is not the graph '
                      'associated with the TensorFlow session currently '
                      'registered with Keras, and as such Keras '
                      'was not able to automatically initialize a variable. '
                      'You should consider registering the proper session '
                      'with Keras via `K.set_session(sess)`.')
    return v
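A hedged usage sketch (Keras-era TensorFlow backend, where this function is usually reached as K.variable):

import numpy as np

# Creates a 2x2 float32 tensor variable and, in the normal case, runs its
# initializer in the session registered with Keras.
v = variable(np.array([[1., 2.], [3., 4.]]), dtype='float32', name='example_var')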
Example #28
 def Window_(self, **criteria):
     warnings.warn(
         "WindowSpecification.Window() WindowSpecification.Window_(), "
         "WindowSpecification.window() and WindowSpecification.window_() "
         "are deprecated, please switch to WindowSpecification.ChildWindow()",
         DeprecationWarning)
     return self.ChildWindow(**criteria)
Example #29
    def __init__(self, path=None, new=False, autoload=True, autosave=False,
                 encoding="utf-8", return_unicode=PY3,
                 ):
        # set encoding
        if not encoding:
            warn("``encoding=None`` is deprecated as of Passlib 1.6, "
                 "and will cause a ValueError in Passlib 1.8, "
                 "use ``return_unicode=False`` instead.",
                 DeprecationWarning, stacklevel=2)
            encoding = "utf-8"
            return_unicode = False
        elif not is_ascii_codec(encoding):
            # htpasswd/htdigest files assumes 1-byte chars, and use ":" separator,
            # so only ascii-compatible encodings are allowed.
            raise ValueError("encoding must be 7-bit ascii compatible")
        self.encoding = encoding

        # set other attrs
        self.return_unicode = return_unicode
        self.autosave = autosave
        self._path = path
        self._mtime = 0

        # init db
        if not autoload:
            warn("``autoload=False`` is deprecated as of Passlib 1.6, "
                 "and will be removed in Passlib 1.8, use ``new=True`` instead",
                 DeprecationWarning, stacklevel=2)
            new = True
        if path and not new:
            self.load()
        else:
            self._records = OrderedDict()
Example #30
 def here(self):
     import warnings
     warnings.warn(
         "URL.here() is deprecated, please use URL.curdir() instead!",
         DeprecationWarning,
         stacklevel=2)
     return self.curdir()
Example #31
    def __init__(
        self,
        *,
        host: str = "aiplatform.googleapis.com",
        credentials: ga_credentials.Credentials = None,
        credentials_file: str = None,
        scopes: Sequence[str] = None,
        channel: grpc.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                credentials=self._credentials,
                credentials_file=credentials_file,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)
Example #32
 def __init__(cls, name, bases, d):
     warnings.warn(FSADeprecationWarning(
         '"_BoundDeclarativeMeta" has been renamed to "DefaultMeta". The'
         ' old name will be removed in 3.0.'),
                   stacklevel=3)
     super(_BoundDeclarativeMeta, cls).__init__(name, bases, d)
Example #33
#!/Users/yashodperera/Documents/Projects/centralSystem/venv/bin/python3
# When the django-admin.py deprecation ends, remove this script.
import warnings

from django.core import management

try:
    from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
    raise ImportError(
        'django-admin.py was deprecated in Django 3.1 and removed in Django '
        '4.0. Please manually remove this script from your virtual environment '
        'and use django-admin instead.'
    )

if __name__ == "__main__":
    warnings.warn(
        'django-admin.py is deprecated in favor of django-admin.',
        RemovedInDjango40Warning,
    )
    management.execute_from_command_line()
Example #34
blog_installed = "mezzanine.blog" in settings.INSTALLED_APPS
if blog_installed:
    BLOG_SLUG = settings.BLOG_SLUG
    if BLOG_SLUG:
        BLOG_SLUG += "/"
    blog_patterns = patterns(
        "",
        ("^%s" % BLOG_SLUG, include("mezzanine.blog.urls")),
    )
    urlpatterns += blog_patterns

# Mezzanine's Accounts app
_old_accounts_enabled = getattr(settings, "ACCOUNTS_ENABLED", False)
if _old_accounts_enabled:
    import warnings
    warnings.warn("The setting ACCOUNTS_ENABLED is deprecated. Please "
                  "add mezzanine.accounts to INSTALLED_APPS.")
if _old_accounts_enabled or "mezzanine.accounts" in settings.INSTALLED_APPS:
    # We don't define a URL prefix here such as /account/ since we want
    # to honour the LOGIN_* settings, which Django has prefixed with
    # /account/ by default. So those settings are used in accounts.urls
    urlpatterns += patterns(
        "",
        ("^", include("mezzanine.accounts.urls")),
    )

# Mezzanine's Pages app.
PAGES_SLUG = ""
if "mezzanine.pages" in settings.INSTALLED_APPS:
    # No BLOG_SLUG means catch-all patterns belong to the blog,
    # so give pages their own prefix and inject them before the
    # blog urlpatterns.
Example #35
"""Main Scikit Flow module."""
#  Copyright 2015-present Scikit Flow Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

import warnings

try:
    from tensorflow.contrib.learn import *
    warnings.warn("skflow as separate library is deprecated. "
                  "Use import tensorflow.contrib.learn as skflow instead.", DeprecationWarning)
except ImportError:
    raise ImportError("Update your Tensorflow to 0.8+ to use latest skflow.")

Example #36
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import six

import logging
import os
import sys
import warnings

if six.PY3:
    warnings.warn(
        "The gtk* backends have not been tested with Python 3.x",
        ImportWarning)

try:
    import gobject
    import gtk; gdk = gtk.gdk
    import pango
except ImportError:
    raise ImportError("Gtk* backend requires pygtk to be installed.")

pygtk_version_required = (2,4,0)
if gtk.pygtk_version < pygtk_version_required:
    raise ImportError ("PyGTK %d.%d.%d is installed\n"
                      "PyGTK %d.%d.%d or later is required"
                      % (gtk.pygtk_version + pygtk_version_required))
del pygtk_version_required

_new_tooltip_api = (gtk.pygtk_version[1] >= 12)
Example #37
    def __init__(
        self,
        surface,
        chains,
        fractions=None,
        backfill=None,
        pattern=None,
        tile_x=1,
        tile_y=1,
        **kwargs
    ):
        from mbuild.lib.recipes import TiledCompound

        super(Monolayer, self).__init__()

        # Replicate the surface.
        tiled_compound = TiledCompound(surface, n_tiles=(tile_x, tile_y, 1))
        self.add(tiled_compound, label="tiled_surface")

        if pattern is None:  # Fill the surface.
            pattern = mb.Random2DPattern(len(tiled_compound.referenced_ports()))

        if isinstance(chains, mb.Compound):
            chains = [chains]

        if fractions:
            fractions = list(fractions)
            if len(chains) != len(fractions):
                raise ValueError(
                    "Number of fractions does not match the number"
                    " of chain types provided"
                )

            n_chains = len(pattern.points)

            # Attach chains of each type to binding sites based on
            # respective fractions.
            for chain, fraction in zip(chains[:-1], fractions[:-1]):

                # Create sub-pattern for this chain type
                subpattern = deepcopy(pattern)
                n_points = int(round(fraction * n_chains))
                warn("\n Adding {} of chain {}".format(n_points, chain))
                pick = np.random.choice(
                    subpattern.points.shape[0], n_points, replace=False
                )
                points = subpattern.points[pick]
                subpattern.points = points

                # Remove now-occupied points from overall pattern
                pattern.points = np.array(
                    [
                        point
                        for point in pattern.points.tolist()
                        if point not in subpattern.points.tolist()
                    ]
                )

                # Attach chains to the surface
                attached_chains, _ = subpattern.apply_to_compound(
                    guest=chain,
                    host=self["tiled_surface"],
                    backfill=None,
                    **kwargs
                )
                self.add(attached_chains)

        else:
            warn("\n No fractions provided. Assuming a single chain type.")

        # Attach final chain type. Remaining sites get a backfill.
        warn("\n Adding {} of chain {}".format(len(pattern), chains[-1]))
        attached_chains, backfills = pattern.apply_to_compound(
            guest=chains[-1],
            host=self["tiled_surface"],
            backfill=backfill,
            **kwargs
        )
        self.add(attached_chains)
        self.add(backfills)
Example #38
 def predict_proba(self, X, batch_size=128, verbose=1):
     preds = self.predict(X, batch_size, verbose)
     if preds.min()<0 or preds.max()>1:
         warnings.warn("Network returning invalid probability values.")
     return preds
Example #39
def wrn(warn_msg: str):
    warnings.warn(tag(warn_msg))
Example #40
    def __init__(__self__,
                 resource_name,
                 opts=None,
                 collation=None,
                 create_mode=None,
                 edition=None,
                 elastic_pool_name=None,
                 import_=None,
                 location=None,
                 max_size_bytes=None,
                 name=None,
                 requested_service_objective_id=None,
                 requested_service_objective_name=None,
                 resource_group_name=None,
                 restore_point_in_time=None,
                 server_name=None,
                 source_database_deletion_date=None,
                 source_database_id=None,
                 tags=None,
                 threat_detection_policy=None,
                 __name__=None,
                 __opts__=None):
        """
        Allows you to manage an Azure SQL Database
        
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] collation: The name of the collation. Applies only if `create_mode` is `Default`.  Azure default is `SQL_LATIN1_GENERAL_CP1_CI_AS`. Changing this forces a new resource to be created.
        :param pulumi.Input[str] create_mode: Specifies the type of database to create. Defaults to `Default`. See below for the accepted values.
        :param pulumi.Input[str] edition: The edition of the database to be created. Applies only if `create_mode` is `Default`. Valid values are: `Basic`, `Standard`, `Premium`, or `DataWarehouse`. Please see [Azure SQL Database Service Tiers](https://azure.microsoft.com/en-gb/documentation/articles/sql-database-service-tiers/).
        :param pulumi.Input[str] elastic_pool_name: The name of the elastic database pool.
        :param pulumi.Input[dict] import_: A Database Import block as documented below. `create_mode` must be set to `Default`.
        :param pulumi.Input[str] location: Specifies the supported Azure location where the resource exists. Changing this forces a new resource to be created.
        :param pulumi.Input[str] max_size_bytes: The maximum size that the database can grow to. Applies only if `create_mode` is `Default`.  Please see [Azure SQL Database Service Tiers](https://azure.microsoft.com/en-gb/documentation/articles/sql-database-service-tiers/).
        :param pulumi.Input[str] name: The name of the database.
        :param pulumi.Input[str] requested_service_objective_id: Use `requested_service_objective_id` or `requested_service_objective_name` to set the performance level for the database.
               Please see [Azure SQL Database Service Tiers](https://azure.microsoft.com/en-gb/documentation/articles/sql-database-service-tiers/).
        :param pulumi.Input[str] requested_service_objective_name: Use `requested_service_objective_name` or `requested_service_objective_id` to set the performance level for the database. Valid values are: `S0`, `S1`, `S2`, `S3`, `P1`, `P2`, `P4`, `P6`, `P11` and `ElasticPool`.  Please see [Azure SQL Database Service Tiers](https://azure.microsoft.com/en-gb/documentation/articles/sql-database-service-tiers/).
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which to create the database.  This must be the same as Database Server resource group currently.
        :param pulumi.Input[str] restore_point_in_time: The point in time for the restore. Only applies if `create_mode` is `PointInTimeRestore` e.g. 2013-11-08T22:00:40Z
        :param pulumi.Input[str] server_name: The name of the SQL Server on which to create the database.
        :param pulumi.Input[str] source_database_deletion_date: The deletion date time of the source database. Only applies to deleted databases where `create_mode` is `PointInTimeRestore`.
        :param pulumi.Input[str] source_database_id: The URI of the source database if `create_mode` value is not `Default`.
        :param pulumi.Input[dict] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[dict] threat_detection_policy: Threat detection policy configuration. The `threat_detection_policy` block supports fields documented below.
        """
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated",
                          DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn(
                "explicit use of __opts__ is deprecated, use 'opts' instead",
                DeprecationWarning)
            opts = __opts__
        if not resource_name:
            raise TypeError(
                'Missing resource name argument (for URN creation)')
        if not isinstance(resource_name, str):
            raise TypeError('Expected resource name to be a string')
        if opts and not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError(
                'Expected resource options to be a ResourceOptions instance')

        __props__ = dict()

        __props__['collation'] = collation

        __props__['create_mode'] = create_mode

        __props__['edition'] = edition

        __props__['elastic_pool_name'] = elastic_pool_name

        __props__['import_'] = import_

        if location is None:
            raise TypeError('Missing required property location')
        __props__['location'] = location

        __props__['max_size_bytes'] = max_size_bytes

        __props__['name'] = name

        __props__[
            'requested_service_objective_id'] = requested_service_objective_id

        __props__[
            'requested_service_objective_name'] = requested_service_objective_name

        if resource_group_name is None:
            raise TypeError('Missing required property resource_group_name')
        __props__['resource_group_name'] = resource_group_name

        __props__['restore_point_in_time'] = restore_point_in_time

        if server_name is None:
            raise TypeError('Missing required property server_name')
        __props__['server_name'] = server_name

        __props__[
            'source_database_deletion_date'] = source_database_deletion_date

        __props__['source_database_id'] = source_database_id

        __props__['tags'] = tags

        __props__['threat_detection_policy'] = threat_detection_policy

        __props__['creation_date'] = None
        __props__['default_secondary_location'] = None
        __props__['encryption'] = None

        super(Database, __self__).__init__('azure:sql/database:Database',
                                           resource_name, __props__, opts)
Example #41
 def add_event(self, event: Event):
     if self.valid_date(event):
         self.events.append(event)
     else:
         warnings.warn('Event not at the correct date, ignoring')
Example #42
def get_opposite_faces(eptm):
    warnings.warn("Deprecated, use `eptm.get_opposite_faces()` instead")
    eptm.get_opposite_faces()
Example #43
 def add_birthday(self, birthday: Birthday):
     if self.valid_date(birthday):
         self.birthdays.append(birthday)
     else:
         warnings.warn('Birthday not at the correct date, ignoring')
Example #44
    def _initialize_pool(self):
        """Build a process or thread pool and return the number of workers"""
        n_jobs = self._effective_n_jobs()
        # The list of exceptions that we will capture
        self.exceptions = [TransportableException]

        if n_jobs == 1:
            # Sequential mode: do not use a pool instance to avoid any
            # useless dispatching overhead
            self._pool = None
        elif self.backend == 'threading':
            self._pool = ThreadPool(n_jobs)
        elif self.backend == 'multiprocessing':
            if mp.current_process().daemon:
                # Daemonic processes cannot have children
                self._pool = None
                warnings.warn(
                    'Multiprocessing-backed parallel loops cannot be nested,'
                    ' setting n_jobs=1',
                    stacklevel=3)
                return 1
            elif threading.current_thread().name != 'MainThread':
                # Prevent posix fork inside in non-main posix threads
                self._pool = None
                warnings.warn(
                    'Multiprocessing backed parallel loops cannot be nested'
                    ' below threads, setting n_jobs=1',
                    stacklevel=3)
                return 1
            else:
                already_forked = int(os.environ.get(JOBLIB_SPAWNED_PROCESS, 0))
                if already_forked:
                    raise ImportError('[joblib] Attempting to do parallel computing '
                            'without protecting your import on a system that does '
                            'not support forking. To use parallel-computing in a '
                            'script, you must protect your main loop using "if '
                            "__name__ == '__main__'"
                            '". Please see the joblib documentation on Parallel '
                            'for more information'
                        )
                # Set an environment variable to avoid infinite loops
                os.environ[JOBLIB_SPAWNED_PROCESS] = '1'

                # Make sure to free as much memory as possible before forking
                gc.collect()
                poolargs = dict(
                    max_nbytes=self._max_nbytes,
                    mmap_mode=self._mmap_mode,
                    temp_folder=self._temp_folder,
                    verbose=max(0, self.verbose - 50),
                    context_id=0,  # the pool is used only for one call
                )
                if self._mp_context is not None:
                    # Use Python 3.4+ multiprocessing context isolation
                    poolargs['context'] = self._mp_context
                self._pool = MemmapingPool(n_jobs, **poolargs)

                # We are using multiprocessing, we also want to capture
                # KeyboardInterrupts
                self.exceptions.extend([KeyboardInterrupt, WorkerInterrupt])
        else:
            raise ValueError("Unsupported backend: %s" % self.backend)
        return n_jobs
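A hedged sketch of the condition that triggers the first warning above (joblib's public Parallel API; nesting a multiprocessing-backed loop inside a daemonic worker):

from joblib import Parallel, delayed

def inner(x):
    # Runs inside a daemonic worker process, so joblib falls back to
    # n_jobs=1 and emits the "cannot be nested" warning above.
    return Parallel(n_jobs=2, backend='multiprocessing')(
        delayed(abs)(i) for i in range(3))

results = Parallel(n_jobs=2, backend='multiprocessing')(
    delayed(inner)(x) for x in range(2))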
Example #45
 def holiday(self, value: Event):
     if (value is not None) and not self.valid_date(value):
         warnings.warn('Holiday not at the correct date, ignoring')
     self.__dict__['holiday'] = value
Example #46
 def add_nameday(self, nameday: Nameday):
     if self.valid_date(nameday):
         self.namedays.append(nameday)
     else:
         warnings.warn('Nameday not at the correct date, ignoring')
Example #47
print(f"Reading the aggregated regulons...", flush=True)
start = time.time()
with gzip.open(args.aggregated_regulons_fname.name, 'rb') as file_handler:
    regulons = pickle.load(file_handler)
print(f"... took {time.time() - start} seconds to run.", flush=True)

print(f"Reading AUCell matrix...", flush=True)
start = time.time()
# Read the regulons AUCell matrix
auc_mtx = pd.read_csv(args.auc_mtx_fname.name, sep='\t', header=0, index_col=0)
auc_mtx.columns.name = "Regulon"
print(f"... took {time.time() - start} seconds to run.", flush=True)

# Check whether the cell index between AUC matrix and expression matrix are the same
if len(auc_mtx.index.difference(ex_matrix_df.index)) > 0:
    warnings.warn("Difference detected in the cell indexes between the AUCell matrix and the expression matrix. Subsetting cells from AUC matrix...", Warning)
    auc_mtx = auc_mtx[auc_mtx.index.isin(ex_matrix_df.index)]

# Filter regulons based on the ones used in AUCell matrix
# In case of a lot of regulons (>800), this is required to avoid header limitation set by h5py
regulons = list(filter(lambda x: x.name in auc_mtx.columns, regulons))

# Create loom
print(f"Exporting to loom...", flush=True)
start = time.time()
# Create the basic loom
scope_loom = export_to_loom.SCopeLoom(
    ex_mtx=ex_matrix_df,
    regulons=regulons,
    title=args.title,
    nomenclature=args.nomenclature,
Example #48
 def moon(self, value: Event):
     if (value is not None) and (not self.valid_date(value)):
         warnings.warn('Moon not at the correct date, ignoring')
     self.__dict__['moon'] = value
Example #49
File: smtp.py Project: imuledx/aiosmtpd
 async def smtp_DATA(self, arg: str) -> None:
     if await self.check_helo_needed():
         return
     if await self.check_auth_needed("DATA"):
         return
     if not self.envelope.rcpt_tos:
         await self.push('503 Error: need RCPT command')
         return
     if arg:
         await self.push('501 Syntax: DATA')
         return
     await self.push('354 End data with <CR><LF>.<CR><LF>')
     data = []
     num_bytes = 0
     size_exceeded = False
     while self.transport is not None:  # pragma: nobranch
         try:
             line = await self._reader.readline()
             log.debug('DATA readline: %s', line)
         except asyncio.CancelledError:
             # The connection got reset during the DATA command.
             log.info('Connection lost during DATA')
             self._writer.close()
             raise
         if line == b'.\r\n':
             break
         num_bytes += len(line)
         if (not size_exceeded and self.data_size_limit
                 and num_bytes > self.data_size_limit):
             size_exceeded = True
             await self.push('552 Error: Too much mail data')
         if not size_exceeded:
             data.append(line)
     if size_exceeded:
         self._set_post_data_state()
         return
     # Remove extraneous carriage returns and de-transparency
     # according to RFC 5321, Section 4.5.2.
     for i, text in enumerate(data):
         if text.startswith(b'.'):
             data[i] = text[1:]
     content = original_content = EMPTYBYTES.join(data)
     if self._decode_data:
         if self.enable_SMTPUTF8:
             content = original_content.decode('utf-8',
                                               errors='surrogateescape')
         else:
             try:
                 content = original_content.decode('ascii', errors='strict')
             except UnicodeDecodeError:
                 # This happens if enable_smtputf8 is false, meaning that
                 # the server explicitly does not want to accept non-ascii,
                 # but the client ignores that and sends non-ascii anyway.
                 await self.push('500 Error: strict ASCII mode')
                 return
     self.envelope.content = content
     self.envelope.original_content = original_content
     # Call the new API first if it's implemented.
     if "DATA" in self._handle_hooks:
         status = await self._call_handler_hook('DATA')
     else:
         # Backward compatibility.
         status = MISSING
         if hasattr(self.event_handler, 'process_message'):
             warn('Use handler.handle_DATA() instead of .process_message()',
                  DeprecationWarning)
             args = (self.session.peer, self.envelope.mail_from,
                     self.envelope.rcpt_tos, self.envelope.content)
             if asyncio.iscoroutinefunction(
                     self.event_handler.process_message):
                 status = await self.event_handler.process_message(*args)
             else:
                 status = self.event_handler.process_message(*args)
             # The deprecated API can return None which means, return the
             # default status.  Don't worry about coverage for this case as
             # it's a deprecated API that will go away after 1.0.
             if status is None:  # pragma: nocover
                 status = MISSING
     self._set_post_data_state()
     await self.push('250 OK' if status is MISSING else status)
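When the new-style hook above is present, it receives the fully parsed envelope; a minimal handler sketch following the documented aiosmtpd handler convention:

class PrintingHandler:
    async def handle_DATA(self, server, session, envelope):
        # envelope.content is str or bytes, depending on decode_data
        print('Message from:', envelope.mail_from)
        print('Recipients:  ', envelope.rcpt_tos)
        print(envelope.content)
        return '250 Message accepted for delivery'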
Example #50
0
def load_inputs_from_input_arg_string(inputs_str, input_exprs_str,
                                      input_examples_str):
    """Parses input arg strings and create inputs feed_dict.

  Parses '--inputs' string for inputs to be loaded from file, and parses
  '--input_exprs' string for inputs to be evaluated from python expression.
  '--input_examples' string for inputs to be created from tf.example feature
  dictionary list.

  Args:
    inputs_str: A string that specified where to load inputs. Each input is
        separated by semicolon.
        * For each input key:
            '<input_key>=<filename>' or
            '<input_key>=<filename>[<variable_name>]'
        * The optional 'variable_name' key will be set to None if not specified.
        * File specified by 'filename' will be loaded using numpy.load. Inputs
            can be loaded from only .npy, .npz or pickle files.
        * The "[variable_name]" key is optional depending on the input file type
            as descripted in more details below.
        When loading from a npy file, which always contains a numpy ndarray, the
        content will be directly assigned to the specified input tensor. If a
        variable_name is specified, it will be ignored and a warning will be
        issued.
        When loading from a npz zip file, user can specify which variable within
        the zip file to load for the input tensor inside the square brackets. If
        nothing is specified, this function will check that only one file is
        included in the zip and load it for the specified input tensor.
        When loading from a pickle file, if no variable_name is specified in the
        square brackets, whatever that is inside the pickle file will be passed
        to the specified input tensor, else SavedModel CLI will assume a
        dictionary is stored in the pickle file and the value corresponding to
        the variable_name will be used.
    input_exprs_str: A string that specifies python expressions for inputs.
        * In the format of: '<input_key>=<python expression>'.
        * numpy module is available as np.
    input_examples_str: A string that specifies tf.Example with dictionary.
        * In the format of: '<input_key>=<[{feature:value list}]>'

  Returns:
    A dictionary that maps input tensor keys to numpy ndarrays.

  Raises:
    RuntimeError: An error when a key is specified, but the input file contains
        multiple numpy ndarrays, none of which matches the given key.
    RuntimeError: An error when no key is specified, but the input file contains
        more than one numpy ndarrays.
  """
    tensor_key_feed_dict = {}

    inputs = preprocess_inputs_arg_string(inputs_str)
    input_exprs = preprocess_input_exprs_arg_string(input_exprs_str)
    input_examples = preprocess_input_examples_arg_string(input_examples_str)

    for input_tensor_key, (filename, variable_name) in inputs.items():
        data = np.load(file_io.FileIO(filename, mode='rb'))

        # When a variable_name key is specified for the input file
        if variable_name:
            # if file contains a single ndarray, ignore the input name
            if isinstance(data, np.ndarray):
                warnings.warn(
                    'Input file %s contains a single ndarray. Name key \"%s\" ignored.'
                    % (filename, variable_name))
                tensor_key_feed_dict[input_tensor_key] = data
            else:
                if variable_name in data:
                    tensor_key_feed_dict[input_tensor_key] = data[
                        variable_name]
                else:
                    raise RuntimeError(
                        'Input file %s does not contain variable with name \"%s\".'
                        % (filename, variable_name))
        # When no key is specified for the input file.
        else:
            # Check if npz file only contains a single numpy ndarray.
            if isinstance(data, np.lib.npyio.NpzFile):
                variable_name_list = data.files
                if len(variable_name_list) != 1:
                    raise RuntimeError(
                        'Input file %s contains more than one ndarray. Please specify '
                        'the name of the ndarray to use.' % filename)
                tensor_key_feed_dict[input_tensor_key] = data[
                    variable_name_list[0]]
            else:
                tensor_key_feed_dict[input_tensor_key] = data

    # When input is a python expression:
    for input_tensor_key, py_expr_evaluated in input_exprs.items():
        if input_tensor_key in tensor_key_feed_dict:
            warnings.warn(
                'input_key %s has been specified with both --inputs and --input_exprs'
                ' options. Value in --input_exprs will be used.' %
                input_tensor_key)
        tensor_key_feed_dict[input_tensor_key] = py_expr_evaluated

    # When input is a tf.Example:
    for input_tensor_key, example in input_examples.items():
        if input_tensor_key in tensor_key_feed_dict:
            warnings.warn(
                'input_key %s has been specified in multiple options. Value in '
                '--input_examples will be used.' % input_tensor_key)
        tensor_key_feed_dict[input_tensor_key] = example
    return tensor_key_feed_dict
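As a usage sketch (the paths, tensor keys, and values below are made up, and this assumes the surrounding SavedModel CLI helpers are importable), the accepted string formats look like:

import numpy as np

np.save('/tmp/x.npy', np.ones((2, 3)))
np.savez('/tmp/data.npz', weights=np.zeros(4))

feed_dict = load_inputs_from_input_arg_string(
    inputs_str='x=/tmp/x.npy;w=/tmp/data.npz[weights]',
    input_exprs_str='bias=np.arange(3)',
    input_examples_str='')

# feed_dict now maps 'x', 'w' and 'bias' to numpy ndarrays.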
Example #51
0
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
git_cmd = [
    "git", "log", "--pretty=format:'%ad, commit %h'", "--date=local", "-n1"
]
try:
    html_last_updated_fmt = subprocess.Popen(
        git_cmd, stdout=subprocess.PIPE).communicate()[0].decode('utf-8')
except Exception:
    warnings.warn('Cannot get last updated time from git repository. '
                  'Not setting "html_last_updated_fmt".')

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True
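As a side note, communicate()[0] returns bytes on Python 3; a slightly tighter variant of the same idea, sketched with the standard-library subprocess.check_output, would be:

import subprocess
import warnings

try:
    # No literal quotes around the format string, unlike the shell-style
    # original, so the output is not wrapped in quote characters.
    html_last_updated_fmt = subprocess.check_output(
        ["git", "log", "--pretty=format:%ad, commit %h", "--date=local", "-n1"]
    ).decode('utf-8')
except Exception:
    warnings.warn('Cannot get last updated time from git repository. '
                  'Not setting "html_last_updated_fmt".')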
Example #52
0
File: smtp.py Project: imuledx/aiosmtpd
 def __init__(self,
              handler,
              *,
              data_size_limit=DATA_SIZE_DEFAULT,
              enable_SMTPUTF8=False,
              decode_data=False,
              hostname=None,
              ident=None,
              tls_context=None,
              require_starttls=False,
              timeout=300,
              auth_required=False,
              auth_require_tls=True,
              auth_exclude_mechanism: Optional[Iterable[str]] = None,
              auth_callback: Callable[[str, bytes, bytes], bool] = None,
              loop=None):
     self.__ident__ = ident or __ident__
     self.loop = loop if loop else make_loop()
     super().__init__(asyncio.StreamReader(loop=self.loop),
                      client_connected_cb=self._client_connected_cb,
                      loop=self.loop)
     self.event_handler = handler
     self.data_size_limit = data_size_limit
     self.enable_SMTPUTF8 = enable_SMTPUTF8
     self._decode_data = decode_data
     self.command_size_limits.clear()
     if hostname:
         self.hostname = hostname
     else:
         self.hostname = socket.getfqdn()
     self.tls_context = tls_context
     if tls_context:
          # Per RFC 3207 Section 4.1, certificate checking is part of the
          # SMTP protocol, not the SSL layer.
         self.tls_context.check_hostname = False
         self.tls_context.verify_mode = ssl.CERT_NONE
     self.require_starttls = tls_context and require_starttls
     self._timeout_duration = timeout
     self._timeout_handle = None
     self._tls_handshake_okay = True
     self._tls_protocol = None
     self._original_transport = None
     self.session = None
     self.envelope = None
     self.transport = None
     self._handler_coroutine = None
     if not auth_require_tls and auth_required:
         warn("Requiring AUTH while not requiring TLS "
              "can lead to security vulnerabilities!")
     self._auth_require_tls = auth_require_tls
     self._auth_callback = auth_callback or login_always_fail
     self._auth_required = auth_required
     self.authenticated = False
     # Get hooks & methods to significantly speedup getattr's
     self._auth_methods: Dict[str, _AuthMechAttr] = {
         m.replace("auth_", ""): _AuthMechAttr(getattr(h, m), h is self)
         for h in (self, handler) for m in dir(h) if m.startswith("auth_")
     }
     for m in (auth_exclude_mechanism or []):
         self._auth_methods.pop(m, None)
     msg = "Available AUTH mechanisms:"
     for m, impl in sorted(
             self._auth_methods.items()):  # type: str, _AuthMechAttr
         msg += f" {m}{'(builtin)' if impl.is_builtin else ''}"
     log.info(msg)
     self._handle_hooks: Dict[str, Callable] = {
         m.replace("handle_", ""): getattr(handler, m)
         for m in dir(handler) if m.startswith("handle_")
     }
     self._smtp_methods: Dict[str, Any] = {
         m.replace("smtp_", ""): getattr(self, m)
         for m in dir(self) if m.startswith("smtp_")
     }
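This protocol class is rarely constructed by hand; the documented aiosmtpd entry point wraps it in a Controller, roughly like this (the handler is any object exposing handle_* hooks, such as the sketch after Example #49):

from aiosmtpd.controller import Controller

class NullHandler:
    async def handle_DATA(self, server, session, envelope):
        return '250 Message accepted for delivery'

controller = Controller(NullHandler(), hostname='127.0.0.1', port=8025)
controller.start()   # SMTP(handler, ...) is instantiated for us in a background thread
# ... talk to 127.0.0.1:8025 with smtplib ...
controller.stop()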
Example #53
0
 def _safe(self, fn):
     try:
         fn()
     except Exception as e:
         warnings.warn("ReconnectFixture couldn't "
                       "close connection: %s" % e)
Example #54
0
File: range.py Project: fomar5989/pandas
    def __new__(cls,
                start=None,
                stop=None,
                step=None,
                dtype=None,
                copy=False,
                name=None,
                fastpath=None):

        if fastpath is not None:
            warnings.warn(
                "The 'fastpath' keyword is deprecated, and will be "
                "removed in a future version.",
                FutureWarning,
                stacklevel=2)
            if fastpath:
                return cls._simple_new(start, stop, step, name=name)

        cls._validate_dtype(dtype)

        # RangeIndex
        if isinstance(start, RangeIndex):
            if name is None:
                name = start.name
            return cls._simple_new(name=name,
                                   **dict(start._get_data_as_items()))

        # validate the arguments
        def ensure_int(value, field):
            msg = ("RangeIndex(...) must be called with integers,"
                   " {value} was passed for {field}")
            if not is_scalar(value):
                raise TypeError(
                    msg.format(value=type(value).__name__, field=field))
            try:
                new_value = int(value)
                assert (new_value == value)
            except (TypeError, ValueError, AssertionError):
                raise TypeError(
                    msg.format(value=type(value).__name__, field=field))

            return new_value

        if com._all_none(start, stop, step):
            msg = "RangeIndex(...) must be called with integers"
            raise TypeError(msg)
        elif start is None:
            start = 0
        else:
            start = ensure_int(start, 'start')
        if stop is None:
            stop = start
            start = 0
        else:
            stop = ensure_int(stop, 'stop')
        if step is None:
            step = 1
        elif step == 0:
            raise ValueError("Step must not be zero")
        else:
            step = ensure_int(step, 'step')

        return cls._simple_new(start, stop, step, name)
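The argument juggling above gives RangeIndex the same call semantics as the built-in range, for example:

import pandas as pd

pd.RangeIndex(5)            # RangeIndex(start=0, stop=5, step=1)
pd.RangeIndex(2, 10, 2)     # RangeIndex(start=2, stop=10, step=2)

# Non-integers and a zero step are rejected by the validation above:
# pd.RangeIndex(1.5)        raises TypeError
# pd.RangeIndex(0, 10, 0)   raises ValueError("Step must not be zero")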
Example #55
0
	def check_pcon(self,static,dynamic):
		"""Checks operator string lists for particle number (magnetisation) conservartion of the combined operator.

		Parameters
		-----------
		static: list
			Static operators formatted to be passed into the static argument of the `hamiltonian` class.
		dynamic: list
			Dynamic operators formatted to be passed into the dynamic argument of the `hamiltonian` class.

		Examples
		---------

		"""
		if self._check_pcon is None:
			warnings.warn("Test for particle conservation not implemented for {0}, to turn off this warning set check_pcon=False in hamiltonian".format(type(self)),UserWarning,stacklevel=3)
			return

		if self._check_pcon:
			static_list,dynamic_list = self._get_local_lists(static,dynamic)
			static_list_exp = self._expand_local_list(static_list)
			dynamic_list_exp = self._expand_local_list(dynamic_list)
			static_list_exp,dynamic_list_exp = self._consolidate_local_lists(static_list_exp,dynamic_list_exp)
			con = ""

			odd_ops = []
			for opstr,indx,J,ii in static_list_exp:	
				p = opstr.count("+")
				m = opstr.count("-")

				if (p-m) != 0:
					for i in ii:
						if static_list[i] not in odd_ops:
							odd_ops.append(static_list[i])


	
			if odd_ops:
				unique_opstrs = list(set( next(iter(zip(*tuple(odd_ops))))) )
				unique_odd_ops = []
				[ unique_odd_ops.append(ele) for ele in odd_ops if ele not in unique_odd_ops]
				warnings.warn("The following static operator strings do not conserve particle number{1}: {0}".format(unique_opstrs,con),UserWarning,stacklevel=4)
				try:
					user_input = raw_input("Display all {0} couplings? (y or n) ".format(len(odd_ops)) )
				except NameError:
					user_input = input("Display all {0} couplings? (y or n) ".format(len(odd_ops)) )

				if user_input == 'y':
					print(" these operators do not conserve particle number{0}:".format(con))
					print("   (opstr, indices, coupling)")
					for i,op in enumerate(unique_odd_ops):
						print("{0}. {1}".format(i+1, op))
				raise TypeError("Hamiltonian does not conserve particle number{0} To turn off check, use check_pcon=False in hamiltonian.".format(con))

			


			odd_ops = []
			for opstr,indx,J,f,f_args,ii in dynamic_list_exp:	
				p = opstr.count("+")
				m = opstr.count("-")
				if (p-m) != 0:
					for i in ii:
						if dynamic_list[i] not in odd_ops:
							odd_ops.append(dynamic_list[i])

	
			if odd_ops:
				unique_opstrs = list(set( next(iter(zip(*tuple(odd_ops))))))
				unique_odd_ops = []
				[ unique_odd_ops.append(ele) for ele in odd_ops if ele not in unique_odd_ops]
				warnings.warn("The following static operator strings do not conserve particle number{1}: {0}".format(unique_opstrs,con),UserWarning,stacklevel=4)
				try:
					user_input = raw_input("Display all {0} couplings? (y or n) ".format(len(odd_ops)) )
				except NameError:
					user_input = input("Display all {0} couplings? (y or n) ".format(len(odd_ops)) )
				if user_input == 'y':
					print(" these operators do not conserve particle number{0}:".format(con))
					print("   (opstr, indices, coupling)")
					for i,op in enumerate(unique_odd_ops):
						print("{0}. {1}".format(i+1, op))
				raise TypeError("Hamiltonian does not conserve particle number{0} To turn off check, use check_pcon=False in hamiltonian.".format(con))

			print("Particle conservation check passed!")
Example #56
0
 def _safe(self, fn):
     try:
         fn()
     except Exception as e:
         warnings.warn("testing_reaper couldn't "
                       "rollback/close connection: %s" % e)
Example #57
0
import warnings

warnings.warn(
    "my_django.conf.urls.defaults is deprecated; use my_django.conf.urls instead",
    PendingDeprecationWarning)

from my_django.conf.urls import (handler403, handler404, handler500, include,
                                 patterns, url)
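PendingDeprecationWarning is ignored by the default warning filters, so importers only see this message once the filter is relaxed; a small sketch with the standard warnings machinery:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')   # surface even pending deprecations
    warnings.warn(
        "my_django.conf.urls.defaults is deprecated; use my_django.conf.urls instead",
        PendingDeprecationWarning)

assert issubclass(caught[0].category, PendingDeprecationWarning)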
Example #58
0
import sys
from warnings import warn


def get_parent(globals, level):
    """
    parent, name = get_parent(globals, level)

    Return the package that an import is being performed in.  If globals comes
    from the module foo.bar.bat (not itself a package), this returns the
    sys.modules entry for foo.bar.  If globals is from a package's __init__.py,
    the package's entry in sys.modules is returned.

    If globals doesn't come from a package or a module in a package, or a
    corresponding entry is not found in sys.modules, None is returned.
    """
    orig_level = level

    if not level or not isinstance(globals, dict):
        return None, ''

    pkgname = globals.get('__package__', None)

    if pkgname is not None:
        # __package__ is set, so use it
        if not hasattr(pkgname, 'rindex'):
            raise ValueError('__package__ set to non-string')
        if len(pkgname) == 0:
            if level > 0:
                raise ValueError('Attempted relative import in non-package')
            return None, ''
        name = pkgname
    else:
        # __package__ not set, so figure it out and set it
        if '__name__' not in globals:
            return None, ''
        modname = globals['__name__']

        if '__path__' in globals:
            # __path__ is set, so modname is already the package name
            globals['__package__'] = name = modname
        else:
            # Normal module, so work out the package name if any
            lastdot = modname.rfind('.')
            if lastdot < 0 < level:
                raise ValueError("Attempted relative import in non-package")
            if lastdot < 0:
                globals['__package__'] = None
                return None, ''
            globals['__package__'] = name = modname[:lastdot]

    dot = len(name)
    for x in range(level, 1, -1):
        try:
            dot = name.rindex('.', 0, dot)
        except ValueError:
            raise ValueError("attempted relative import beyond top-level "
                             "package")
    name = name[:dot]

    try:
        parent = sys.modules[name]
    except KeyError:
        if orig_level < 1:
            warn("Parent module '%.200s' not found while handling absolute "
                 "import" % name)
            parent = None
        else:
            raise SystemError("Parent module '%.200s' not loaded, cannot "
                              "perform relative import" % name)

    # We expect, but can't guarantee, if parent != None, that:
    # - parent.__name__ == name
    # - parent.__dict__ is globals
    # If this is violated...  Who cares?
    return parent, name
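A usage sketch of the function above, registering a dummy package so the sys.modules lookup succeeds (module names here are made up):

import sys
import types

# Register a dummy package so the sys.modules lookup succeeds.
sys.modules['foo.bar'] = types.ModuleType('foo.bar')

fake_globals = {'__name__': 'foo.bar.baz', '__package__': None}
parent, name = get_parent(fake_globals, level=1)
print(name)                               # 'foo.bar'
print(parent is sys.modules['foo.bar'])   # True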
Example #59
0
from object import Segmento as sg
from util import pickle_util as pk
from util import accuracy as ac
from util import layout as lay
from util import disegna as dsg
from object import Superficie as fc
import parameters as par
import pickle
import os
import glob
import shutil
import time
import cv2
import warnings

warnings.warn("Settare i parametri del lateralLine e cvThresh")


def start_main(parametri_obj, path_obj):
    #----------------------------1.0_ROOM LAYOUT------------------------------------
    #------ layout start
    # read the original image in grayscale and clean it up with thresholding
    img_rgb = cv2.imread(path_obj.metricMap)
    img_ini = img_rgb.copy()  # copy the image
    # 127 for some datasets, 255 for others
    ret, thresh1 = cv2.threshold(img_rgb, parametri_obj.cv2thresh, 255,
                                 cv2.THRESH_BINARY)  # trial

    #------------------1.1_CANNY AND HOUGH TO FIND WALLS---------------------------------
    walls, canny = lay.start_canny_ed_hough(thresh1, parametri_obj)
    #walls , canny = lay.start_canny_ed_hough(img_rgb,parametri_obj)
Example #60
0
	def check_symm(self,static,dynamic):
		"""Checks operator string lists for the required symmetries of the combined operator.

		Parameters
		-----------
		static: list
			Static operators formatted to be passed into the static argument of the `hamiltonian` class.
		dynamic: list
			Dynamic operators formatted to be passed into the dynamic argument of the `hamiltonian` class.

		Examples
		---------

		"""
		if self._check_symm is None:
			warnings.warn("Test for symmetries not implemented for {0}, to turn off this warning set check_symm=False in hamiltonian".format(type(self)),UserWarning,stacklevel=3)
			return

		static_blocks,dynamic_blocks = self._check_symm(static,dynamic)

		# define an arbitrarily complicated test number
		t = _np.cos( (_np.pi/_np.exp(0))**( 1.0/_np.euler_gamma ) )

		for symm in static_blocks.keys():
			if len(static_blocks[symm]) == 2:
				odd_ops,missing_ops = static_blocks[symm]
				ops = list(missing_ops)
				ops.extend(odd_ops)
				unique_opstrs = list(set( next(iter(zip(*tuple(ops))))) )
				if unique_opstrs:
					unique_missing_ops = []
					unique_odd_ops = []
					[ unique_missing_ops.append(ele) for ele in missing_ops if ele not in unique_missing_ops]
					[ unique_odd_ops.append(ele) for ele in odd_ops if ele not in unique_odd_ops]
					warnings.warn("The following static operator strings do not obey {0}: {1}".format(symm,unique_opstrs),UserWarning,stacklevel=4)
					try:
						user_input = raw_input("Display all {0} couplings? (y or n) ".format(len(unique_missing_ops) + len(unique_odd_ops)) )
					except NameError:
						user_input = input("Display all {0} couplings? (y or n) ".format(len(unique_missing_ops) + len(unique_odd_ops)) )
					if user_input == 'y':
						print(" these operators are needed for {}:".format(symm))
						print("   (opstr, indices, coupling)")
						for i,op in enumerate(unique_missing_ops):
							print("{0}. {1}".format(i+1, op))
						print(" ")
						print(" these do not obey the {}:".format(symm))
						print("   (opstr, indices, coupling)")
						for i,op in enumerate(unique_odd_ops):
							print("{0}. {1}".format(i+1, op))
					raise TypeError("Hamiltonian does not obey {0}! To turn off check, use check_symm=False in hamiltonian.".format(symm))


			elif len(static_blocks[symm]) == 1:
				missing_ops, = static_blocks[symm]
				unique_opstrs = list(set( next(iter(zip(*tuple(missing_ops))))) )
				if unique_opstrs:
					unique_missing_ops = []
					[ unique_missing_ops.append(ele) for ele in missing_ops if ele not in unique_missing_ops]
					warnings.warn("The following static operator strings do not obey {0}: {1}".format(symm,unique_opstrs),UserWarning,stacklevel=4)
					try:
						user_input = raw_input("Display all {0} couplings? (y or n) ".format(len(unique_missing_ops)) )
					except NameError:
						user_input = input("Display all {0} couplings? (y or n) ".format(len(unique_missing_ops)) )

					if user_input == 'y':
						print(" these operators are needed for {}:".format(symm))
						print("   (opstr, indices, coupling)")
						for i,op in enumerate(unique_missing_ops):
							print("{0}. {1}".format(i+1, op))
					raise TypeError("Hamiltonian does not obey {0}! To turn off check, use check_symm=False in hamiltonian.".format(symm))
			else:
				continue


		for symm in dynamic_blocks.keys():
			if len(dynamic_blocks[symm]) == 2:
				odd_ops,missing_ops = dynamic_blocks[symm]
				ops = list(missing_ops)
				ops.extend(odd_ops)
				unique_opstrs = list(set( next(iter(zip(*tuple(ops))))) )
				if unique_opstrs:
					unique_missing_ops = []
					unique_odd_ops = []
					[ unique_missing_ops.append(ele) for ele in missing_ops if ele not in unique_missing_ops]
					[ unique_odd_ops.append(ele) for ele in odd_ops if ele not in unique_odd_ops]
					warnings.warn("The following dynamic operator strings do not obey {0}: {1}".format(symm,unique_opstrs),UserWarning,stacklevel=4)
					try:
						user_input = raw_input("Display all {0} couplings? (y or n) ".format(len(unique_missing_ops) + len(unique_odd_ops)) )
					except NameError:
						user_input = input("Display all {0} couplings? (y or n) ".format(len(unique_missing_ops) + len(unique_odd_ops)) )

					if user_input == 'y':
						print(" these operators are missing for {}:".format(symm))
						print("   (opstr, indices, coupling)")
						for i,op in enumerate(unique_missing_ops):
							print("{0}. {1}".format(i+1, op))
						print(" ")
						print(" these do not obey {}:".format(symm))
						print("   (opstr, indices, coupling)")
						for i,op in enumerate(unique_odd_ops):
							print("{0}. {1}".format(i+1, op))
					raise TypeError("Hamiltonian does not obey {0}! To turn off check, use check_symm=False in hamiltonian.".format(symm))


			elif len(dynamic_blocks[symm]) == 1:
				missing_ops, = dynamic_blocks[symm]
				unique_opstrs = list(set( next(iter(zip(*tuple(missing_ops))))) )
				if unique_opstrs:
					unique_missing_ops = []
					[ unique_missing_ops.append(ele) for ele in missing_ops if ele not in unique_missing_ops]
					warnings.warn("The following dynamic operator strings do not obey {0}: {1}".format(symm,unique_opstrs),UserWarning,stacklevel=4)
					try:
						user_input = raw_input("Display all {0} couplings? (y or n) ".format(len(unique_missing_ops)) )
					except NameError:
						user_input = input("Display all {0} couplings? (y or n) ".format(len(unique_missing_ops)) )

					if user_input == 'y':
						print(" these operators are needed for {}:".format(symm))
						print("   (opstr, indices, coupling)")
						for i,op in enumerate(unique_missing_ops):
							print("{0}. {1}".format(i+1, op))
					raise TypeError("Hamiltonian does not obey {0}! To turn off check, use check_symm=False in hamiltonian.".format(symm))
			else:
				continue

		print("Symmetry checks passed!")