Example #1
    def excepthook(type, value, tb):
        import traceback

        traceback.print_exception(type, value, tb)

        # Cause an exception if PyGTK can't open a display. Normally this just
        # produces a warning, but the lack of a display eventually causes a
        # segmentation fault. See http://live.gnome.org/PyGTK/WhatsNew210.
        import warnings
        warnings.filterwarnings("error", module="gtk")
        import gtk
        warnings.resetwarnings()

        gtk.gdk.threads_enter()

        from zenmapGUI.higwidgets.higdialogs import HIGAlertDialog
        from zenmapGUI.CrashReport import CrashReport
        if type == ImportError:
            d = HIGAlertDialog(type=gtk.MESSAGE_ERROR,
                message_format=_("Import error"),
                secondary_text=_("""A required module was not found.

""" + unicode(value)))
            d.run()
            d.destroy()
        else:
            c = CrashReport(type, value, tb)
            c.show_all()
            gtk.main()

        gtk.gdk.threads_leave()

        gtk.main_quit()
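
A note on wiring: a hook like this only takes effect once it is assigned to sys.excepthook, which Python invokes for any uncaught exception. A minimal sketch of the installation step, which is not shown in the example itself:

    import sys
    sys.excepthook = excepthook  # called with (type, value, traceback) on uncaught errors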
Example #2
    def alert(self, matches):
        body = u'⚠ *%s* ⚠ ```\n' % (self.create_title(matches))
        for match in matches:
            body += unicode(BasicMatchString(self.rule, match))
            # Separate text of aggregated alerts with dashes
            if len(matches) > 1:
                body += '\n----------------------------------------\n'
        body += u' ```'

        headers = {'content-type': 'application/json'}
        # set https proxy, if it was provided
        proxies = {'https': self.telegram_proxy} if self.telegram_proxy else None
        payload = {
            'chat_id': self.telegram_room_id,
            'text': body,
            'parse_mode': 'markdown',
            'disable_web_page_preview': True
        }

        try:
            response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
            warnings.resetwarnings()
            response.raise_for_status()
        except RequestException as e:
            raise EAException("Error posting to Telegram: %s" % e)

        elastalert_logger.info(
            "Alert sent to Telegram room %s" % self.telegram_room_id)
Example #3
 def test_spectrogram(self):
     """
     Create spectrogram plotting examples in tests/output directory.
     """
     # Create dynamic test_files to avoid dependencies of other modules.
     # set specific seed value such that random numbers are reproducible
     np.random.seed(815)
     head = {
         'network': 'BW', 'station': 'BGLD',
         'starttime': UTCDateTime(2007, 12, 31, 23, 59, 59, 915000),
         'sampling_rate': 200.0, 'channel': 'EHE'}
     tr = Trace(data=np.random.randint(0, 1000, 824), header=head)
     st = Stream([tr])
     # 1 - using log=True
     reltol = 1
     if MATPLOTLIB_VERSION < [1, 2, 0]:
         reltol = 2000
     with ImageComparison(self.path, 'spectrogram_log.png',
                          reltol=reltol) as ic:
         with warnings.catch_warnings(record=True):
             warnings.resetwarnings()
             np_err = np.seterr(all="warn")
             spectrogram.spectrogram(st[0].data, log=True, outfile=ic.name,
                                     samp_rate=st[0].stats.sampling_rate,
                                     show=False)
             np.seterr(**np_err)
     # 2 - using log=False
     reltol = 1
     if MATPLOTLIB_VERSION < [1, 3, 0]:
         reltol = 3
     with ImageComparison(self.path, 'spectrogram.png',
                          reltol=reltol) as ic:
         spectrogram.spectrogram(st[0].data, log=False, outfile=ic.name,
                                 samp_rate=st[0].stats.sampling_rate,
                                 show=False)
Example #4
def validate(file_or_object):
    """
    Validate a given SEIS-PROV file.

    :param file_or_object: The filename or file-like object to validate.
    """
    errors = []
    warns = []

    with warnings.catch_warnings(record=True) as w:
        warnings.resetwarnings()
        warnings.simplefilter("always")

        try:
            _validate(file_or_object)
        except SeisProvValidationException as e:
            errors.append(e.message)

    for warn in w:
        warn = warn.message
        if not isinstance(warn, SeisProvValidationWarning):
            continue
        warns.append(warn.args[0])

    return SeisProvValidationResult(errors=errors,
                                    warnings=warns)
Example #5
    def alert(self, matches):
        body = self.create_alert_body(matches)

        # HipChat sends 400 bad request on messages longer than 10000 characters
        if (len(body) > 9999):
            body = body[:9980] + '..(truncated)'

        # Use appropriate line ending for text/html
        if self.hipchat_message_format == 'html':
            body = body.replace('\n', '<br />')

        # Post to HipChat
        headers = {'content-type': 'application/json'}
        # set https proxy, if it was provided
        proxies = {'https': self.hipchat_proxy} if self.hipchat_proxy else None
        payload = {
            'color': self.hipchat_msg_color,
            'message': body,
            'message_format': self.hipchat_message_format,
            'notify': self.hipchat_notify,
            'from': self.hipchat_from
        }

        try:
            if self.hipchat_ignore_ssl_errors:
                requests.packages.urllib3.disable_warnings()
            response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers,
                                     verify=not self.hipchat_ignore_ssl_errors,
                                     proxies=proxies)
            warnings.resetwarnings()
            response.raise_for_status()
        except RequestException as e:
            raise EAException("Error posting to HipChat: %s" % e)
        elastalert_logger.info("Alert sent to HipChat room %s" % self.hipchat_room_id)
Example #6
    def test_invalid_keys(self):
        """
        All the builtin backends (except memcached, see below) should warn on
        keys that would be refused by memcached. This encourages portable
        caching code without making it too difficult to use production backends
        with more liberal key rules. Refs #6447.

        """
        # On Python 2.6+ we could use the catch_warnings context
        # manager to test this warning nicely. Since we can't do that
        # yet, the cleanest option is to temporarily ask for
        # CacheKeyWarning to be raised as an exception.
        warnings.simplefilter("error", CacheKeyWarning)

        # memcached does not allow whitespace or control characters in keys
        self.assertRaises(CacheKeyWarning, self.cache.set, 'key with spaces', 'value')
        # memcached limits key length to 250
        self.assertRaises(CacheKeyWarning, self.cache.set, 'a' * 251, 'value')

        # The warnings module has no public API for getting the
        # current list of warning filters, so we can't save that off
        # and reset to the previous value, we have to globally reset
        # it. The effect will be the same, as long as the Django test
        # runner doesn't add any global warning filters (it currently
        # does not).
        warnings.resetwarnings()
        warnings.simplefilter("ignore", PendingDeprecationWarning)
Example #7
 def setUp(self):
     warnings.resetwarnings()
     self.tmpfp = NamedTemporaryFile(prefix='mmap')
     self.shape = (3,4)
     self.dtype = 'float32'
     self.data = arange(12, dtype=self.dtype)
     self.data.resize(self.shape)
Example #8
def _assert_warns_context_manager(warning_class=None, warnings_test=None):
    """
    Builds a context manager for testing code that should throw a warning.
    This will look for a given class, call a custom test, or both.

    Args:
        warning_class - a class or subclass of Warning. If not None, then
            the context manager will raise an AssertionError if the block
            does not throw at least one warning of that type.
        warnings_test - a function which takes a list of warnings caught,
            and makes a number of assertions about the result. If the function
            returns without an exception, the context manager will consider
            this a successful assertion.
    """
    with warnings.catch_warnings(record=True) as caught:
        # All warnings should be triggered.
        warnings.resetwarnings()
        if warning_class:
            warnings.simplefilter('ignore')
            warnings.simplefilter('always', category=warning_class)
        else:
            warnings.simplefilter('always')
        # Do something that ought to trigger a warning.
        yield
        # We should have received at least one warning.
        assert_gt(len(caught), 0, 'expected at least one warning to be thrown')
        # Run the custom test against the warnings we caught.
        if warnings_test:
            warnings_test(caught)
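
Note that a generator function like this only works in a with statement if it is decorated with contextlib.contextmanager at its definition site; the decorator has presumably been stripped from the listing. A sketch of the declaration and a hypothetical use:

    import contextlib

    @contextlib.contextmanager
    def _assert_warns_context_manager(warning_class=None, warnings_test=None):
        ...  # body as in the example above

    with _assert_warns_context_manager(warning_class=DeprecationWarning):
        some_deprecated_call()  # hypothetical callable expected to warn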
Example #9
def open_data(gdfilename, tracknames, verbose=False):
    warnings.simplefilter("ignore")
    with Genome(gdfilename, "r+") as genome:
        for trackname in tracknames:
            genome.add_track_continuous(trackname)

    warnings.resetwarnings()
Example #10
def test_read_epochs_bad_events():
    """Test epochs when events are at the beginning or the end of the file
    """
    # Event at the beginning
    epochs = Epochs(
        raw, np.array([[raw.first_samp, 0, event_id]]), event_id, tmin, tmax, picks=picks, baseline=(None, 0)
    )
    with warnings.catch_warnings(record=True):
        evoked = epochs.average()

    epochs = Epochs(
        raw, np.array([[raw.first_samp, 0, event_id]]), event_id, tmin, tmax, picks=picks, baseline=(None, 0)
    )
    epochs.drop_bad_epochs()
    with warnings.catch_warnings(record=True):
        evoked = epochs.average()

    # Event at the end
    epochs = Epochs(
        raw, np.array([[raw.last_samp, 0, event_id]]), event_id, tmin, tmax, picks=picks, baseline=(None, 0)
    )

    with warnings.catch_warnings(record=True):
        evoked = epochs.average()
        assert evoked
    warnings.resetwarnings()
Example #11
def test_misspecifications():
    # Tests for model specification and misspecification exceptions
    endog = np.arange(20).reshape(10,2)

    # Bad trend specification
    assert_raises(ValueError, varmax.VARMAX, endog, order=(1,0), trend='')

    # Bad error_cov_type specification
    assert_raises(ValueError, varmax.VARMAX, endog, order=(1,0), error_cov_type='')

    # Bad order specification
    assert_raises(ValueError, varmax.VARMAX, endog, order=(0,0))

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        varmax.VARMAX(endog, order=(1,1))

    # Warning with VARMA specification
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')

        varmax.VARMAX(endog, order=(1,1))

        message = ('Estimation of VARMA(p,q) models is not generically robust,'
                   ' due especially to identification issues.')
        assert_equal(str(w[0].message), message)
    warnings.resetwarnings()
Example #12
 def expect_deprecations(self):
     """Call this if the test expects to call deprecated function."""
     warnings.resetwarnings()
     warnings.filterwarnings('ignore', category=DeprecationWarning,
                             module='^keystoneclient\\.')
     warnings.filterwarnings('ignore', category=DeprecationWarning,
                             module='^debtcollector\\.')
Example #13
 def test_called_twice_from_class(self):
     import warnings
     from guillotina.component._declaration import adapts
     from zope.interface import Interface
     from zope.interface._compat import PYTHON3
     class IFoo(Interface):
         pass
     class IBar(Interface):
         pass
     globs = {'adapts': adapts, 'IFoo': IFoo, 'IBar': IBar}
     locs = {}
     CODE = "\n".join([
         'class Foo(object):',
         '    adapts(IFoo)',
         '    adapts(IBar)',
         ])
     with warnings.catch_warnings(record=True) as log:
         warnings.resetwarnings()
         try:
             exec(CODE, globs, locs)
         except TypeError:
             if not PYTHON3:
                 self.assertEqual(len(log), 0) # no longer warn
         else:
             self.fail("Didn't raise TypeError")
Example #14
    def Configuration(self):
        if self.config:
            return self.config.getElementsByTagName('configuration')[0]

        warnings.filterwarnings("ignore")
        cib_file=os.tmpnam()
        warnings.resetwarnings()
        
        os.system("rm -f "+cib_file)

        if self.Env["ClobberCIB"] == 1:
            if self.Env["CIBfilename"] == None:
                self.debug("Creating new CIB in: " + cib_file)
                os.system("echo \'"+ self.default_cts_cib +"\' > "+ cib_file)
            else:
                os.system("cp "+self.Env["CIBfilename"]+" "+cib_file)
        else:            
            if 0 != self.rsh.echo_cp(
                self.Env["nodes"][0], "/var/lib/heartbeat/crm/cib.xml", None, cib_file):
                raise ValueError("Can not copy file to %s, maybe permission denied"%cib_file)

        self.config = parse(cib_file)
        os.remove(cib_file)

        return self.config.getElementsByTagName('configuration')[0]
Example #15
def main():
    infilename = ''
    outfilename = ''

    #Get the command-line arguments
    args = sys.argv[1:]
    
    if len(args) < 4:
        usage()
        sys.exit(1)
        
    for i in range(0,len(args)):
        if args[i] == '-i':
            infilename = args[i+1]
        elif args[i] == '-o':
            outfilename = args[i+1]
    if os.path.isfile(infilename): 
        try:
            # Perform the translation using the methods from the OpenIOC to CybOX Script
            openioc_indicators = openioc.parse(infilename)
            observables_obj = openioc_to_cybox.generate_cybox(openioc_indicators, infilename, True)
            observables_cls = Observables.from_obj(observables_obj)

            # Set the namespace to be used in the STIX Package
            stix.utils.set_id_namespace({"https://github.com/STIXProject/openioc-to-stix":"openiocToSTIX"})

            # Wrap the created Observables in a STIX Package/Indicator
            stix_package = STIXPackage()
            # Add the OpenIOC namespace
            input_namespaces = {"http://openioc.org/":"openioc"}
            stix_package.__input_namespaces__ = input_namespaces

            for observable in observables_cls.observables:
                indicator_dict = {}
                producer_dict = {}
                producer_dict['tools'] = [{'name':'OpenIOC to STIX Utility', 'version':str(__VERSION__)}]
                indicator_dict['producer'] = producer_dict
                indicator_dict['title'] = "CybOX-represented Indicator Created from OpenIOC File"
                indicator = Indicator.from_dict(indicator_dict)
                indicator.add_observable(observable)
                stix_package.add_indicator(indicator)

            # Create and write the STIX Header
            stix_header = STIXHeader()
            stix_header.package_intent = "Indicators - Malware Artifacts"
            stix_header.description = "CybOX-represented Indicators Translated from OpenIOC File"
            stix_package.stix_header = stix_header

            # Write the generated STIX Package as XML to the output file
            outfile = open(outfilename, 'w')
            # Ignore any warnings - temporary fix for no schemaLocation w/ namespace
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                outfile.write(stix_package.to_xml())
                warnings.resetwarnings()
            outfile.flush()
            outfile.close()
        except Exception, err:
            print('\nError: %s\n' % str(err))
            traceback.print_exc()
Example #16
    def solve(self, objective, constraints, cached_data,
              warm_start, verbose, solver_opts):
        """Returns the result of the call to the solver.

        Parameters
        ----------
        objective : CVXPY objective object
            Raw objective passed by CVXPY. Can be convex/concave.
        constraints : list
            The list of raw constraints.

        Returns
        -------
        tuple
            (status, optimal value, primal, equality dual, inequality dual)
        """

        sym_data = self.get_sym_data(objective, constraints)

        id_map = sym_data.var_offsets
        N = sym_data.x_length

        extractor = QuadCoeffExtractor(id_map, N)

        # Extract the coefficients
        (Ps, Q, R) = extractor.get_coeffs(objective.args[0])

        P = Ps[0]
        q = np.asarray(Q.todense()).flatten()
        r = R[0]

        # Forming the KKT system
        if len(constraints) > 0:
            Cs = [extractor.get_coeffs(c._expr)[1:] for c in constraints]
            As = sp.vstack([C[0] for C in Cs])
            bs = np.array([C[1] for C in Cs]).flatten()
            lhs = sp.bmat([[2*P, As.transpose()], [As, None]], format='csr')
            rhs = np.concatenate([-q, -bs])
        else:  # avoiding calling vstack with empty list
            lhs = 2*P
            rhs = -q

        warnings.filterwarnings('error')

        # Actually solving the KKT system
        try:
            sol = SLA.spsolve(lhs.tocsr(), rhs)
            x = np.array(sol[:N])
            nu = np.array(sol[N:])
            p_star = np.dot(x.transpose(), P*x + q) + r
        except SLA.MatrixRankWarning:
            x = None
            nu = None
            p_star = None

        warnings.resetwarnings()

        result_dict = {s.PRIMAL: x, s.EQ_DUAL: nu, s.VALUE: p_star}

        return self.format_results(result_dict, None, cached_data)
Example #17
    def install_config(self, node):
        if not self.ns.WaitForNodeToComeUp(node):
            self.log("Node %s is not up." % node)
            return None

        if not self.CIBsync.has_key(node) and self.Env["ClobberCIB"] == 1:
            self.CIBsync[node] = 1
            self.rsh.remote_py(node, "os", "system", "rm -f /var/lib/heartbeat/crm/cib.xml")
            self.rsh.remote_py(node, "os", "system", "rm -f /var/lib/heartbeat/crm/cib.xml.sig")
            self.rsh.remote_py(node, "os", "system", "rm -f /var/lib/heartbeat/crm/cib.xml.last")
            self.rsh.remote_py(node, "os", "system", "rm -f /var/lib/heartbeat/crm/cib.xml.sig.last")

            # Only install the CIB on the first node, all the other ones will pick it up from there
            if self.cib_installed == 1:
                return None

            self.cib_installed = 1
            if self.Env["CIBfilename"] == None:
                self.debug("Installing Generated CIB on node %s" %(node))
                warnings.filterwarnings("ignore")
                cib_file=os.tmpnam()
                warnings.resetwarnings()
                os.system("rm -f "+cib_file)
                self.debug("Creating new CIB for " + node + " in: " + cib_file)
                os.system("echo \'" + self.default_cts_cib + "\' > " + cib_file)
                if 0!=self.rsh.echo_cp(None, cib_file, node, "/var/lib/heartbeat/crm/cib.xml"):
                    raise ValueError("Can not create CIB on %s "%node)

                os.system("rm -f "+cib_file)
            else:
                self.debug("Installing CIB (%s) on node %s" %(self.Env["CIBfilename"], node))
                if 0!=self.rsh.cp(self.Env["CIBfilename"], "root@" + (self["CIBfile"]%node)):
                    raise ValueError("Can not scp file to %s "%node)
        
            self.rsh.remote_py(node, "os", "system", "chown hacluster /var/lib/heartbeat/crm/cib.xml")
Example #18
        def __init__(self, log_names, filename=None, timescale='epoch',
                     interval=1, name=None, verbose=None):
            super(BokehVisualizer, self).__init__(name, timescale, interval,
                                                  verbose)

            if isinstance(log_names, string_types):
                self.log_names = [log_names]
            elif isinstance(log_names, (tuple, list)):
                self.log_names = log_names
            else:
                raise ValueError('log_names must be either str or list but'
                                 ' was {}'.format(type(log_names)))

            self.filename = filename

            self.bk = bk
            self.TOOLS = "resize,crosshair,pan,wheel_zoom,box_zoom,reset,save"
            self.colors = ['blue', 'green', 'red', 'olive', 'cyan', 'aqua',
                           'gray']

            warnings.filterwarnings('error')
            try:
                self.bk.output_server(self.__name__)
                warnings.resetwarnings()
            except Warning:
                raise StopIteration('Bokeh server is not running')

            self.fig = self.bk.figure(
                title=self.__name__, x_axis_label=self.timescale,
                y_axis_label='value', tools=self.TOOLS,
                plot_width=1000, x_range=(0, 25), y_range=(0, 1))
Example #19
    def create_db(self, schema_file=None):
        """
        Create the database for this source, using given SQL schema file.

        If schema file is not given, defaults to
        "template_schema/project.sql.tmpl".
        """
        import MySQLdb

        if schema_file is None:
            schema_file = path("model", "sql", "template_schema", "project.sql.tmpl")

        filterwarnings('ignore', category=MySQLdb.Warning)
        with connection.cursor() as cursor:
            cursor.execute("CREATE DATABASE IF NOT EXISTS {0}".format(self.name))
            cursor.execute("USE {0}".format(self.name))
            try:
                with open(schema_file) as f:
                    # set the engine to use
                    sql = f.read()
                    statement_list = sql.split(";")
                    for statement in statement_list:
                        cursor.execute(statement)
            finally:
                cursor.execute("USE {0}".format(
                    settings.DATABASES['default']['NAME']
                ))

        resetwarnings()
Example #20
def clean_warning_registry():
    """Safe way to reset warniings """
    warnings.resetwarnings()
    reg = "__warningregistry__"
    for mod in sys.modules.values():
        if hasattr(mod, reg):
            getattr(mod, reg).clear()
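
Clearing __warningregistry__ matters because filters such as "default" and "once" record warnings they have already shown in each module's registry; on older interpreters a stale entry can silently swallow a warning in a later test. A hedged usage sketch with a hypothetical warning-emitting function:

    clean_warning_registry()  # forget both the filters and the "already seen" state
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("default")
        trigger_warning()  # hypothetical function expected to warn
    assert len(caught) == 1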
Example #21
    def OnTailLength(self, evt):
        try:
            dlg = wx.NumberEntryDialog(
                self.frame,
                "Enter new tail length",
                "(0-200 frames)",
                "Tail Length",
                value=params.tail_length,
                min=0,
                max=200,
            )
            if dlg.ShowModal() == wx.ID_OK:
                params.tail_length = dlg.GetValue()
            dlg.Destroy()
        except AttributeError:  # NumberEntryDialog not present yet in 2.6.3.2
            import warnings

            warnings.filterwarnings("ignore", "", DeprecationWarning)
            # but for some reason, GetNumberFromUser is already deprecated
            new_num = wx.GetNumberFromUser(
                "Enter new tail length",
                "(0-200 frames)",
                "Tail Length",
                params.tail_length,
                min=0,
                max=200,
                parent=self.frame,
            )
            warnings.resetwarnings()
            if new_num >= 0:
                params.tail_length = new_num
        self.ShowCurrentFrame()
Example #22
    def test_removed_deprecated_runas(self):
        # We *always* want *all* warnings thrown on this module
        warnings.resetwarnings()
        warnings.filterwarnings('always', '', DeprecationWarning, __name__)

        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
        pip_list = MagicMock(return_value=['pep8'])
        pip_uninstall = MagicMock(return_value=True)
        with patch.dict(pip_state.__salt__, {'cmd.run_all': mock,
                                             'pip.list': pip_list,
                                             'pip.uninstall': pip_uninstall}):
            with warnings.catch_warnings(record=True) as w:
                ret = pip_state.removed('pep8', runas='me!')
                self.assertEqual(
                    'The \'runas\' argument to pip.installed is deprecated, '
                    'and will be removed in Salt Hydrogen (Unreleased). '
                    'Please use \'user\' instead.', str(w[-1].message)
                )
                self.assertSaltTrueReturn({'testsuite': ret})
                # Is the state returning a warnings key with the deprecation
                # message?
                self.assertInSaltStateWarning(
                    'The \'runas\' argument to pip.installed is deprecated, '
                    'and will be removed in Salt Hydrogen (Unreleased). '
                    'Please use \'user\' instead.', {'testsuite': ret}
                )
Example #23
 def test_errcheck(self):
     py.test.skip('fixme')
     def errcheck(result, func, args):
         assert result == -42
         assert type(result) is int
         arg, = args
         assert arg == -126
         assert type(arg) is int
         return result
     #
     tf_b = dll.tf_b
     tf_b.restype = c_byte
     tf_b.argtypes = (c_byte,)
     tf_b.errcheck = errcheck
     assert tf_b(-126) == -42
     del tf_b.errcheck
     with warnings.catch_warnings(record=True) as w:
         dll.get_an_integer.argtypes = []
         dll.get_an_integer()
         assert len(w) == 1
         assert issubclass(w[0].category, RuntimeWarning)
         assert "C function without declared return type called" in str(w[0].message)
         
     with warnings.catch_warnings(record=True) as w:
         dll.get_an_integer.restype = None
         dll.get_an_integer()
         assert len(w) == 0
         
     warnings.resetwarnings()
Example #24
def bestFit(indices, trace):
    """
    Function extrapolating noise from trace containing sequence
    
    :type indices: numpy.array
    :param indices: indices of noise, as returned by extractNoise
    :type trace: trace object from obspy.core
    :param trace: trace whose noise has to be fitted
    :returns: array of extrapolated noise
    
    """
    deg = 1
    warnings.filterwarnings('error')
    # Initiate loop to choose the best degree of polynomial extrapolation.
    # Increment the degree until an error occurs.
    while True:
        try:
            best_fit = np.poly1d(np.polyfit(indices, trace.data[indices], deg))
            deg += 1
        except Exception:
            break
    warnings.resetwarnings()

    # Make extrapolation at optimal degree
    x = range(0, trace.stats.npts)
    fit = best_fit(x)

    return fit
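
What the try/except actually catches: with warnings.filterwarnings('error'), the RankWarning that np.polyfit emits for a poorly conditioned fit is raised as an exception, so the loop keeps the last degree that fitted cleanly. A sketch that names the warning explicitly instead of using a bare except, assuming NumPy's np.RankWarning (relocated to np.exceptions.RankWarning in newer releases):

    with warnings.catch_warnings():
        warnings.simplefilter('error', np.RankWarning)
        deg = 1
        while True:
            try:
                best_fit = np.poly1d(np.polyfit(indices, trace.data[indices], deg))
                deg += 1
            except np.RankWarning:
                break  # keep the last well-conditioned fit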
Example #25
 def write(self, data):
     # get all warnings
     warnFilters = list(warnings.filters)
     # reset warnings
     warnings.resetwarnings()
     # ignore all warnings:
     # we don't want warnings while pushing text to the text view
     warnings.filterwarnings("ignore")
     if PY2 and isinstance(data, str):
         try:
             data = unicode(data, "utf-8", "replace")
         except UnicodeDecodeError:
             data = "XXX " + repr(data)
     if self.outputView is not None:
         # Better not get SIGINT/KeyboardInterrupt exceptions while we're updating the output view
         with cancelLock:
             self.outputView.append(data, self.isError)
             t = time.time()
             if t - self._previousFlush > 0.2:
                 self.outputView.scrollToEnd()
                 if osVersionCurrent >= osVersion10_10:
                     AppKit.NSRunLoop.mainRunLoop().runUntilDate_(AppKit.NSDate.dateWithTimeIntervalSinceNow_(0.0001))
                 self._previousFlush = t
     else:
         self.data.append((data, self.isError))
     # reset the new warnings
     warnings.resetwarnings()
     # update with the old warnings filters
     warnings.filters.extend(warnFilters)
Example #26
 def _run_generated_code(self, code, globs, locs):
     import warnings
     with warnings.catch_warnings(record=True) as log:
         warnings.resetwarnings()
         exec(code, globs, locs)
         self.assertEqual(len(log), 0) # no longer warn
         return True
Example #27
def deprecation(trac_number, message):
    r"""
    Issue a deprecation warning.

    INPUT:

    - ``trac_number`` -- integer. The trac ticket number where the
      deprecation is introduced.

    - ``message`` -- string. An explanation of why things are deprecated
      and by what they should be replaced.

    EXAMPLES::

        sage: def foo():
        ....:  sage.misc.superseded.deprecation(13109, 'the function foo is replaced by bar')
        sage: foo()
        doctest:...: DeprecationWarning: the function foo is replaced by bar
        See http://trac.sagemath.org/13109 for details.
    """
    _check_trac_number(trac_number)
    message += '\n'
    message += 'See http://trac.sagemath.org/'+ str(trac_number) + ' for details.'
    resetwarnings()
    # Stack level 3 to get the line number of the code which called
    # the deprecated function which called this function.
    warn(message, DeprecationWarning, stacklevel=3)
Example #28
    def set_fcntl_constants(self):
        if fcntl is None: return 0
        self.F_GETFL = fcntl.F_GETFL
        self.F_SETFL = fcntl.F_SETFL
        self.F_GETFD = fcntl.F_GETFD
        self.F_SETFD = fcntl.F_SETFD
        ok = 0
        if fcntl.__dict__.has_key("FD_CLOEXEC"):
            self.FD_CLOEXEC = fcntl.FD_CLOEXEC
            ok = 1
        if ok == 0:
            try:
                FCNTL_ok = 0
                import warnings
                warnings.filterwarnings("ignore", "", DeprecationWarning)
                import FCNTL
                FCNTL_ok = 1
                warnings.resetwarnings()
            except ImportError:
                pass
            if FCNTL_ok and FCNTL.__dict__.has_key("FD_CLOEXEC"):
                self.FD_CLOEXEC = FCNTL.FD_CLOEXEC
                ok = 1
        if ok == 0:
            # assume FD_CLOEXEC = 1. see 
            # http://mail.python.org/pipermail/python-bugs-list/2001-December/009360.html
            self.FD_CLOEXEC = 1
            ok = 1

        if ok == 0:
            Es("This platform provides no ways to set "
               "close-on-exec flag. abort\n")
            os._exit(1)
Example #29
def runTreewalkerTest(innerHTML, input, expected, errors, treeClass):
    warnings.resetwarnings()
    warnings.simplefilter("error")
    try:
        p = html5parser.HTMLParser(tree = treeClass["builder"])
        if innerHTML:
            document = p.parseFragment(input, innerHTML)
        else:
            document = p.parse(input)
    except constants.DataLossWarning:
        #Ignore testcases we know we don't pass
        return

    document = treeClass.get("adapter", lambda x: x)(document)
    try:
        output = convertTokens(treeClass["walker"](document))
        output = attrlist.sub(sortattrs, output)
        expected = attrlist.sub(sortattrs, convertExpected(expected))
        diff = "".join(unified_diff([line + "\n" for line in expected.splitlines()],
                                    [line + "\n" for line in output.splitlines()],
                                    "Expected", "Received"))
        assert expected == output, "\n".join([
                "", "Input:", input,
                "", "Expected:", expected,
                "", "Received:", output,
                "", "Diff:", diff,
                ])
    except NotImplementedError:
        pass # Amnesty for those that confess...
Example #30
    def _create_trigger(self, field):
        # import MySQLdb as Database
        from warnings import filterwarnings, resetwarnings

        filterwarnings('ignore', message='Trigger does not exist',
                       category=Warning)

        opts = field.model._meta
        trigger_name = get_trigger_name(field, opts)

        stm = self.sql.format(trigger_name=trigger_name,
                              opts=opts, field=field)
        cursor = self.connection._clone().cursor()
        try:
            cursor.execute(stm)
            self._triggers[field] = trigger_name

        except (BaseException, _mysql_exceptions.ProgrammingError) as exc:
            errno, message = exc.args
            if errno != 2014:
                import traceback
                traceback.print_exc(exc)
                raise
        resetwarnings()
        return trigger_name
Example #31
def treat_deprecations_as_exceptions():
    """
    Turn all DeprecationWarnings (which indicate deprecated uses of
    Python itself or Numpy, but not within Astropy, where we use our
    own deprecation warning class) into exceptions so that we find
    out about them early.

    This completely resets the warning filters and any "already seen"
    warning state.
    """
    # First, totally reset the warning state
    for module in list(six.itervalues(sys.modules)):
        # We don't want to deal with six.MovedModules, only "real"
        # modules.
        if (isinstance(module, types.ModuleType) and
            hasattr(module, '__warningregistry__')):
            del module.__warningregistry__

    if not _deprecations_as_exceptions:
        return

    warnings.resetwarnings()

    # Hide the next couple of DeprecationWarnings
    warnings.simplefilter('ignore', DeprecationWarning)
    # Here's the wrinkle: a couple of our third-party dependencies
    # (py.test and scipy) are still using deprecated features
    # themselves, and we'd like to ignore those.  Fortunately, those
    # show up only at import time, so if we import those things *now*,
    # before we turn the warnings into exceptions, we're golden.
    try:
        # A deprecated stdlib module used by py.test
        import compiler
    except ImportError:
        pass

    try:
        import scipy
    except ImportError:
        pass

    # Now, start over again with the warning filters
    warnings.resetwarnings()
    # Now, turn DeprecationWarnings into exceptions
    warnings.filterwarnings("error", ".*", DeprecationWarning)

    # Only turn astropy deprecation warnings into exceptions if requested
    if _include_astropy_deprecations:
        warnings.filterwarnings("error", ".*", AstropyDeprecationWarning)
        warnings.filterwarnings("error", ".*", AstropyPendingDeprecationWarning)

    if sys.version_info[:2] >= (3, 4):
        # py.test reads files with the 'U' flag, which is now
        # deprecated in Python 3.4.
        warnings.filterwarnings(
            "ignore",
            r"'U' mode is deprecated",
            DeprecationWarning)

        # BeautifulSoup4 triggers a DeprecationWarning in stdlib's
        # html module.
        warnings.filterwarnings(
            "ignore",
            r"The strict argument and mode are deprecated\.",
            DeprecationWarning)
        warnings.filterwarnings(
            "ignore",
            r"The value of convert_charrefs will become True in 3\.5\. "
            r"You are encouraged to set the value explicitly\.",
            DeprecationWarning)

    if sys.version_info[:2] >= (3, 5):
        # py.test raises this warning on Python 3.5.
        # This can be removed when fixed in py.test.
        # See https://github.com/pytest-dev/pytest/pull/1009
        warnings.filterwarnings(
            "ignore",
            r"inspect\.getargspec\(\) is deprecated, use "
            r"inspect\.signature\(\) instead",
            DeprecationWarning)
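
A hedged sketch of the intended effect, assuming _deprecations_as_exceptions has been enabled beforehand; the deprecated call is hypothetical:

    treat_deprecations_as_exceptions()
    try:
        call_using_deprecated_api()  # hypothetical
    except DeprecationWarning as exc:
        print("deprecated usage surfaced as an exception:", exc)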
Example #32
 def __init__(self, fmc, **kwargs):
     warnings.resetwarnings()
     warnings.warn(
         "Deprecated: ApplicationProductivity() should be called via ApplicationProductivities()."
     )
     super().__init__(fmc, **kwargs)
Example #33
from email.mime.text import MIMEText
from io import BytesIO

try:
    import html2text
except ImportError:
    html2text = None

try:
    import weasyprint
except ImportError:
    weasyprint = None

warnings.simplefilter("ignore")
import relatorio.reporting
warnings.resetwarnings()
try:
    from relatorio.templates.opendocument import Manifest, MANIFEST
except ImportError:
    Manifest, MANIFEST = None, None
from genshi.filters import Translator

from trytond.i18n import gettext
from trytond.pool import Pool, PoolBase
from trytond.transaction import Transaction
from trytond.url import URLMixin
from trytond.rpc import RPC
from trytond.exceptions import UserError

logger = logging.getLogger(__name__)
Example #34
 def __init__(self, fmc, **kwargs):
     warnings.resetwarnings()
     warnings.warn(
         "Deprecated: ICMPv4Object() should be called via ICMPv4Objects().")
     super().__init__(fmc, **kwargs)
Example #35
def output_integ(slope_int, dq_int, effintim, var_p3, var_r3, var_both3,
                 int_times):
    """
    For the OLS algorithm, construct the output integration-specific results.
    Any variance values that are a large fraction of the default value
    LARGE_VARIANCE correspond to non-existent segments, so will be set to 0
    here before output.

    Parameters
    ----------
    model : instance of Data Model
       DM object for input

    slope_int : ndarray
       Data cube of weighted slopes for each integration, 3-D float

    dq_int : ndarray
       Data cube of DQ arrays for each integration, 3-D int

    effintim : float
       Effective integration time per integration

    var_p3 : ndarray
        Cube of integration-specific values for the slope variance due to
        Poisson noise only, 3-D float

    var_r3 : ndarray
        Cube of integration-specific values for the slope variance due to
        read noise only, 3-D float

    var_both3 : ndarray
        Cube of integration-specific values for the slope variance due to
        read noise and Poisson noise, 3-D float

    int_times : bintable, or None
        The INT_TIMES table, if it exists in the input, else None

    Returns
    -------
    integ_info : tuple
        The tuple of computed integration ramp fitting arrays.

    """
    # Suppress harmless arithmetic warnings for now
    warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning)
    warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning)

    var_p3[var_p3 > 0.4 * LARGE_VARIANCE] = 0.
    var_r3[var_r3 > 0.4 * LARGE_VARIANCE] = 0.
    var_both3[var_both3 > 0.4 * LARGE_VARIANCE] = 0.

    data = slope_int / effintim
    err = np.sqrt(var_both3)
    dq = dq_int
    var_poisson = var_p3
    var_rnoise = var_r3
    int_times = int_times
    integ_info = (data, dq, var_poisson, var_rnoise, int_times, err)

    # Reset the warnings filter to its original state
    warnings.resetwarnings()

    return integ_info
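
One caveat: warnings.resetwarnings() does not restore whatever filters were active before the two filterwarnings calls; it empties the filter list entirely, discarding filters the caller may have installed. A sketch of a scoped alternative with the same suppression, using the context-manager form:

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning)
        warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning)
        data = slope_int / effintim
        err = np.sqrt(var_both3)
    # the caller's filters are intact from here on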
Example #36
def calc_slope_vars(rn_sect, gain_sect, gdq_sect, group_time, max_seg):
    """
    Calculate the segment-specific variance arrays for the given
    integration.

    Parameters
    ----------
    rn_sect : ndarray
        read noise values for all pixels in data section, 2-D float

    gain_sect : ndarray
        gain values for all pixels in data section, 2-D float

    gdq_sect : ndarray
        data quality flags for pixels in section, 3-D int

    group_time : float
        Time increment between groups, in seconds.

    max_seg : int
        maximum number of segments fit

    Returns
    -------
    den_r3 : ndarray
        for a given integration, the reciprocal of the denominator of the
        segment-specific variance of the segment's slope due to read noise, 3-D float

    den_p3 : ndarray
        for a given integration, the reciprocal of the denominator of the
        segment-specific variance of the segment's slope due to Poisson noise, 3-D float

    num_r3 : ndarray
        numerator of the segment-specific variance of the segment's slope
        due to read noise, 3-D float

    segs_beg_3 : ndarray
        lengths of segments for all pixels in the given data section and
        integration, 3-D int
    """
    (nreads, asize2, asize1) = gdq_sect.shape
    npix = asize1 * asize2
    imshape = (asize2, asize1)

    # Create integration-specific sections of input arrays for determination
    #   of the variances.
    gdq_2d = gdq_sect[:, :, :].reshape((nreads, npix))
    gain_1d = gain_sect.reshape(npix)
    gdq_2d_nan = gdq_2d.copy()  # group dq with SATS will be replaced by nans
    gdq_2d_nan = gdq_2d_nan.astype(np.float32)

    wh_sat = np.where(np.bitwise_and(gdq_2d, constants.dqflags["SATURATED"]))
    if len(wh_sat[0]) > 0:
        gdq_2d_nan[wh_sat] = np.nan  # set all SAT groups to nan

    del wh_sat

    # Get lengths of semiramps for all pix [number_of_semiramps, number_of_pix]
    segs = np.zeros_like(gdq_2d)

    # Counter of semiramp for each pixel
    sr_index = np.zeros(npix, dtype=np.uint8)
    pix_not_done = np.ones(npix, dtype=bool)  # initialize to True

    i_read = 0
    # Loop over reads for all pixels to get segments (segments per pixel)
    while (i_read < nreads and np.any(pix_not_done)):
        gdq_1d = gdq_2d_nan[i_read, :]
        wh_good = np.where(gdq_1d == 0)  # good groups

        # if this group is good, increment those pixels' segments' lengths
        if len(wh_good[0]) > 0:
            segs[sr_index[wh_good], wh_good] += 1
        del wh_good

        # Locate any CRs that appear before the first SAT group...
        wh_cr = np.where(gdq_2d_nan[i_read, :].astype(np.int32)
                         & constants.dqflags["JUMP_DET"] > 0)

        # ... but not on final read:
        if (len(wh_cr[0]) > 0 and (i_read < nreads - 1)):
            sr_index[wh_cr[0]] += 1
            segs[sr_index[wh_cr], wh_cr] += 1

        del wh_cr

        # If current group is a NaN, this pixel is done (pix_not_done is False)
        wh_nan = np.where(np.isnan(gdq_2d_nan[i_read, :]))
        if len(wh_nan[0]) > 0:
            pix_not_done[wh_nan[0]] = False

        del wh_nan

        i_read += 1

    segs = segs.astype(np.uint8)
    segs_beg = segs[:max_seg, :]  # the leading nonzero lengths

    # Create reshaped version [ segs, y, x ] to simplify computation
    segs_beg_3 = segs_beg.reshape(max_seg, imshape[0], imshape[1])
    segs_beg_3 = remove_bad_singles(segs_beg_3)

    # Create a version 1 less for later calculations for the variance due to
    #   Poisson, with a floor=1 to handle single-group segments
    wh_pos_3 = np.where(segs_beg_3 > 1)
    segs_beg_3_m1 = segs_beg_3.copy()
    segs_beg_3_m1[wh_pos_3] -= 1
    segs_beg_3_m1[segs_beg_3_m1 < 1] = 1

    # For a segment, the variance due to Poisson noise
    #   = slope/(tgroup * gain * (ngroups-1)),
    #   where slope is the estimated median slope, tgroup is the group time,
    #   and ngroups is the number of groups in the segment.
    #   Here the denominator of this quantity will be computed, which will be
    #   later multiplied by the estimated median slope.

    # Suppress, then re-enable, harmless arithmetic warnings, as NaN will be
    #   checked for and handled later
    warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning)
    warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning)
    den_p3 = 1. / (group_time * gain_1d.reshape(imshape) * segs_beg_3_m1)
    warnings.resetwarnings()

    # For a segment, the variance due to read noise
    # = 12 * readnoise**2 / (ngroups_seg**3. - ngroups_seg) / (tgroup**2.)
    num_r3 = 12. * (rn_sect / group_time)**2.  # always >0

    # Reshape for every group, every pixel in section
    num_r3 = np.dstack([num_r3] * max_seg)
    num_r3 = np.transpose(num_r3, (2, 0, 1))

    # Denominator den_r3 = 1./(segs_beg_3 **3.-segs_beg_3). The minimum number
    #   of allowed groups is 2, which will apply if there is actually only 1
    #   group; in this case den_r3 = 1/6. This covers the case in which there is
    #   only one good group at the beginning of the integration, so it will be
    #   be compared to the plane of (near) zeros resulting from the reset. For
    #   longer segments, this value is overwritten below.
    den_r3 = num_r3.copy() * 0. + 1. / 6
    wh_seg_pos = np.where(segs_beg_3 > 1)

    # Suppress, then, re-enable harmless arithmetic warnings, as NaN will be
    #   checked for and handled later
    warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning)
    warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning)
    den_r3[wh_seg_pos] = 1. / (
        segs_beg_3[wh_seg_pos]**3. - segs_beg_3[wh_seg_pos]
    )  # overwrite where segs>1
    warnings.resetwarnings()

    return (den_r3, den_p3, num_r3, segs_beg_3)
Example #37
def test_device_non_physical(mock_factory):
    with warnings.catch_warnings(record=True) as w:
        warnings.resetwarnings()
        device = GPIODevice('GPIO37')
        assert len(w) == 1
        assert w[0].category == PinNonPhysical
Example #38
 def tearDown(self):
     warnings.resetwarnings()
Example #39
    def test(self,
             label='fast',
             verbose=1,
             extra_argv=None,
             doctests=False,
             coverage=False,
             raise_warnings=None):
        """
        Run tests for module using nose.

        Parameters
        ----------
        label : {'fast', 'full', '', attribute identifier}, optional
            Identifies the tests to run. This can be a string to pass to
            the nosetests executable with the '-A' option, or one of several
            special values.  Special values are:
            * 'fast' - the default - which corresponds to the ``nosetests -A``
              option of 'not slow'.
            * 'full' - fast (as above) and slow tests as in the
              'no -A' option to nosetests - this is the same as ''.
            * None or '' - run all tests.
            attribute_identifier - string passed directly to nosetests as '-A'.
        verbose : int, optional
            Verbosity value for test outputs, in the range 1-10. Default is 1.
        extra_argv : list, optional
            List with any extra arguments to pass to nosetests.
        doctests : bool, optional
            If True, run doctests in module. Default is False.
        coverage : bool, optional
            If True, report coverage of NumPy code. Default is False.
            (This requires the `coverage module:
             <http://nedbatchelder.com/code/modules/coverage.html>`_).
        raise_warnings : str or sequence of warnings, optional
            This specifies which warnings to configure as 'raise' instead
            of 'warn' during the test execution.  Valid strings are:

              - "develop" : equals ``(DeprecationWarning, RuntimeWarning)``
              - "release" : equals ``()``, don't raise on any warnings.

        Returns
        -------
        result : object
            Returns the result of running the tests as a
            ``nose.result.TextTestResult`` object.

        Notes
        -----
        Each NumPy module exposes `test` in its namespace to run all tests for it.
        For example, to run all tests for numpy.lib:

        >>> np.lib.test() #doctest: +SKIP

        Examples
        --------
        >>> result = np.lib.test() #doctest: +SKIP
        Running unit tests for numpy.lib
        ...
        Ran 976 tests in 3.933s

        OK

        >>> result.errors #doctest: +SKIP
        []
        >>> result.knownfail #doctest: +SKIP
        []
        """

        # cap verbosity at 3 because nose becomes *very* verbose beyond that
        verbose = min(verbose, 3)

        import utils
        utils.verbose = verbose

        if doctests:
            print "Running unit tests and doctests for %s" % self.package_name
        else:
            print "Running unit tests for %s" % self.package_name

        self._show_system_info()

        # reset doctest state on every run
        import doctest
        doctest.master = None

        if raise_warnings is None:
            raise_warnings = self.raise_warnings

        _warn_opts = dict(develop=(DeprecationWarning, RuntimeWarning),
                          release=())
        if raise_warnings in _warn_opts.keys():
            raise_warnings = _warn_opts[raise_warnings]

        # Preserve the state of the warning filters
        warn_ctx = numpy.testing.utils.WarningManager()
        warn_ctx.__enter__()
        # Reset the warning filters to the default state,
        # so that running the tests is more repeatable.
        warnings.resetwarnings()
        # If deprecation warnings are not set to 'error' below,
        # at least set them to 'warn'.
        warnings.filterwarnings('always', category=DeprecationWarning)
        # Force the requested warnings to raise
        for warningtype in raise_warnings:
            warnings.filterwarnings('error', category=warningtype)
        # Filter out annoying import messages.
        warnings.filterwarnings('ignore', message='Not importing directory')
        warnings.filterwarnings("ignore", message="numpy.dtype size changed")
        warnings.filterwarnings("ignore", message="numpy.ufunc size changed")

        try:
            from noseclasses import NumpyTestProgram

            argv, plugins = self.prepare_test_args(label, verbose, extra_argv,
                                                   doctests, coverage)
            t = NumpyTestProgram(argv=argv, exit=False, plugins=plugins)
        finally:
            warn_ctx.__exit__()

        return t.result
Example #40
 def assertHFToolsDeprecationWarning(self, funk, *k, **kw):
     warnings.resetwarnings()
     warnings.simplefilter("error", HFToolsDeprecationWarning)
     self.assertRaises(HFToolsDeprecationWarning, funk, *k, **kw)
     warnings.simplefilter("ignore", HFToolsDeprecationWarning)
Example #41
 def setup_method(self, method):
     warnings.resetwarnings()
Example #42
def teardown():
    # Clear list of warning filters
    warnings.resetwarnings()
Example #43
def _configure_warnings(args):
    warnings.resetwarnings()
    if args.w:
        warnings.simplefilter('ignore')
    if args.Werror:
        warnings.simplefilter('error')
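
A hypothetical argparse setup consistent with the args.w and args.Werror attributes read above (not part of the original source):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-w', action='store_true', help='suppress all warnings')
    parser.add_argument('--Werror', action='store_true', help='treat warnings as errors')
    _configure_warnings(parser.parse_args())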
Example #44
 def teardown_method(self, method):
     warnings.resetwarnings()
Example #45
    def attack(self,
               model: nn.Module,
               inputs: torch.Tensor,
               labels: torch.Tensor,
               targeted: bool = False) -> torch.Tensor:
        """
        Performs the attack of the model for the inputs and labels.

        Parameters
        ----------
        model : nn.Module
            Model to attack.
        inputs : torch.Tensor
            Batch of samples to attack. Values should be in the [0, 1] range.
        labels : torch.Tensor
            Labels of the samples to attack if untargeted, else labels of targets.
        targeted : bool, optional
            Whether to perform a targeted attack or not.

        Returns
        -------
        torch.Tensor
            Batch of samples modified to be adversarial to the model.

        """
        if inputs.min() < 0 or inputs.max() > 1:
            raise ValueError('Input values should be in the [0, 1] range.')
        if targeted:
            print(
                'DeepFool is an untargeted adversarial attack. Returning clean inputs.'
            )
            return inputs

        fmodel = foolbox.models.PyTorchModel(model,
                                             bounds=(0, 1),
                                             num_classes=self.num_classes,
                                             device=self.device)
        attack = foolbox.attacks.DeepFoolL2Attack(model=fmodel)

        numpy_inputs = inputs.cpu().numpy()
        numpy_labels = labels.cpu().numpy()
        batch_size = len(inputs)
        adversarials = numpy_inputs.copy()

        warnings.filterwarnings('ignore', category=UserWarning)
        for i in tqdm.tqdm(range(batch_size), ncols=80):
            adv = attack(
                numpy_inputs[i], numpy_labels[i]
            )  #, unpack=True, steps=self.max_iter, subsample=self.subsample)
            if adv is not None:
                adv = torch.renorm(torch.from_numpy(adv - numpy_inputs[i]),
                                   p=2,
                                   dim=0,
                                   maxnorm=1).numpy() + numpy_inputs[i]

                adversarials[i] = adv
        warnings.resetwarnings()

        adversarials = torch.from_numpy(adversarials).to(self.device)

        return adversarials
Example #46
 def __init__(self, fmc, **kwargs):
     warnings.resetwarnings()
     warnings.warn(
         "Deprecated: DeviceHAMonitoredInterfaces() should be called via MonitoredInterfaces()."
     )
     super().__init__(fmc, **kwargs)
Example #47
    def __init__(self,
                 emu=None,
                 y=None,
                 x=None,
                 thetaprior=None,
                 yvar=None,
                 method='directbayes',
                 args={},
                 options={}):
        '''
        A class to represent a calibrator. Fits a calibrator model provided
        in ``calibrationmethods/[method].py`` where [method] is the user
        option with default listed above.

        .. tip::
           To use a new calibrator, just drop a new file to the
           ``calibrationmethods/`` directory with the required formatting.

        :Example:
            .. code-block:: python

               calibrator(emu=emu, y=y, x=x, thetaprior=thetaprior,
                          method='directbayes', args=args)

        Parameters
        ----------
        emu : surmise.emulation.emulator, optional
            An emulator class instance as defined in surmise.emulation.
            The default is None.

        y : numpy.ndarray, optional
            Array of observed values at x. The default is None.

        x : numpy.ndarray, optional
            An array of x values that match the definition of "emu.x".
            Currently, existing methods supports only the case when x is a
            subset of "emu.x". The default is None.

        thetaprior : class, optional
            class instance with two built-in functions. The default is None.

            .. important::
                If a calibration method requires sampling, then
                the prior distribution of the parameters should be included
                into the calibrator. In this case, thetaprior class
                should include two methods:
                    - ``lpdf(theta)``
                        Returns the log of the pdf of a given theta with size
                        ``(len(theta), 1)``
                    - ``rnd(n)``
                        Generates n random variable from a prior distribution.

            :Example:
                .. code-block:: python

                    class prior_example:
                        def lpdf(theta):
                            return sps.uniform.logpdf(
                                theta[:, 0], 0, 1).reshape((len(theta), 1))

                        def rnd(n):
                            return np.vstack((sps.uniform.rvs(0, 1, size=n)))

        yvar : numpy.ndarray, optional
            The vector of observation variances at y. The default is None.

        method : str, optional
            A string that points to the file located in ``calibrationmethods/``
            you would like to use. The default is 'directbayes'.

        args : dict, optional
            Optional dictionary containing options you would like to pass to
            [method].fit(x, theta, f, args)
            or
            [method].predict(x, theta, args). The default is {}.

        Raises
        ------
        ValueError
            If the dimension of the data do not match with the fitted emulator.

        Returns
        -------
        None.

        '''

        if ('warnings' in args.keys()) and args['warnings']:
            warnings.resetwarnings()
        else:
            warnings.simplefilter('ignore')

        self.args = args
        if y is None:
            raise ValueError('You have not provided any y.')
        if y.ndim > 1.5:
            y = np.squeeze(y)
        if y.shape[0] < 5:
            raise ValueError('5 is the minimum number of observations at this '
                             'time.')
        self.y = y
        if emu is None:
            raise ValueError('You have not provided any emulator.')
        self.emu = emu

        try:
            thetatestsamp = thetaprior.rnd(100)
        except Exception:
            raise ValueError('thetaprior.rnd(100) failed.')

        if thetatestsamp.shape[0] != 100:
            raise ValueError('thetaprior.rnd(100) failed to give 100 values.')

        try:
            thetatestlpdf = thetaprior.lpdf(thetatestsamp)
        except Exception:
            raise ValueError('thetaprior.lpdf(thetatestsamp) failed.')

        if thetatestlpdf.shape[0] != 100:
            raise ValueError('thetaprior.lpdf(thetaprior.rnd(100)) failed to '
                             'give 100 values.')
        # if thetatestlpdf.ndim != 1:
        #    raise ValueError('thetaprior.lpdf(thetaprior.rnd(100)) has '
        #                     'dimension higher than 1.')

        self.info = {}
        self.info['thetaprior'] = copy.deepcopy(thetaprior)

        if x is not None:
            if x.shape[0] != y.shape[0]:
                raise ValueError('If x is provided, shape[0] must align with '
                                 'the length of y.')
        self.x = copy.deepcopy(x)
        predtry = emu.predict(copy.copy(self.x), thetatestsamp)
        if y.shape[0] != predtry().shape[0]:
            if x is None:
                raise ValueError('y and emu.predict(theta) must have the same '
                                 'shape')
            else:
                raise ValueError('y and emu.predict(x,theta) must have the '
                                 'same shape')
        else:
            prednotfinite = np.logical_not(np.isfinite(predtry()))
            if np.any(prednotfinite):
                warnings.warn('Some non-finite values were received from '
                              'emulation.')
                fracfail = np.mean(prednotfinite, 1)
                if np.sum(fracfail <= 10**(-3)) < 5:
                    raise ValueError('Your emulator failed in enough places '
                                     'to give up.')
                else:
                    warnings.warn('Current protocol is to remove observations'
                                  ' that have nonfinite values.')
                    whichrm = np.where(fracfail > 10**(-3))[0]
                    warnings.warn('Removing values at %s.' %
                                  np.array2string(whichrm))
                    whichkeep = np.where(fracfail <= 10**(-3))[0]
                    if x is not None:
                        self.x = self.x[whichkeep, :]
                    self.y = self.y[whichkeep]
            else:
                whichkeep = None
        if yvar is not None:
            if yvar.shape[0] != y.shape[0] and yvar.shape[0] > 1.5:
                raise ValueError('yvar must be the same size as y or '
                                 'of size 1.')
            if np.min(yvar) < 0:
                raise ValueError('yvar has at least one negative value.')
            if np.min(yvar) < 10**(-6) or np.max(yvar) > 10**(6):
                raise ValueError('Rescale your problem so that the yvar'
                                 ' is between 10 ^ -6 and 10 ^ 6.')
            self.info['yvar'] = copy.deepcopy(yvar)
            if whichkeep is not None:
                self.info['yvar'] = self.info['yvar'][whichkeep]

        try:
            self.method = importlib.import_module(
                'surmise.calibrationmethods.' + method)
        except Exception:
            raise ValueError('Module not found!')

        self.__options = copy.copy(options)

        if 'autofit' in self.__options.keys():
            if self.__options['autofit'] is not False:
                self.fit()
        else:
            self.fit()
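Putting the docstring's pieces together, a minimal end-to-end sketch might look as follows. It assumes an already-fitted surmise emulator emu with matching observations x and y, and assumes the class is importable as surmise.calibration.calibrator; treat the import path as an assumption.

import numpy as np
import scipy.stats as sps
from surmise.calibration import calibrator  # assumed import path

# Uniform(0, 1) prior with the two methods the calibrator requires
class prior_example:
    def lpdf(theta):
        return sps.uniform.logpdf(theta[:, 0], 0, 1).reshape((len(theta), 1))

    def rnd(n):
        return np.vstack((sps.uniform.rvs(0, 1, size=n)))

# emu, x, y are assumed to exist as described in the docstring above
cal = calibrator(emu=emu, y=y, x=x, thetaprior=prior_example,
                 method='directbayes', args={'warnings': True})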
Beispiel #48
0
    def solve(self, objective, constraints, cached_data, warm_start, verbose,
              solver_opts):
        """Returns the result of the call to the solver.

        Parameters
        ----------
        objective : CVXPY objective object
            Raw objective passed by CVXPY. Can be convex/concave.
        constraints : list
            The list of raw constraints.
        cached_data : dict
            A map of solver name to cached problem data.
        warm_start : bool
            Whether to warm start the solver (unused by this method).
        verbose : bool
            Should the solver print output?
        solver_opts : dict
            Additional arguments for the solver.

        Returns
        -------
        tuple
            (status, optimal value, primal, equality dual, inequality dual)
        """

        sym_data = self.get_sym_data(objective, constraints)

        vars_ = sym_data.vars_
        id_map = sym_data.var_offsets
        N = sym_data.x_length

        extractor = QuadCoeffExtractor(id_map, N)

        # Extract the coefficients
        (Ps, Q, R) = extractor.get_coeffs(objective.args[0])

        P = Ps[0]
        q = np.asarray(Q.todense()).flatten()
        r = R[0]

        # Forming the KKT system
        if len(constraints) > 0:
            Cs = [extractor.get_coeffs(c._expr)[1:] for c in constraints]
            As = sp.vstack([C[0] for C in Cs])
            bs = np.array([C[1] for C in Cs]).flatten()
            lhs = sp.bmat([[2 * P, As.transpose()], [As, None]], format='csr')
            rhs = np.concatenate([-q, -bs])
        else:  # avoiding calling vstack with empty list
            lhs = 2 * P
            rhs = -q

        warnings.filterwarnings('error')

        # Actually solving the KKT system
        try:
            sol = SLA.spsolve(lhs.tocsr(), rhs)
            x = np.array(sol[:N])
            nu = np.array(sol[N:])
            p_star = np.dot(x.transpose(), P * x + q) + r
        except SLA.MatrixRankWarning:
            x = None
            nu = None
            p_star = None

        warnings.resetwarnings()

        result_dict = {s.PRIMAL: x, s.EQ_DUAL: nu, s.VALUE: p_star}

        return self.format_results(result_dict, None, cached_data)
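The pattern worth isolating here is the promotion of SciPy's MatrixRankWarning to an exception, so that a singular KKT matrix yields None instead of a vector of NaNs. Below is a standalone sketch of that pattern using catch_warnings, so the filter change cannot leak out of the function; the surrounding names are illustrative.

import numpy as np
import scipy.sparse as sp
import scipy.sparse.linalg as SLA
import warnings

def spsolve_or_none(lhs, rhs):
    # Turn only the rank warning into an error, and only inside this block
    with warnings.catch_warnings():
        warnings.simplefilter('error', SLA.MatrixRankWarning)
        try:
            return SLA.spsolve(sp.csr_matrix(lhs), rhs)
        except SLA.MatrixRankWarning:
            return None

print(spsolve_or_none(np.array([[1., 2.], [2., 4.]]), np.array([1., 1.])))  # None

Compared with the module-level filterwarnings('error') / resetwarnings() pair above, the context manager restores the caller's filters even if the solve raises.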
Beispiel #49
0
def main():
    print("データベースを選択してください")
    print("サブディレクトリ内の画像もすべて検索対象となります")

    # Hide the tkinter root window
    tk = tkinter.Tk()
    tk.withdraw()

    data_folder_path = tkinter.filedialog.askdirectory(
        initialdir=data_processed_path, title='choose data folder')

    print("データベースと比較したい画像を選択してください")
    test_img_path = tkinter.filedialog.askopenfilename(
        initialdir=data_processed_path,
        title='choose test image',
        filetypes=[('image file', '*.jpeg;*.jpg;*.png')])

    from keras.models import Model
    from keras.preprocessing import image

    # refer https://qiita.com/K-jun/items/cab923d49a939a8486fc
    from keras.applications.vgg19 import VGG19, preprocess_input

    base_model = VGG19(
        weights="imagenet")  # VGG19 architecture with ImageNet-pretrained weights
    #base_model.summary()
    model = Model(inputs=base_model.input,
                  outputs=base_model.get_layer("fc2").output)

    test_img = image.load_img(test_img_path, target_size=(224, 224))
    x = image.img_to_array(test_img)
    x = np.expand_dims(x, axis=0)
    x = preprocess_input(x)
    test_fc2_features = model.predict(x)

    png_list = glob.glob(
        data_folder_path + "/**/*.png",
        recursive=True)  # '/**/' plus recursive=True also matches nested subfolders
    jpeg_list = glob.glob(data_folder_path + "/**/*.jpeg", recursive=True)
    jpg_list = glob.glob(data_folder_path + "/**/*.jpg", recursive=True)
    image_list = png_list + jpeg_list + jpg_list

    fc2_list = []
    print("画像数は", len(image_list), "です。")

    print("探索を開始します。中止する場合はCtrl+Cを押してください")
    import time
    start_time = time.time()

    warnings.resetwarnings()
    warnings.simplefilter('ignore', UserWarning)
    warnings.simplefilter('ignore', FutureWarning)

    for i, image_path in enumerate(image_list):
        if (i == 10) or (i != 0 and i % 100 == 0):
            remained_num = len(image_list) - i
            elapsed_time = time.time()
            remained_time = (elapsed_time - start_time) / i * remained_num
            print(i, "件完了しました。 残り", remained_num, "件、", round(remained_time),
                  "秒くらいかかります。")
        img = image.load_img(image_path, target_size=(224, 224))
        x = image.img_to_array(img)
        x = np.expand_dims(x, axis=0)
        x = preprocess_input(x)
        fc2_features = model.predict(x)
        fc2_list.append(fc2_features[0])

    warnings.resetwarnings()
    #print(fc2_list)

    # refer https://qiita.com/wasnot/items/20c4f30a529ae3ed5f52
    index = nmslib.init(method='hnsw', space='cosinesimil')
    index.addDataPointBatch(fc2_list)
    index.createIndex({'post': 2}, print_progress=True)

    ids, distances = index.knnQuery(test_fc2_features[0], k=len(image_list))
    result = [image_list[i] for i in ids]
    print(ids)
    print(distances)
    print(result)

    print("選択した画像は ", test_img_path, " です")

    print("選択した画像に似ている順に表示します")
    for i, id in enumerate(ids):
        print(image_list[id], " : distance:", distances[i])

    print("選択した画像は ", test_img_path, " です")

    #index.save(model_name)
    print("The window will close in 30 minutes.")
    time.sleep(1800)
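The resetwarnings() / simplefilter('ignore', ...) pair above mutates global filter state for the rest of the process. A sketch of the same silencing scoped to the loop with catch_warnings follows; the function and argument names are placeholders, not part of the example.

import warnings

def predict_quietly(model, batches):
    results = []
    # Previous filters are restored on exit, even if predict() raises
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', UserWarning)
        warnings.simplefilter('ignore', FutureWarning)
        for x in batches:
            results.append(model.predict(x))
    return results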
Beispiel #50
0
def reset_warnings(gallery_conf, fname):
    """Ensure we are future compatible and ignore silly warnings."""
    # In principle, our examples should produce no warnings.
    # Here we cause warnings to become errors, with a few exceptions.
    # This list should be considered alongside
    # setup.cfg -> [tool:pytest] -> filterwarnings

    # remove tweaks from other module imports or example runs
    warnings.resetwarnings()
    # restrict
    warnings.filterwarnings('error')
    # allow these, but show them
    warnings.filterwarnings('always', '.*non-standard config type: "foo".*')
    warnings.filterwarnings('always', '.*config type: "MNEE_USE_CUUDAA".*')
    warnings.filterwarnings('always', '.*cannot make axes width small.*')
    warnings.filterwarnings('always', '.*Axes that are not compatible.*')
    warnings.filterwarnings('always', '.*FastICA did not converge.*')
    warnings.filterwarnings(  # xhemi morph (should probably update sample)
        'always', '.*does not exist, creating it and saving it.*')
    warnings.filterwarnings('default', module='sphinx')  # internal warnings
    warnings.filterwarnings(
        'always', '.*converting a masked element to nan.*')  # matplotlib?
    # allow these warnings, but don't show them
    warnings.filterwarnings('ignore', '.*OpenSSL\\.rand is deprecated.*')
    warnings.filterwarnings('ignore', '.*is currently using agg.*')
    warnings.filterwarnings(  # SciPy-related warning (maybe 1.2.0 will fix it)
        'ignore', '.*the matrix subclass is not the recommended.*')
    warnings.filterwarnings(  # some joblib warning
        'ignore', '.*semaphore_tracker: process died unexpectedly.*')
    warnings.filterwarnings(  # needed until SciPy 1.2.0 is released
        'ignore',
        '.*will be interpreted as an array index.*',
        module='scipy')
    for key in (
            'HasTraits',
            r'numpy\.testing',
            'importlib',
            r'np\.loads',
            'Using or importing the ABCs from',  # internal modules on 3.7
            r"it will be an error for 'np\.bool_'",  # ndimage
            "DocumenterBridge requires a state object",  # sphinx dev
            "'U' mode is deprecated",  # sphinx io
            r"joblib is deprecated in 0\.21",  # nilearn
    ):
        warnings.filterwarnings(  # deal with other modules having bad imports
            'ignore',
            message=".*%s.*" % key,
            category=DeprecationWarning)
    warnings.filterwarnings(  # deal with bootstrap-theme bug
        'ignore',
        message=".*modify script_files in the theme.*",
        category=Warning)
    warnings.filterwarnings(  # deal with other modules having bad imports
        'ignore',
        message=".*ufunc size changed.*",
        category=RuntimeWarning)
    warnings.filterwarnings(  # realtime
        'ignore',
        message=".*unclosed file.*",
        category=ResourceWarning)
    warnings.filterwarnings('ignore', message='Exception ignored in.*')
    # allow this ImportWarning, but don't show it
    warnings.filterwarnings('ignore',
                            message="can't resolve package from",
                            category=ImportWarning)
    warnings.filterwarnings('ignore',
                            message='.*mne-realtime.*',
                            category=DeprecationWarning)
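A function with this (gallery_conf, fname) signature is presumably registered through sphinx-gallery's reset_modules hook, which accepts module names or callables run before each example; a sketch of that wiring in conf.py, under that assumption:

# conf.py (sketch)
sphinx_gallery_conf = {
    # built-in resets by module name, plus the custom warning reset
    # run before every example script
    'reset_modules': ('matplotlib', reset_warnings),
}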
Beispiel #51
0
 def test_textmode_warn(self):
     with warnings.catch_warnings(record=True) as w:
         warnings.resetwarnings()
         warnings.simplefilter('always', ldap.LDAPBytesWarning)
         self._search_wrong_type(bytes_mode=True, strictness='warn')
     self.assertEqual(len(w), 1)
Beispiel #52
0
def main(*args):

    # Change to blm directory
    pwd = os.getcwd()
    os.chdir(os.path.dirname(os.path.realpath(__file__)))

    if len(args) == 0 or (not args[0]):
        # Load in inputs
        ipath = os.path.abspath(os.path.join('..', 'blm_config.yml'))
        with open(ipath, 'r') as stream:
            inputs = yaml.load(stream, Loader=yaml.FullLoader)
        retnb = False
    else:
        if type(args[0]) is str:
            if os.path.isabs(args[0]):
                ipath = args[0]
            else:
                ipath = os.path.abspath(os.path.join(pwd, args[0]))
            # In this case inputs file is first argument
            with open(ipath, 'r') as stream:
                inputs = yaml.load(stream, Loader=yaml.FullLoader)
                # Work out whether to return nb or save it in a
                # file
                if len(args) > 1:
                    retnb = args[1]
                else:
                    retnb = False
        else:
            # In this case inputs structure is first argument.
            inputs = args[0]
            ipath = ''
            retnb = True

    # Save absolute filepaths in place of relative filepaths
    if ipath:

        # Y files
        if not os.path.isabs(inputs['Y_files']):

            # Change Y in inputs
            inputs['Y_files'] = os.path.join(pwd, inputs['Y_files'])

        # If mask files are specified
        if 'data_mask_files' in inputs:

            # M_files
            if not os.path.isabs(inputs['data_mask_files']):

                # Change M in inputs
                inputs['data_mask_files'] = os.path.join(
                    pwd, inputs['data_mask_files'])

        # If analysis mask file specified,
        if 'analysis_mask' in inputs:

            # M_files
            if not os.path.isabs(inputs['analysis_mask']):

                # Change M in inputs
                inputs['analysis_mask'] = os.path.join(pwd,
                                                       inputs['analysis_mask'])

        # If X is specified
        if not os.path.isabs(inputs['X']):

            # Change X in inputs
            inputs['X'] = os.path.join(pwd, inputs['X'])

        if not os.path.isabs(inputs['outdir']):

            # Change output directory in inputs
            inputs['outdir'] = os.path.join(pwd, inputs['outdir'])

        # Update inputs
        with open(ipath, 'w') as outfile:
            yaml.dump(inputs, outfile, default_flow_style=False)

    # Work out the memory limit for the analysis
    if 'MAXMEM' in inputs:
        MAXMEM = eval(inputs['MAXMEM'])
    else:
        MAXMEM = 2**32

    OutDir = inputs['outdir']

    # Get number of parameters
    c1 = str2vec(inputs['contrasts'][0]['c' + str(1)]['vector'])
    c1 = np.array(c1)
    n_p = c1.shape[0]
    del c1

    # Make output directory and tmp
    if not os.path.isdir(OutDir):
        os.mkdir(OutDir)
    if not os.path.isdir(os.path.join(OutDir, "tmp")):
        os.mkdir(os.path.join(OutDir, "tmp"))

    with open(inputs['Y_files']) as a:

        Y_files = []
        for line in a.readlines():

            Y_files.append(line.replace('\n', ''))

    # Load in one nifti to check NIFTI size
    try:
        Y0 = loadFile(Y_files[0])
    except Exception as error:
        raise ValueError('The NIFTI "' + Y_files[0] +
                         '" does not exist') from error

    # Get the maximum memory a NIFTI could take in storage
    NIFTIsize = sys.getsizeof(np.zeros(Y0.shape, dtype='uint64'))

    if NIFTIsize > MAXMEM:
        raise ValueError('The NIFTI "' + Y_files[0] + '" is too large')

    # Similar to blksize in SwE, we divide the memory limit by 8 times the
    # size of a NIFTI, and then by the number of parameters in the
    # analysis, to work out how many images go in each block.
    blksize = np.floor(MAXMEM / 8 / NIFTIsize / n_p)
    if blksize == 0:
        raise ValueError('Blocksize too small.')

    # Check F contrast ranks
    n_c = len(inputs['contrasts'])
    for i in range(0, n_c):

        # Read in contrast vector
        cvec = str2vec(inputs['contrasts'][i]['c' + str(i + 1)]['vector'])
        cvec = np.array(cvec)

        if cvec.ndim > 1:

            # Get dimension of cvector
            q = cvec.shape[0]

            if np.linalg.matrix_rank(cvec) < q:
                raise ValueError('F contrast: \n' + str(cvec) +
                                 '\n is not of correct rank.')

    if not retnb:
        with open(os.path.join(OutDir, "nb.txt"), 'w') as f:
            print(int(np.ceil(len(Y_files) / int(blksize))), file=f)
    else:
        return (int(np.ceil(len(Y_files) / int(blksize))))

    warnings.resetwarnings()
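Since a non-string first argument is treated as the inputs structure itself (with retnb=True), a minimal illustrative call might look like the sketch below. Every path, key value, and the str2vec input format are assumptions made for the sketch.

inputs = {
    'Y_files': '/data/study/Y_files.txt',  # one NIFTI path per line
    'X': '/data/study/X.csv',              # design matrix
    'outdir': '/data/study/results',
    'MAXMEM': '2**32',                     # note: passed through eval() above
    'contrasts': [
        {'c1': {'vector': '[1, 0, 0]'}},   # assumed str2vec format
    ],
}
n_blocks = main(inputs)  # returns the block count instead of writing nb.txt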
Beispiel #53
0
def _get_totalvi_protein_priors(adata, n_cells=100):
    """Compute an empirical prior for protein background."""
    import warnings
    from sklearn.exceptions import ConvergenceWarning
    from sklearn.mixture import GaussianMixture

    warnings.filterwarnings("error")

    batch = get_from_registry(adata, _CONSTANTS.BATCH_KEY).ravel()
    cats = adata.uns["_scvi"]["categorical_mappings"]["_scvi_batch"]["mapping"]
    codes = np.arange(len(cats))

    batch_avg_mus, batch_avg_scales = [], []
    for b in np.unique(codes):
        # can happen during online updates
        # the values of these batches will not be used
        num_in_batch = np.sum(batch == b)
        if num_in_batch == 0:
            batch_avg_mus.append(0)
            batch_avg_scales.append(1)
            continue
        pro_exp = get_from_registry(adata,
                                    _CONSTANTS.PROTEIN_EXP_KEY)[batch == b]

        # for missing batches, put dummy values -- scarches case, will be replaced anyway
        if pro_exp.shape[0] == 0:
            batch_avg_mus.append(0.0)
            batch_avg_scales.append(0.05)
            continue

        cells = np.random.choice(np.arange(pro_exp.shape[0]), size=n_cells)
        if isinstance(pro_exp, pd.DataFrame):
            pro_exp = pro_exp.to_numpy()
        pro_exp = pro_exp[cells]
        gmm = GaussianMixture(n_components=2)
        mus, scales = [], []
        # fit per cell GMM
        for c in pro_exp:
            try:
                gmm.fit(np.log1p(c.reshape(-1, 1)))
            # when cell is all 0
            except ConvergenceWarning:
                mus.append(0)
                scales.append(0.05)
                continue

            means = gmm.means_.ravel()
            sorted_fg_bg = np.argsort(means)
            mu = means[sorted_fg_bg].ravel()[0]
            covariances = gmm.covariances_[sorted_fg_bg].ravel()[0]
            scale = np.sqrt(covariances)
            mus.append(mu)
            scales.append(scale)

        # average distribution over cells
        batch_avg_mu = np.mean(mus)
        batch_avg_scale = np.sqrt(np.sum(np.square(scales)) / (n_cells**2))

        batch_avg_mus.append(batch_avg_mu)
        batch_avg_scales.append(batch_avg_scale)

    # repeat prior for each protein
    batch_avg_mus = np.array(batch_avg_mus, dtype=np.float32).reshape(1, -1)
    batch_avg_scales = np.array(batch_avg_scales,
                                dtype=np.float32).reshape(1, -1)
    batch_avg_mus = np.tile(batch_avg_mus, (pro_exp.shape[1], 1))
    batch_avg_scales = np.tile(batch_avg_scales, (pro_exp.shape[1], 1))

    warnings.resetwarnings()

    return batch_avg_mus, batch_avg_scales
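warnings.filterwarnings("error") above also turns every unrelated warning into an exception until resetwarnings() runs. A narrower sketch that promotes only ConvergenceWarning, scoped with catch_warnings, follows; the data handling is illustrative, not scvi-tools code.

import numpy as np
import warnings
from sklearn.exceptions import ConvergenceWarning
from sklearn.mixture import GaussianMixture

def fit_gmm_or_default(values, default=(0.0, 0.05)):
    gmm = GaussianMixture(n_components=2)
    with warnings.catch_warnings():
        # only this warning becomes an error, and only inside this block
        warnings.simplefilter('error', ConvergenceWarning)
        try:
            gmm.fit(np.log1p(values.reshape(-1, 1)))
        except ConvergenceWarning:
            return default
    means = gmm.means_.ravel()
    lo = int(np.argsort(means)[0])
    return means[lo], float(np.sqrt(gmm.covariances_.ravel()[lo]))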
Beispiel #54
0
    @contextmanager  # assumed: from contextlib import contextmanager
    def catch_byteswarnings(self, *args, **kwargs):
        with warnings.catch_warnings(record=True) as w:
            conn = self._get_bytes_ldapobject(*args, **kwargs)
            warnings.resetwarnings()
            warnings.simplefilter('always', ldap.LDAPBytesWarning)
            yield conn, w
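Under the contextmanager assumption noted above, a hypothetical use inside a test would look like this; the search arguments are placeholders.

with self.catch_byteswarnings(bytes_mode=True) as (conn, w):
    conn.search_s('dc=example,dc=org', ldap.SCOPE_SUBTREE)
self.assertEqual(len(w), 1)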
Beispiel #55
0
def show_trend(df, country, place, metric, n_changepoints=20, events=None):
    """
    Show trend of log10(@variable) using fbprophet package.
    @ncov_df <pd.DataFrame>: the clean data
    @variable <str>: variable name to analyse
        - if Confirmed, use Infected + Recovered + Deaths
    @n_changepoints <int>: max number of change points
    @kwargs: keword arguments of select_area()
    """
    # The log10(y) transform is currently disabled
    warnings.resetwarnings()
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        df["y"] = df["y"]  # np.log10(df["y"]).replace([np.inf, -np.inf], 0)
    # fbprophet
    model = Prophet(growth="linear",
                    daily_seasonality=False,
                    n_changepoints=n_changepoints)
    model.fit(df)
    future = model.make_future_dataframe(periods=14)
    forecast = model.predict(future)

    # Create figure
    fig = model.plot(forecast, figsize=(15, 5))

    ax = plt.gca()
    for i, v in model.changepoints.items():
        s = f'{v.month}/{v.day}/{v.year % 100}'
        text(v, (df["y"].max() - df["y"].min()) / 2.,
             s,
             rotation=90,
             fontsize=8,
             color='gray')

    middle = (df["y"].max() - df["y"].min()) / 2. - (df["y"].max() -
                                                     df["y"].min()) / 4.
    if events:
        # plot events
        for evt in events:
            ax.axvline(x=evt['date'], linewidth=1, color='lightgrey')
            text(evt['date'],
                 middle,
                 f'{evt["date"]}: {evt["event"]}',
                 rotation=90,
                 fontsize=8,
                 color='gray')
    _ = add_changepoints_to_plot(fig.gca(), model, forecast, cp_color='tomato')
    name = f"{place}: "
    plt.title(
        f"{name} Cumulative number of {metric} over time and chainge points")
    plt.ylabel(f"Cumulative number of {metric}")
    plt.xlabel("")
    ax.xaxis.set_major_formatter(DateFormatter('%b,%Y'))
    ax.grid(False)

    # Use tight layout
    fig.tight_layout()

    plt.savefig(f'../img/{country}/{place}_{metric}_trend.pdf', dpi=600)
    plt.clf()
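A hypothetical call matching the fields the function reads; df must already carry Prophet's ds/y columns, and all values here are illustrative.

import pandas as pd

events = [
    {'date': pd.Timestamp('2020-03-11'), 'event': 'WHO declares pandemic'},
]
show_trend(df, country='Italy', place='Lombardy', metric='Confirmed',
           n_changepoints=20, events=events)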
Beispiel #56
0
    def test_bytesmode_silent(self):
        with warnings.catch_warnings(record=True) as w:
            warnings.resetwarnings()
            warnings.simplefilter('always', ldap.LDAPBytesWarning)
            self._search_wrong_type(bytes_mode=True, strictness='silent')
        self.assertEqual(w, [])
Beispiel #57
0
def setup():
    client_context.init()
    warnings.resetwarnings()
    warnings.simplefilter("always")
Beispiel #58
0
    def __init__(self, fmc, **kwargs):
        warnings.resetwarnings()
        warnings.warn(
            "Deprecated: DeviceGroups() should be called via DeviceGroupRecords()."
        )
        super().__init__(fmc, **kwargs)
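As an aside, the resetwarnings() call above clears all filters so the message is guaranteed to appear. A sketch of the more conventional shape for such a shim leaves the filters alone and tags the warning so tools can act on it; the class names follow the example above and the base class is assumed.

import warnings

class DeviceGroups(DeviceGroupRecords):
    """Deprecated alias kept for backwards compatibility."""

    def __init__(self, fmc, **kwargs):
        warnings.warn(
            "Deprecated: DeviceGroups() should be called via "
            "DeviceGroupRecords().",
            DeprecationWarning,
            stacklevel=2,  # point the warning at the caller
        )
        super().__init__(fmc, **kwargs)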
Beispiel #59
0
    def tearDown(self):
        super().tearDown()
        warnings.resetwarnings()
Beispiel #60
0
def validate(source, output=sys.stdout, xmllint=False, filename=None):
    """
    Prints a validation report for the given file.

    Parameters
    ----------
    source : str, pathlib.Path, or readable file-like object
        Path to a VOTABLE_ xml file, or an object from which one can
        be read.

    output : writable file-like object, optional
        Where to output the report.  Defaults to ``sys.stdout``.
        If `None`, the output will be returned as a string.

    xmllint : bool, optional
        When `True`, also send the file to ``xmllint`` for schema and
        DTD validation.  Requires that ``xmllint`` is installed.  The
        default is `False`.  ``source`` must be a file on the local
        filesystem in order for ``xmllint`` to work.

    filename : str, optional
        A filename to use in the error messages.  If not provided, one
        will be automatically determined from ``source``.

    Returns
    -------
    is_valid : bool or str
        Returns `True` if no warnings were found.  If ``output`` is
        `None`, the return value will be a string.
    """

    from ...utils.console import print_code_line, color_print

    return_as_str = False
    if output is None:
        output = io.StringIO()
        return_as_str = True

    lines = []
    votable = None

    reset_vo_warnings()

    with data.get_readable_fileobj(source, encoding='binary') as fd:
        content = fd.read()
    content_buffer = io.BytesIO(content)
    content_buffer.seek(0)

    if filename is None:
        if isinstance(source, str):
            filename = source
        elif hasattr(source, 'name'):
            filename = source.name
        elif hasattr(source, 'url'):
            filename = source.url
        else:
            filename = "<unknown>"

    with warnings.catch_warnings(record=True) as warning_lines:
        warnings.resetwarnings()
        warnings.simplefilter("always", exceptions.VOWarning, append=True)
        try:
            votable = parse(content_buffer, pedantic=False, filename=filename)
        except ValueError as e:
            lines.append(str(e))

    lines = [
        str(x.message)
        for x in warning_lines if issubclass(x.category, exceptions.VOWarning)
    ] + lines

    content_buffer.seek(0)
    output.write("Validation report for {0}\n\n".format(filename))

    if len(lines):
        xml_lines = iterparser.xml_readlines(content_buffer)

        for warning in lines:
            w = exceptions.parse_vowarning(warning)

            if not w['is_something']:
                output.write(w['message'])
                output.write('\n\n')
            else:
                line = xml_lines[w['nline'] - 1]
                warning = w['warning']
                if w['is_warning']:
                    color = 'yellow'
                else:
                    color = 'red'
                color_print('{0:d}: '.format(w['nline']),
                            '',
                            warning or 'EXC',
                            color,
                            ': ',
                            '',
                            textwrap.fill(w['message'],
                                          initial_indent='          ',
                                          subsequent_indent='  ').lstrip(),
                            file=output)
                print_code_line(line, w['nchar'], file=output)
            output.write('\n')
    else:
        output.write('astropy.io.votable found no violations.\n\n')

    success = 0
    if xmllint and os.path.exists(filename):
        from . import xmlutil

        if votable is None:
            version = "1.1"
        else:
            version = votable.version
        success, stdout, stderr = xmlutil.validate_schema(filename, version)

        if success != 0:
            output.write('xmllint schema violations:\n\n')
            output.write(stderr.decode('utf-8'))
        else:
            output.write('xmllint passed\n')

    if return_as_str:
        return output.getvalue()
    return len(lines) == 0 and success == 0
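A short usage sketch, assuming this function is exposed as astropy.io.votable.validate and that 'catalog.xml' exists on disk.

from astropy.io.votable import validate

# Write the report to stdout; True means no violations were found
is_valid = validate('catalog.xml')

# Or capture the report as a string instead
report = validate('catalog.xml', output=None)
print(report.splitlines()[0])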