Example #1
    def test_urlsplit(self):
        result = netutils.urlsplit('rpc://myhost?someparam#somefragment')
        self.assertEqual(result.scheme, 'rpc')
        self.assertEqual(result.netloc, 'myhost')
        self.assertEqual(result.path, '')
        self.assertEqual(result.query, 'someparam')
        self.assertEqual(result.fragment, 'somefragment')

        result = netutils.urlsplit(
            'rpc://myhost/mypath?someparam#somefragment',
            allow_fragments=False)
        self.assertEqual(result.scheme, 'rpc')
        self.assertEqual(result.netloc, 'myhost')
        self.assertEqual(result.path, '/mypath')
        self.assertEqual(result.query, 'someparam#somefragment')
        self.assertEqual(result.fragment, '')

        result = netutils.urlsplit(
            'rpc://user:pass@myhost/mypath?someparam#somefragment',
            allow_fragments=False)
        self.assertEqual(result.scheme, 'rpc')
        self.assertEqual(result.netloc, 'user:pass@myhost')
        self.assertEqual(result.path, '/mypath')
        self.assertEqual(result.query, 'someparam#somefragment')
        self.assertEqual(result.fragment, '')
Example #2
    def test_urlsplit_ipv6(self):
        ipv6_url = 'http://[::1]:443/v2.0/'
        result = netutils.urlsplit(ipv6_url)
        self.assertEqual(result.scheme, 'http')
        self.assertEqual(result.netloc, '[::1]:443')
        self.assertEqual(result.path, '/v2.0/')
        self.assertEqual(result.hostname, '::1')
        self.assertEqual(result.port, 443)

        ipv6_url = 'http://user:pass@[::1]/v2.0/'
        result = netutils.urlsplit(ipv6_url)
        self.assertEqual(result.scheme, 'http')
        self.assertEqual(result.netloc, 'user:pass@[::1]')
        self.assertEqual(result.path, '/v2.0/')
        self.assertEqual(result.hostname, '::1')
        self.assertEqual(result.port, None)

        ipv6_url = 'https://[2001:db8:85a3::8a2e:370:7334]:1234/v2.0/xy?ab#12'
        result = netutils.urlsplit(ipv6_url)
        self.assertEqual(result.scheme, 'https')
        self.assertEqual(result.netloc, '[2001:db8:85a3::8a2e:370:7334]:1234')
        self.assertEqual(result.path, '/v2.0/xy')
        self.assertEqual(result.hostname, '2001:db8:85a3::8a2e:370:7334')
        self.assertEqual(result.port, 1234)
        self.assertEqual(result.query, 'ab')
        self.assertEqual(result.fragment, '12')
Example #3
    def _parse_resource(res):
        """Parse resource from discovery.

        Either URL can be given or dict. Dict has to contain at least
        keys 'resource_id' and 'resource_url', all the dict keys will be stored
        as metadata.

        :param res: URL or dict containing all resource info.
        :return parsed_url, resource_id, metadata: Returns parsed URL used for
            SNMP query, unique identifier of the resource and metadata
            of the resource.
        """

        if isinstance(res, dict):
            if 'resource_url' not in res or 'resource_id' not in res:
                LOG.exception(_('Passed resource dict must contain keys '
                                'resource_id and resource_url.'))

            metadata = res
            parsed_url = netutils.urlsplit(res['resource_url'])
            resource_id = res['resource_id']
        else:
            metadata = {}
            parsed_url = netutils.urlsplit(res)
            resource_id = res

        return parsed_url, resource_id, metadata
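For illustration, a minimal sketch of how this helper could be called with either accepted form of res (the URLs and keys below are made up):

# Resource given as a bare URL: it doubles as its own identifier.
url, rid, meta = _parse_resource('snmp://10.0.0.5:161')
# url.scheme == 'snmp', rid == 'snmp://10.0.0.5:161', meta == {}

# Resource given as a dict: every key is kept as metadata.
url, rid, meta = _parse_resource({'resource_id': 'node-1',
                                  'resource_url': 'snmp://10.0.0.5:161',
                                  'rack': 'r42'})
# rid == 'node-1', meta is the whole dict, including the extra 'rack' key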
Example #4
    def test_publish_error(self):
        with mock.patch('socket.socket',
                        self._make_broken_socket):
            publisher = udp.UDPPublisher(
                netutils.urlsplit('udp://localhost'))
        publisher.publish_samples(None,
                                  self.test_data)
Example #5
    def test_published_no_mock(self):
        publisher = msg_publisher.RPCPublisher(
            netutils.urlsplit('rpc://'))

        endpoint = mock.MagicMock(['record_metering_data'])
        collector = messaging.get_rpc_server(
            self.transport, self.CONF.publisher_rpc.metering_topic, endpoint)
        endpoint.record_metering_data.side_effect = (lambda *args, **kwds:
                                                     collector.stop())

        collector.start()
        eventlet.sleep()
        publisher.publish_samples(context.RequestContext(),
                                  self.test_data)
        collector.wait()

        class Matcher(object):
            @staticmethod
            def __eq__(data):
                for i, sample_item in enumerate(data):
                    if sample_item['counter_name'] != self.test_data[i].name:
                        return False
                return True

        endpoint.record_metering_data.assert_called_once_with(
            mock.ANY, data=Matcher())
Example #6
    def test_published(self):
        self.data_sent = []
        with mock.patch('socket.socket',
                        self._make_fake_socket(self.data_sent)):
            publisher = udp.UDPPublisher(
                netutils.urlsplit('udp://somehost'))
        publisher.publish_samples(None,
                                  self.test_data)

        self.assertEqual(5, len(self.data_sent))

        sent_counters = []

        for data, dest in self.data_sent:
            counter = msgpack.loads(data)
            sent_counters.append(counter)

            # Check destination
            self.assertEqual(('somehost',
                              self.CONF.collector.udp_port), dest)

        # Check that counters are equal
        self.assertEqual(sorted(
            [utils.meter_message_from_counter(d, "not-so-secret")
             for d in self.test_data]), sorted(sent_counters))
Example #7
    def authenticate(self):
        magic_tuple = netutils.urlsplit(self.auth_url)
        scheme, netloc, path, query, frag = magic_tuple
        port = magic_tuple.port
        if port is None:
            port = 80
        path_parts = path.split('/')
        for part in path_parts:
            if len(part) > 0 and part[0] == 'v':
                self.version = part
                break

        if self.auth_token and self.management_url:
            self._save_keys()
            return

        # TODO(sandy): Assume admin endpoint is 35357 for now.
        # Ideally this is going to have to be provided by the service catalog.
        new_netloc = netloc.replace(':%d' % port, ':%d' % (35357,))
        admin_url = parse.urlunsplit(
            (scheme, new_netloc, path, query, frag))

        auth_url = self.auth_url
        if self.version == "v2.0":  # FIXME(chris): This should be better.
            while auth_url:
                if not self.auth_system or self.auth_system == 'keystone':
                    auth_url = self._v2_auth(auth_url)
                else:
                    auth_url = self._plugin_auth(auth_url)

            # Are we acting on behalf of another user via an
            # existing token? If so, our actual endpoints may
            # be different than that of the admin token.
            if self.proxy_token:
                if self.bypass_url:
                    self.set_management_url(self.bypass_url)
                else:
                    self._fetch_endpoints_from_auth(admin_url)
                # Since keystone no longer returns the user token
                # with the endpoints any more, we need to replace
                # our service account token with the user token.
                self.auth_token = self.proxy_token
        else:
            try:
                while auth_url:
                    auth_url = self._v1_auth(auth_url)
            # In some configurations nova makes redirection to
            # v2.0 keystone endpoint. Also, new location does not contain
            # real endpoint, only hostname and port.
            except exceptions.AuthorizationFailure:
                if auth_url.find('v2.0') < 0:
                    auth_url = auth_url + '/v2.0'
                self._v2_auth(auth_url)

        if self.bypass_url:
            self.set_management_url(self.bypass_url)
        elif not self.management_url:
            raise exceptions.Unauthorized('Nova Client')

        self._save_keys()
Example #8
    def test_published_with_policy_sized_queue_and_rpc_down(self):
        publisher = self.publisher_cls(netutils.urlsplit(
            '%s://?policy=queue&max_queue_length=3' % self.protocol))

        side_effect = oslo.messaging.MessageDeliveryFailure()
        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = side_effect
            for i in range(0, 5):
                for s in self.test_data:
                    s.source = 'test-%d' % i
                publisher.publish_samples(mock.MagicMock(),
                                          self.test_data)

        self.assertEqual(3, len(publisher.local_queue))
        self.assertEqual(
            'test-2',
            publisher.local_queue[0][2][0]['source']
        )
        self.assertEqual(
            'test-3',
            publisher.local_queue[1][2][0]['source']
        )
        self.assertEqual(
            'test-4',
            publisher.local_queue[2][2][0]['source']
        )
Example #9
    def _handle_action(self, action, alarm_id, previous,
                       current, reason, reason_data):
        try:
            action = netutils.urlsplit(action)
        except Exception:
            LOG.error(
                _("Unable to parse action %(action)s for alarm %(alarm_id)s"),
                {'action': action, 'alarm_id': alarm_id})
            return

        try:
            notifier = self.notifiers[action.scheme].obj
        except KeyError:
            scheme = action.scheme
            LOG.error(
                _("Action %(scheme)s for alarm %(alarm_id)s is unknown, "
                  "cannot notify"),
                {'scheme': scheme, 'alarm_id': alarm_id})
            return

        try:
            LOG.debug(_("Notifying alarm %(id)s with action %(act)s") % (
                      {'id': alarm_id, 'act': action}))
            notifier.notify(action, alarm_id, previous,
                            current, reason, reason_data)
        except Exception:
            LOG.exception(_("Unable to notify alarm %s"), alarm_id)
            return
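The core of the routine above is dispatch on the URL scheme. A self-contained sketch of that idea, in which the notifiers table and the 'log://' action are invented for illustration:

from oslo_utils import netutils   # older trees import this as oslo.utils

def _log_notifier(url):
    # Stand-in notifier; a real one would contact an external service.
    pass

notifiers = {'log': _log_notifier}

def handle_action(action):
    split = netutils.urlsplit(action)
    try:
        notifier = notifiers[split.scheme]
    except KeyError:
        raise LookupError('no notifier for scheme %r' % split.scheme)
    notifier(split)

handle_action('log://')   # routed to _log_notifier via the 'log' scheme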
Example #10
    def get_samples(self, manager, cache, resources):
        """Return an iterable of Sample instances from polling the resources.

        :param manager: The service manager invoking the plugin
        :param cache: A dictionary for passing data between plugins
        :param resources: end point to poll data from
        """
        resources = resources or []
        h_cache = cache.setdefault(self.CACHE_KEY, {})
        sample_iters = []
        for res in resources:
            parsed_url = netutils.urlsplit(res)
            ins = self._get_inspector(parsed_url)
            try:
                # Call hardware inspector to poll for the data
                i_cache = h_cache.setdefault(res, {})
                if self.IDENTIFIER not in i_cache:
                    i_cache[self.IDENTIFIER] = list(ins.inspect_generic(
                        parsed_url,
                        self.IDENTIFIER,
                        i_cache))
                # Generate samples
                if i_cache[self.IDENTIFIER]:
                    sample_iters.append(self.generate_samples(
                        parsed_url,
                        i_cache[self.IDENTIFIER]))
            except Exception as err:
                LOG.exception(_('inspector call failed for %(ident)s '
                                'host %(host)s: %(err)s'),
                              dict(ident=self.IDENTIFIER,
                                   host=parsed_url.hostname,
                                   err=err))
        return itertools.chain(*sample_iters)
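The nested setdefault() calls above build a two-level cache. A small sketch of its shape, with illustrative key names rather than the pollster's real constants:

cache = {}                                           # shared between pollsters
h_cache = cache.setdefault('CACHE_KEY', {})          # one bucket per pollster
i_cache = h_cache.setdefault('snmp://10.0.0.5', {})  # one bucket per resource
i_cache.setdefault('IDENTIFIER', ['polled', 'data'])
# A later poll of the same resource finds the cached data and skips the
# inspector call.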
Example #11
    def setUp(self):
        super(TestSNMPInspector, self).setUp()
        self.inspector = snmp.SNMPInspector()
        self.host = netutils.urlsplit("snmp://localhost")
        self.inspector.MAPPING = self.mapping
        self.useFixture(mockpatch.PatchObject(
            self.inspector._cmdGen, "getCmd", new=faux_getCmd_new))
        self.useFixture(mockpatch.PatchObject(
            self.inspector._cmdGen, "bulkCmd", new=faux_bulkCmd_new))
Example #12
    def test_published_with_per_meter_topic(self):
        publisher = msg_publisher.RPCPublisher(
            netutils.urlsplit('rpc://?per_meter_topic=1'))
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            publisher.publish_samples(mock.MagicMock(),
                                      self.test_data)

            class MeterGroupMatcher(object):
                def __eq__(self, meters):
                    return len(set(meter['counter_name']
                                   for meter in meters)) == 1

            topic = self.CONF.publisher_rpc.metering_topic
            expected = [mock.call(topic=topic),
                        mock.call().cast(mock.ANY, 'record_metering_data',
                                         data=mock.ANY),
                        mock.call(topic=topic + '.test'),
                        mock.call().cast(mock.ANY, 'record_metering_data',
                                         data=MeterGroupMatcher()),
                        mock.call(topic=topic + '.test2'),
                        mock.call().cast(mock.ANY, 'record_metering_data',
                                         data=MeterGroupMatcher()),
                        mock.call(topic=topic + '.test3'),
                        mock.call().cast(mock.ANY, 'record_metering_data',
                                         data=MeterGroupMatcher())]
            self.assertEqual(expected, prepare.mock_calls)
Example #13
    def test_published_concurrency(self):
        """Test concurrent access to the local queue of the rpc publisher."""

        publisher = self.publisher_cls(
            netutils.urlsplit('%s://' % self.protocol))

        with mock.patch.object(publisher, '_send') as fake_send:
            def fake_send_wait(ctxt, topic, meters):
                fake_send.side_effect = mock.Mock()
                # Sleep to simulate concurrency and allow other threads to work
                eventlet.sleep(0)

            fake_send.side_effect = fake_send_wait

            job1 = eventlet.spawn(publisher.publish_samples,
                                  mock.MagicMock(), self.test_data)
            job2 = eventlet.spawn(publisher.publish_samples,
                                  mock.MagicMock(), self.test_data)

            job1.wait()
            job2.wait()

        self.assertEqual('default', publisher.policy)
        self.assertEqual(2, len(fake_send.mock_calls))
        self.assertEqual(0, len(publisher.local_queue))
Example #14
    def test_urlsplit_params(self):
        test_url = "http://localhost/?a=b&c=d"
        result = netutils.urlsplit(test_url)
        self.assertEqual({'a': 'b', 'c': 'd'}, result.params())
        self.assertEqual({'a': 'b', 'c': 'd'}, result.params(collapse=False))

        test_url = "http://localhost/?a=b&a=c&a=d"
        result = netutils.urlsplit(test_url)
        self.assertEqual({'a': 'd'}, result.params())
        self.assertEqual({'a': ['b', 'c', 'd']}, result.params(collapse=False))

        test_url = "http://localhost"
        result = netutils.urlsplit(test_url)
        self.assertEqual({}, result.params())

        test_url = "http://localhost?"
        result = netutils.urlsplit(test_url)
        self.assertEqual({}, result.params())
Example #15
def get_publisher(url, namespace='ceilometer.publisher'):
    """Get publisher driver and load it.

    :param url: URL for the publisher
    :param namespace: Namespace to use to look for drivers.
    """
    parse_result = netutils.urlsplit(url)
    loaded_driver = driver.DriverManager(namespace, parse_result.scheme)
    return loaded_driver.driver(parse_result)
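A hedged usage sketch: the URL scheme picks the stevedore driver registered under the given namespace (the endpoint below is only an example):

# 'udp' selects whichever driver is registered as 'udp' in the
# 'ceilometer.publisher' entry-point namespace; the parsed URL is then
# passed to that driver's constructor.
publisher = get_publisher('udp://collector.example.com:4952')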
Example #16
    def test_file_publisher_invalid(self):
        # Test invalid max bytes, backup count configurations
        tempdir = tempfile.mkdtemp()
        parsed_url = netutils.urlsplit(
            'file://%s/log_file_bad'
            '?max_bytes=yus&backup_count=5y' % tempdir)
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_samples(None,
                                  self.test_data)

        self.assertIsNone(publisher.publisher_logger)
Example #17
    def test_published_with_policy_drop_and_rpc_down(self):
        publisher = self.publisher_cls(
            netutils.urlsplit('%s://?policy=drop' % self.protocol))
        side_effect = oslo.messaging.MessageDeliveryFailure()
        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = side_effect
            publisher.publish_samples(mock.MagicMock(),
                                      self.test_data)
            self.assertEqual(0, len(publisher.local_queue))
            fake_send.assert_called_once_with(
                mock.ANY, self.CONF.publisher_rpc.metering_topic,
                mock.ANY)
Example #18
    def _parse_my_resource(resource):

        parse_url = netutils.urlsplit(resource)

        params = urlparse.parse_qs(parse_url.query)
        parts = urlparse.ParseResult(parse_url.scheme,
                                     parse_url.netloc,
                                     parse_url.path,
                                     None,
                                     None,
                                     None)
        return parts, params
Example #19
    def test_publish_target(self):
        publisher = msg_publisher.RPCPublisher(
            netutils.urlsplit('rpc://?target=custom_procedure_call'))
        cast_context = mock.MagicMock()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.return_value = cast_context
            publisher.publish_samples(mock.MagicMock(),
                                      self.test_data)

        prepare.assert_called_once_with(
            topic=self.CONF.publisher_rpc.metering_topic)
        cast_context.cast.assert_called_once_with(
            mock.ANY, 'custom_procedure_call', data=mock.ANY)
Example #20
def parse_uri(uri):
    """Parses a uri into its components."""
    # Do some basic validation before continuing...
    if not isinstance(uri, six.string_types):
        raise TypeError(
            "Can only parse string types to uri data, " "and not an object of type %s" % reflection.get_class_name(uri)
        )
    match = _SCHEME_REGEX.match(uri)
    if not match:
        raise ValueError("Uri %r does not start with a RFC 3986 compliant" " scheme" % (uri))
    split = netutils.urlsplit(uri)
    return ModifiedSplitResult(
        scheme=split.scheme, fragment=split.fragment, path=split.path, netloc=split.netloc, query=split.query
    )
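Assuming ModifiedSplitResult exposes the usual split fields, a typical call would look like this (the URI is made up):

result = parse_uri('mysql://dbhost:3306/taskflow?read_timeout=5')
# result.scheme == 'mysql'
# result.netloc == 'dbhost:3306'
# result.path == '/taskflow'
# result.query == 'read_timeout=5'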
Example #21
    def test_published_with_policy_block(self, mylog):
        publisher = self.publisher_cls(
            netutils.urlsplit('%s://?policy=default' % self.protocol))
        side_effect = oslo.messaging.MessageDeliveryFailure()
        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = side_effect
            self.assertRaises(
                oslo.messaging.MessageDeliveryFailure,
                publisher.publish_samples,
                mock.MagicMock(), self.test_data)
            self.assertTrue(mylog.info.called)
            self.assertEqual(0, len(publisher.local_queue))
            fake_send.assert_called_once_with(
                mock.ANY, self.CONF.publisher_rpc.metering_topic,
                mock.ANY)
Example #22
    def test_published_with_policy_incorrect(self, mylog):
        publisher = self.publisher_cls(
            netutils.urlsplit('%s://?policy=notexist' % self.protocol))
        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = side_effect
            self.assertRaises(
                oslo.messaging._drivers.common.RPCException,
                publisher.publish_samples,
                mock.MagicMock(), self.test_data)
            self.assertTrue(mylog.warn.called)
            self.assertEqual('default', publisher.policy)
            self.assertEqual(0, len(publisher.local_queue))
            fake_send.assert_called_once_with(
                mock.ANY, self.CONF.publisher_rpc.metering_topic,
                mock.ANY)
Example #23
    def test_file_publisher_maxbytes(self):
        # Test valid configurations
        tempdir = tempfile.mkdtemp()
        name = '%s/log_file' % tempdir
        parsed_url = netutils.urlsplit('file://%s?max_bytes=50&backup_count=3'
                                       % name)
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_samples(None,
                                  self.test_data)

        handler = publisher.publisher_logger.handlers[0]
        self.assertIsInstance(handler,
                              logging.handlers.RotatingFileHandler)
        self.assertEqual([50, name, 3], [handler.maxBytes,
                                         handler.baseFilename,
                                         handler.backupCount])
        # The rotating file gets created since only allow 50 bytes.
        self.assertTrue(os.path.exists('%s.1' % name))
Example #24
    def connect(self, url):
        connection_options = pymongo.uri_parser.parse_uri(url)
        del connection_options['database']
        del connection_options['username']
        del connection_options['password']
        del connection_options['collection']
        pool_key = tuple(connection_options)

        if pool_key in self._pool:
            client = self._pool.get(pool_key)()
            if client:
                return client
        splitted_url = netutils.urlsplit(url)
        log_data = {'db': splitted_url.scheme,
                    'nodelist': connection_options['nodelist']}
        LOG.info(_('Connecting to %(db)s on %(nodelist)s') % log_data)
        client = self._mongo_connect(url)
        self._pool[pool_key] = weakref.ref(client)
        return client
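The pool above stores only weak references, so idle clients can be garbage-collected. A standalone sketch of that pattern, with a plain object standing in for a pymongo client (the immediate collection relies on CPython reference counting):

import weakref

class FakeClient(object):
    pass

pool = {}
key = ('mongodb', 'localhost')

client = FakeClient()
pool[key] = weakref.ref(client)   # the pool does not keep the client alive
assert pool[key]() is client      # dereference while a strong ref exists

del client                        # drop the only strong reference
assert pool[key]() is None        # the weak reference now yields None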
Example #25
def parse_uri(uri, query_duplicates=False):
    """Parses a uri into its components."""
    # Do some basic validation before continuing...
    if not isinstance(uri, six.string_types):
        raise TypeError("Can only parse string types to uri data, "
                        "and not an object of type %s"
                        % reflection.get_class_name(uri))
    match = _SCHEME_REGEX.match(uri)
    if not match:
        raise ValueError("Uri %r does not start with a RFC 3986 compliant"
                         " scheme" % (uri))
    parsed = netutils.urlsplit(uri)
    if parsed.query:
        query_params = urlparse.parse_qsl(parsed.query)
        if not query_duplicates:
            query_params = dict(query_params)
        else:
            # Retain duplicates in a list for keys which have duplicates, but
            # for items which are not duplicated, just associate the key with
            # the value.
            tmp_query_params = {}
            for (k, v) in query_params:
                if k in tmp_query_params:
                    p_v = tmp_query_params[k]
                    if isinstance(p_v, list):
                        p_v.append(v)
                    else:
                        p_v = [p_v, v]
                        tmp_query_params[k] = p_v
                else:
                    tmp_query_params[k] = v
            query_params = tmp_query_params
    else:
        query_params = {}
    return AttrDict(
        scheme=parsed.scheme,
        username=parsed.username,
        password=parsed.password,
        fragment=parsed.fragment,
        path=parsed.path,
        params=query_params,
        hostname=parsed.hostname,
        port=parsed.port)
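A short illustration of the query_duplicates switch, assuming AttrDict exposes its keys as attributes (the URL is made up):

parse_uri('http://h/?a=1&a=2&b=3').params
# => {'a': '2', 'b': '3'}            the last duplicate wins by default

parse_uri('http://h/?a=1&a=2&b=3', query_duplicates=True).params
# => {'a': ['1', '2'], 'b': '3'}     duplicated keys keep every value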
Example #26
    def _parse_connection_url(url):
        """Parse connection parameters from a database url.

        .. note::

          HBase Thrift does not support authentication and there is no
          database name, so we are not looking for these in the url.
        """
        opts = {}
        result = netutils.urlsplit(url)
        opts['table_prefix'] = urlparse.parse_qs(
            result.query).get('table_prefix', [None])[0]
        opts['dbtype'] = result.scheme
        if ':' in result.netloc:
            opts['host'], port = result.netloc.split(':')
        else:
            opts['host'] = result.netloc
            port = 9090
        opts['port'] = port and int(port) or 9090
        return opts
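For example, the parser above would turn an HBase URL into options roughly like this (host name and prefix are illustrative):

_parse_connection_url('hbase://hbase-master:9090/?table_prefix=ceilo')
# => {'table_prefix': 'ceilo', 'dbtype': 'hbase',
#     'host': 'hbase-master', 'port': 9090}

_parse_connection_url('hbase://hbase-master')   # port falls back to 9090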
Example #27
    def test_file_publisher(self):
        # Test missing max bytes, backup count configurations
        tempdir = tempfile.mkdtemp()
        name = '%s/log_file_plain' % tempdir
        parsed_url = netutils.urlsplit('file://%s' % name)
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_samples(None,
                                  self.test_data)

        handler = publisher.publisher_logger.handlers[0]
        self.assertIsInstance(handler,
                              logging.handlers.RotatingFileHandler)
        self.assertEqual([0, name, 0], [handler.maxBytes,
                                        handler.baseFilename,
                                        handler.backupCount])
        # Test the content is corrected saved in the file
        self.assertTrue(os.path.exists(name))
        with open(name, 'r') as f:
            content = f.read()
        for sample_item in self.test_data:
            self.assertTrue(sample_item.id in content)
            self.assertTrue(sample_item.timestamp in content)
Example #28
    def test_published_with_policy_default_sized_queue_and_rpc_down(self):
        publisher = self.publisher_cls(
            netutils.urlsplit('%s://?policy=queue' % self.protocol))

        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = side_effect
            for i in range(0, 2000):
                for s in self.test_data:
                    s.source = 'test-%d' % i
                publisher.publish_samples(mock.MagicMock(),
                                          self.test_data)

        self.assertEqual(1024, len(publisher.local_queue))
        self.assertEqual(
            'test-976',
            publisher.local_queue[0][2][0]['source']
        )
        self.assertEqual(
            'test-1999',
            publisher.local_queue[1023][2][0]['source']
        )
Example #29
    def test_published_with_policy_queue_and_rpc_down_up(self):
        self.rpc_unreachable = True
        publisher = self.publisher_cls(
            netutils.urlsplit('%s://?policy=queue' % self.protocol))

        side_effect = oslo.messaging.MessageDeliveryFailure()
        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = side_effect
            publisher.publish_samples(mock.MagicMock(),
                                      self.test_data)

            self.assertEqual(1, len(publisher.local_queue))

            fake_send.side_effect = mock.MagicMock()
            publisher.publish_samples(mock.MagicMock(),
                                      self.test_data)

            self.assertEqual(0, len(publisher.local_queue))

            topic = self.CONF.publisher_rpc.metering_topic
            expected = [mock.call(mock.ANY, topic, mock.ANY),
                        mock.call(mock.ANY, topic, mock.ANY),
                        mock.call(mock.ANY, topic, mock.ANY)]
            self.assertEqual(expected, fake_send.mock_calls)
Example #30
    def test_published_no_mock(self):
        publisher = msg_publisher.RPCPublisher(netutils.urlsplit('rpc://'))

        endpoint = mock.MagicMock(['record_metering_data'])
        collector = messaging.get_rpc_server(
            self.transport, self.CONF.publisher_rpc.metering_topic, endpoint)
        endpoint.record_metering_data.side_effect = (
            lambda *args, **kwds: collector.stop())

        collector.start()
        eventlet.sleep()
        publisher.publish_samples(context.RequestContext(), self.test_data)
        collector.wait()

        class Matcher(object):
            @staticmethod
            def __eq__(data):
                for i, sample_item in enumerate(data):
                    if sample_item['counter_name'] != self.test_data[i].name:
                        return False
                return True

        endpoint.record_metering_data.assert_called_once_with(mock.ANY,
                                                              data=Matcher())
Example #31
    def test_get_inspector_illegal(self):
        url = netutils.urlsplit("illegal://")
        self.assertRaises(RuntimeError, inspector.get_inspector, url)
Example #32
    def parse_endpoint(endpoint):
        return netutils.urlsplit(endpoint)
Example #33
    def test_get_inspector(self):
        url = netutils.urlsplit("snmp://")
        driver = inspector.get_inspector(url)
        self.assertTrue(driver)
Example #34
    def test_publish_error(self):
        with mock.patch('socket.socket', self._make_broken_socket):
            publisher = udp.UDPPublisher(netutils.urlsplit('udp://localhost'))
        publisher.publish_samples(None, self.test_data)
Example #35
    def setUp(self):
        super(TestPollsterUtils, self).setUp()
        self.host_url = netutils.urlsplit("snmp://127.0.0.1:161")
Example #36
    def test_no_host(self):
        result = netutils.urlsplit('http://')
        self.assertEqual('', result.netloc)
        self.assertEqual(None, result.port)
        self.assertEqual(None, result.hostname)
        self.assertEqual('http', result.scheme)
Example #37
    def __init__(self, url):
        url_split = netutils.urlsplit(url)
        self.conn = es.Elasticsearch(url_split.netloc)
Example #38
    def authenticate(self):
        if not self.auth_url:
            msg = _("Authentication requires 'auth_url', which should be "
                    "specified in '%s'") % self.__class__.__name__
            raise exceptions.AuthorizationFailure(msg)
        magic_tuple = netutils.urlsplit(self.auth_url)
        scheme, netloc, path, query, frag = magic_tuple
        port = magic_tuple.port
        if port is None:
            port = 80
        path_parts = path.split('/')
        for part in path_parts:
            if len(part) > 0 and part[0] == 'v':
                self.version = part
                break

        if self.auth_token and self.management_url:
            self._save_keys()
            return

        # TODO(sandy): Assume admin endpoint is 35357 for now.
        # Ideally this is going to have to be provided by the service catalog.
        new_netloc = netloc.replace(':%d' % port, ':%d' % (35357, ))
        admin_url = parse.urlunsplit((scheme, new_netloc, path, query, frag))

        auth_url = self.auth_url
        if self.version == "v2.0":  # FIXME(chris): This should be better.
            while auth_url:
                if not self.auth_system or self.auth_system == 'keystone':
                    auth_url = self._v2_auth(auth_url)
                else:
                    auth_url = self._plugin_auth(auth_url)

            # Are we acting on behalf of another user via an
            # existing token? If so, our actual endpoints may
            # be different than that of the admin token.
            if self.proxy_token:
                if self.bypass_url:
                    self.set_management_url(self.bypass_url)
                else:
                    self._fetch_endpoints_from_auth(admin_url)
                # Since keystone no longer returns the user token
                # with the endpoints any more, we need to replace
                # our service account token with the user token.
                self.auth_token = self.proxy_token
        else:
            try:
                while auth_url:
                    auth_url = self._v1_auth(auth_url)
            # In some configurations nova makes redirection to
            # v2.0 keystone endpoint. Also, new location does not contain
            # real endpoint, only hostname and port.
            except exceptions.AuthorizationFailure:
                if auth_url.find('v2.0') < 0:
                    auth_url = auth_url + '/v2.0'
                self._v2_auth(auth_url)

        if self.bypass_url:
            self.set_management_url(self.bypass_url)
        elif not self.management_url:
            raise exceptions.Unauthorized('Nova Client')

        self._save_keys()