Example #1
    def get_samples(self, manager, cache, resources):
        user_count = 0
        user_with_orcid_count = 0
        users_by_idp = defaultdict(list)
        active_users = 0
        for user in resources:
            user_count += 1
            if user.orcid:
                user_with_orcid_count += 1
            if user.expiry_status != 'inactive':
                active_users += 1
            for eid in user.external_ids:
                idp = eid.idp
                url = urlsplit(idp)
                if url.netloc:
                    users_by_idp[url.netloc.replace('.', '_')].append(user)
                elif idp in ODD_IDPS:
                    users_by_idp[ODD_IDPS[idp].replace('.', '_')].append(user)
                elif idp == 'idp.fake.nectar.org.au':
                    # Known placeholder IdP: log quietly and skip it.
                    LOG.debug("Ignoring fake IDP %s", idp)
                    continue
                else:
                    LOG.warning("Unknown IDP %s", idp)

        samples = []
        samples.append(sample.Sample(
            name='global.users.total',
            type=sample.TYPE_GAUGE,
            unit='User',
            volume=user_count,
            user_id=None,
            project_id=None,
            resource_id='global-stats')
        )
        samples.append(sample.Sample(
            name='global.users.active',
            type=sample.TYPE_GAUGE,
            unit='User',
            volume=active_users,
            user_id=None,
            project_id=None,
            resource_id='global-stats')
        )
        samples.append(sample.Sample(
            name='global.users.with_orcid',
            type=sample.TYPE_GAUGE,
            unit='User',
            volume=user_with_orcid_count,
            user_id=None,
            project_id=None,
            resource_id='global-stats')
        )

        for idp, users in users_by_idp.items():
            samples.append(sample.Sample(
                name='users.total',
                type=sample.TYPE_GAUGE,
                unit='User',
                volume=len(users),
                user_id=None,
                project_id=None,
                resource_id=idp)
            )

        sample_iters = []
        sample_iters.append(samples)
        return itertools.chain(*sample_iters)
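
The pollster above leans on module-level context that the excerpt omits. A minimal sketch of the assumed imports and the ODD_IDPS table, where the mapping entry is a hypothetical example of a non-URL IdP identifier:

# Assumed context for the pollster above; the ODD_IDPS entry is hypothetical.
import itertools
import logging
from collections import defaultdict
from urllib.parse import urlsplit  # six.moves.urllib.parse.urlsplit on Python 2

from ceilometer import sample

LOG = logging.getLogger(__name__)

# Maps IdP entity IDs that are not plain URLs to a hostname-like key.
ODD_IDPS = {
    'urn:mace:example.org:idp': 'idp.example.org',
}
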
Example #2
    def post(self, body):
        """Post a list of new Samples to Ceilometer.

        :param body: a list of samples within the request body.
        """

        # Note:
        #  1) the above validate decorator seems to do nothing.
        #  2) the mandatory options seem to also do nothing.
        #  3) the body should already be a list of Samples

        def get_consistent_source():
            '''Find a source that can be applied across the sample group
            or raise InvalidInput if the sources are inconsistent.
            If all are None - use the configured sample_source
            If any sample has source set then the others must be the same
            or None.
            '''
            source = None
            for s in samples:
                if source and s.source:
                    if source != s.source:
                        raise wsme.exc.InvalidInput(
                            'source', s.source,
                            'can not post Samples with different sources')
                if s.source and not source:
                    source = s.source
            return source or pecan.request.cfg.sample_source

        samples = [Sample(**b) for b in body]
        now = timeutils.utcnow()
        auth_project = acl.get_limited_to_project(pecan.request.headers)
        source = get_consistent_source()
        for s in samples:
            if self._id != s.counter_name:
                raise wsme.exc.InvalidInput('counter_name', s.counter_name,
                                            'should be %s' % self._id)

            s.user_id = (s.user_id or pecan.request.headers.get('X-User-Id'))
            s.project_id = (s.project_id
                            or pecan.request.headers.get('X-Project-Id'))
            if auth_project and auth_project != s.project_id:
                # non admin user trying to cross post to another project_id
                auth_msg = 'can not post samples to other projects'
                raise wsme.exc.InvalidInput('project_id', s.project_id,
                                            auth_msg)

            if s.timestamp is None or s.timestamp is wsme.Unset:
                s.timestamp = now
            s.source = '%s:%s' % (s.project_id, source)

        published_samples = []
        for s in samples:
            published_sample = sample.Sample(
                name=s.counter_name,
                type=s.counter_type,
                unit=s.counter_unit,
                volume=s.counter_volume,
                user_id=s.user_id,
                project_id=s.project_id,
                resource_id=s.resource_id,
                timestamp=s.timestamp.isoformat(),
                resource_metadata=s.resource_metadata,
                source=source)
            s.message_id = published_sample.id
            published_samples.append(published_sample)

        with pecan.request.pipeline_manager.publisher(
                context.get_admin_context()) as publisher:
            publisher(published_samples)

        # TODO(asalkeld) this is not ideal, it would be nice if the publisher
        # returned the created sample message with message id (or at least
        # a list of message_ids).
        return samples
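
For context, this controller backs Ceilometer's v2 POST /v2/meters/<meter_name> resource. A hypothetical client-side call (host, token, and values are illustrative):

# Hypothetical client call posting one sample to this controller.
import json

import requests

body = [{
    'counter_name': 'instance',  # must match the meter name in the URL
    'counter_type': 'gauge',
    'counter_unit': 'instance',
    'counter_volume': 1,
    'resource_id': 'resource-id',
}]
resp = requests.post(
    'http://localhost:8777/v2/meters/instance',
    headers={'X-Auth-Token': 'TOKEN', 'Content-Type': 'application/json'},
    data=json.dumps(body))
resp.raise_for_status()
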
Example #3
    def _do_test_rate_of_change_conversion(self, prev, curr, type, expected,
                                           offset=1, weight=None):
        s = "(resource_metadata.user_metadata.autoscaling_weight or 1.0)" \
            "* (resource_metadata.non.existent or 1.0)" \
            "* (100.0 / (10**9 * (resource_metadata.cpu_number or 1)))"
        self.pipeline_cfg[0]['transformers'] = [
            {
                'name': 'rate_of_change',
                'parameters': {
                    'source': {},
                    'target': {'name': 'cpu_util',
                               'unit': '%',
                               'type': sample.TYPE_GAUGE,
                               'scale': s},
                }
            },
        ]
        self.pipeline_cfg[0]['counters'] = ['cpu']
        now = timeutils.utcnow()
        later = now + datetime.timedelta(minutes=offset)
        um = {'autoscaling_weight': weight} if weight else {}
        counters = [
            sample.Sample(
                name='cpu',
                type=type,
                volume=prev,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=now.isoformat(),
                resource_metadata={'cpu_number': 4,
                                   'user_metadata': um},
            ),
            sample.Sample(
                name='cpu',
                type=type,
                volume=prev,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource2',
                timestamp=now.isoformat(),
                resource_metadata={'cpu_number': 2,
                                   'user_metadata': um},
            ),
            sample.Sample(
                name='cpu',
                type=type,
                volume=curr,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=later.isoformat(),
                resource_metadata={'cpu_number': 4,
                                   'user_metadata': um},
            ),
            sample.Sample(
                name='cpu',
                type=type,
                volume=curr,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource2',
                timestamp=later.isoformat(),
                resource_metadata={'cpu_number': 2,
                                   'user_metadata': um},
            ),
        ]

        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]

        pipe.publish_samples(None, counters)
        publisher = pipeline_manager.pipelines[0].publishers[0]
        self.assertEqual(len(publisher.samples), 2)
        pipe.flush(None)
        self.assertEqual(len(publisher.samples), 2)
        cpu_util = publisher.samples[0]
        self.assertEqual(cpu_util.name, 'cpu_util')
        self.assertEqual(cpu_util.resource_id, 'test_resource')
        self.assertEqual(cpu_util.unit, '%')
        self.assertEqual(cpu_util.type, sample.TYPE_GAUGE)
        self.assertEqual(cpu_util.volume, expected)
        cpu_util = publisher.samples[1]
        self.assertEqual(cpu_util.name, 'cpu_util')
        self.assertEqual(cpu_util.resource_id, 'test_resource2')
        self.assertEqual(cpu_util.unit, '%')
        self.assertEqual(cpu_util.type, sample.TYPE_GAUGE)
        self.assertEqual(cpu_util.volume, expected * 2)
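
For reference, the rate_of_change transformer divides the volume delta by the wall-clock delta and then applies the scale expression; a minimal sketch of the arithmetic behind the assertions, assuming those semantics (names are illustrative):

# Sketch of the expected cpu_util volume.
def expected_cpu_util(prev, curr, offset_minutes, cpu_number, weight=1.0):
    rate = (curr - prev) / (offset_minutes * 60.0)  # ns of CPU per second
    return rate * weight * (100.0 / (10 ** 9 * cpu_number))

# test_resource has cpu_number=4, test_resource2 has cpu_number=2, so the
# second utilisation comes out exactly twice the first, matching expected * 2.
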
Example #4
class PublisherWorkflowTest(base.BaseTestCase,
                            testscenarios.TestWithScenarios):

    sample_scenarios = [
        ('disk.root.size',
         dict(sample=sample.Sample(
             resource_id=str(uuid.uuid4()) + "_foobar",
             name='disk.root.size',
             unit='GB',
             type=sample.TYPE_GAUGE,
             volume=2,
             user_id='test_user',
             project_id='test_project',
             source='openstack',
             timestamp='2012-05-08 20:23:48.028195',
             resource_metadata={
                 'host': 'foo',
                 'image_ref': 'imageref!',
                 'instance_flavor_id': 1234,
                 'display_name': 'myinstance',
             },
         ),
              metric_attributes={
                  "archive_policy_name": "ceilometer-low",
                  "unit": "GB",
                  "measures": [{
                      'timestamp': '2012-05-08 20:23:48.028195',
                      'value': 2
                  }]
              },
              postable_attributes={
                  'user_id': 'test_user',
                  'project_id': 'test_project',
              },
              patchable_attributes={
                  'host': 'foo',
                  'image_ref': 'imageref!',
                  'flavor_id': 1234,
                  'display_name': 'myinstance',
              },
              metric_names=[
                  'disk.root.size', 'disk.ephemeral.size', 'memory', 'vcpus',
                  'memory.usage', 'memory.resident', 'memory.swap.in',
                  'memory.swap.out', 'memory.bandwidth.total',
                  'memory.bandwidth.local', 'cpu', 'cpu.delta', 'cpu_util',
                  'disk.read.requests', 'cpu_l3_cache',
                  'perf.cpu.cycles', 'perf.instructions',
                  'perf.cache.references', 'perf.cache.misses',
                  'disk.read.requests.rate', 'disk.write.requests',
                  'disk.write.requests.rate', 'disk.read.bytes',
                  'disk.read.bytes.rate', 'disk.write.bytes',
                  'disk.write.bytes.rate', 'disk.latency', 'disk.iops',
                  'disk.capacity', 'disk.allocation', 'disk.usage',
                  'compute.instance.booting.time'
              ],
              resource_type='instance')),
        ('hardware.ipmi.node.power',
         dict(sample=sample.Sample(
             resource_id=str(uuid.uuid4()) + "_foobar",
             name='hardware.ipmi.node.power',
             unit='W',
             type=sample.TYPE_GAUGE,
             volume=2,
             user_id='test_user',
             project_id='test_project',
             source='openstack',
             timestamp='2012-05-08 20:23:48.028195',
             resource_metadata={
                 'useless': 'not_used',
             },
         ),
              metric_attributes={
                  "archive_policy_name": "ceilometer-low",
                  "unit": "W",
                  "measures": [{
                      'timestamp': '2012-05-08 20:23:48.028195',
                      'value': 2
                  }]
              },
              postable_attributes={
                  'user_id': 'test_user',
                  'project_id': 'test_project',
              },
              patchable_attributes={},
              metric_names=[
                  'hardware.ipmi.node.power', 'hardware.ipmi.node.temperature',
                  'hardware.ipmi.node.inlet_temperature',
                  'hardware.ipmi.node.outlet_temperature',
                  'hardware.ipmi.node.fan', 'hardware.ipmi.node.current',
                  'hardware.ipmi.node.voltage', 'hardware.ipmi.node.airflow',
                  'hardware.ipmi.node.cups', 'hardware.ipmi.node.cpu_util',
                  'hardware.ipmi.node.mem_util', 'hardware.ipmi.node.io_util'
              ],
              resource_type='ipmi')),
    ]

    default_workflow = dict(resource_exists=True,
                            post_measure_fail=False,
                            create_resource_fail=False,
                            create_resource_race=False,
                            update_resource_fail=False,
                            retry_post_measures_fail=False)
    workflow_scenarios = [
        ('normal_workflow', {}),
        ('new_resource', dict(resource_exists=False)),
        ('new_resource_compat', dict(resource_exists=False)),
        ('new_resource_fail',
         dict(resource_exists=False, create_resource_fail=True)),
        ('new_resource_race',
         dict(resource_exists=False, create_resource_race=True)),
        ('resource_update_fail', dict(update_resource_fail=True)),
        ('retry_fail',
         dict(resource_exists=False, retry_post_measures_fail=True)),
        ('measure_fail', dict(post_measure_fail=True)),
    ]

    @classmethod
    def generate_scenarios(cls):
        workflow_scenarios = []
        for name, wf_change in cls.workflow_scenarios:
            wf = cls.default_workflow.copy()
            wf.update(wf_change)
            workflow_scenarios.append((name, wf))
        cls.scenarios = testscenarios.multiply_scenarios(
            cls.sample_scenarios, workflow_scenarios)

    def setUp(self):
        super(PublisherWorkflowTest, self).setUp()
        conf = ceilometer_service.prepare_service(argv=[], config_files=[])
        self.conf = self.useFixture(config_fixture.Config(conf))
        ks_client = mock.Mock()
        ks_client.projects.find.return_value = mock.Mock(
            name='gnocchi', id='a2d42c23-d518-46b6-96ab-3fba2e146859')
        self.useFixture(
            fixtures.MockPatch('ceilometer.keystone_client.get_client',
                               return_value=ks_client))
        self.ks_client = ks_client

    @mock.patch('gnocchiclient.v1.client.Client')
    def test_event_workflow(self, fakeclient_cls):
        url = netutils.urlsplit("gnocchi://")
        self.publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)

        fakeclient = fakeclient_cls.return_value

        fakeclient.resource.search.side_effect = [
            [{
                "id": "b26268d6-8bb5-11e6-baff-00224d8226cd",
                "type": "instance_disk",
                "instance_id": "9f9d01b9-4a58-4271-9e27-398b21ab20d1"
            }],
            [{
                "id": "b1c7544a-8bb5-11e6-850e-00224d8226cd",
                "type": "instance_network_interface",
                "instance_id": "9f9d01b9-4a58-4271-9e27-398b21ab20d1"
            }],
        ]

        search_params = {
            '=': {
                'instance_id': '9f9d01b9-4a58-4271-9e27-398b21ab20d1'
            }
        }

        now = timeutils.utcnow()
        self.useFixture(utils_fixture.TimeFixture(now))

        expected_calls = [
            mock.call.resource.search('instance_network_interface',
                                      search_params),
            mock.call.resource.search('instance_disk', search_params),
            mock.call.resource.update('instance',
                                      '9f9d01b9-4a58-4271-9e27-398b21ab20d1',
                                      {'ended_at': now.isoformat()}),
            mock.call.resource.update('instance_disk',
                                      'b26268d6-8bb5-11e6-baff-00224d8226cd',
                                      {'ended_at': now.isoformat()}),
            mock.call.resource.update('instance_network_interface',
                                      'b1c7544a-8bb5-11e6-850e-00224d8226cd',
                                      {'ended_at': now.isoformat()}),
            mock.call.resource.update('image',
                                      'dc337359-de70-4044-8e2c-80573ba6e577',
                                      {'ended_at': now.isoformat()}),
            mock.call.resource.update('volume',
                                      '6cc6e7dd-d17d-460f-ae79-7e08a216ce96',
                                      {'ended_at': now.isoformat()}),
            mock.call.resource.update('network',
                                      '705e2c08-08e8-45cb-8673-5c5be955569b',
                                      {'ended_at': now.isoformat()})
        ]

        self.publisher.publish_events([
            INSTANCE_DELETE_START, IMAGE_DELETE_START, VOLUME_DELETE_START,
            FLOATINGIP_DELETE_END
        ])
        self.assertEqual(8, len(fakeclient.mock_calls))
        for call in expected_calls:
            self.assertIn(call, fakeclient.mock_calls)

    @mock.patch('ceilometer.publisher.gnocchi.LOG')
    @mock.patch('gnocchiclient.v1.client.Client')
    def test_workflow(self, fakeclient_cls, logger):

        fakeclient = fakeclient_cls.return_value

        resource_id = self.sample.resource_id.replace("/", "_")
        metric_name = self.sample.name
        gnocchi_id = uuid.uuid4()

        expected_calls = [
            mock.call.archive_policy.get("ceilometer-low"),
            mock.call.archive_policy.get("ceilometer-low-rate"),
            mock.call.metric.batch_resources_metrics_measures(
                {resource_id: {
                    metric_name: self.metric_attributes
                }},
                create_metrics=True)
        ]
        expected_debug = [
            mock.call('filtered project found: %s',
                      'a2d42c23-d518-46b6-96ab-3fba2e146859'),
        ]

        measures_posted = False
        batch_side_effect = []
        if self.post_measure_fail:
            batch_side_effect += [Exception('boom!')]
        elif not self.resource_exists:
            batch_side_effect += [
                gnocchi_exc.BadRequest(
                    400, {
                        "cause":
                        "Unknown resources",
                        'detail': [{
                            'resource_id': gnocchi_id,
                            'original_resource_id': resource_id
                        }]
                    })
            ]

            attributes = self.postable_attributes.copy()
            attributes.update(self.patchable_attributes)
            attributes['id'] = self.sample.resource_id
            attributes['metrics'] = dict(
                (metric_name, {}) for metric_name in self.metric_names)
            for k, v in six.iteritems(attributes['metrics']):
                if k in [
                        "cpu", "disk.read.requests", "disk.write.requests",
                        "disk.read.bytes", "disk.write.bytes"
                ]:
                    v["archive_policy_name"] = "ceilometer-low-rate"
                else:
                    v["archive_policy_name"] = "ceilometer-low"

                if k == 'disk.root.size':
                    v['unit'] = 'GB'
                elif k == 'hardware.ipmi.node.power':
                    v['unit'] = 'W'
            expected_calls.append(
                mock.call.resource.create(self.resource_type, attributes))

            if self.create_resource_fail:
                fakeclient.resource.create.side_effect = [Exception('boom!')]
            elif self.create_resource_race:
                fakeclient.resource.create.side_effect = [
                    gnocchi_exc.ResourceAlreadyExists(409)
                ]
            else:  # not resource_exists
                expected_debug.append(
                    mock.call('Resource %s created', self.sample.resource_id))

            if not self.create_resource_fail:
                expected_calls.append(
                    mock.call.metric.batch_resources_metrics_measures(
                        {resource_id: {
                            metric_name: self.metric_attributes
                        }},
                        create_metrics=True))

                if self.retry_post_measures_fail:
                    batch_side_effect += [Exception('boom!')]
                else:
                    measures_posted = True

        else:
            measures_posted = True

        if measures_posted:
            batch_side_effect += [None]
            expected_debug.append(
                mock.call(
                    "%d measures posted against %d metrics through %d "
                    "resources", len(self.metric_attributes["measures"]), 1,
                    1))

        if self.patchable_attributes:
            expected_calls.append(
                mock.call.resource.update(self.resource_type, resource_id,
                                          self.patchable_attributes))
            if self.update_resource_fail:
                fakeclient.resource.update.side_effect = [Exception('boom!')]
            else:
                expected_debug.append(
                    mock.call('Resource %s updated', self.sample.resource_id))

        batch = fakeclient.metric.batch_resources_metrics_measures
        batch.side_effect = batch_side_effect

        url = netutils.urlsplit("gnocchi://")
        publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)
        publisher.publish_samples([self.sample])

        # Check that the last log message is the expected one
        if (self.post_measure_fail or self.create_resource_fail
                or self.retry_post_measures_fail
                or (self.update_resource_fail and self.patchable_attributes)):
            logger.error.assert_called_with('boom!', exc_info=True)
        else:
            self.assertEqual(0, logger.error.call_count)
        self.assertEqual(expected_calls, fakeclient.mock_calls)
        self.assertEqual(expected_debug, logger.debug.mock_calls)
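
generate_scenarios builds the cross product of the sample and workflow scenario lists via testscenarios; a toy sketch of what multiply_scenarios does:

# Toy illustration of testscenarios.multiply_scenarios.
import testscenarios

left = [('disk', {'unit': 'GB'}), ('power', {'unit': 'W'})]
right = [('normal', {'fail': False}), ('failing', {'fail': True})]
# Yields four scenarios with joined names such as 'disk,normal' and the
# attribute dicts merged.
combined = testscenarios.multiply_scenarios(left, right)
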
Example #5
class TestPrometheusPublisher(base.BaseTestCase):

    resource_id = str(uuid.uuid4())

    sample_data = [
        sample.Sample(
            name='alpha',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='beta',
            type=sample.TYPE_DELTA,
            unit='',
            volume=3,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='gamma',
            type=sample.TYPE_GAUGE,
            unit='',
            volume=5,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.now().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    def setUp(self):
        super(TestPrometheusPublisher, self).setUp()
        self.CONF = service.prepare_service([], [])

    def test_post_samples(self):
        """Test publisher post."""
        parsed_url = urlparse.urlparse(
            'prometheus://localhost:90/metrics/job/os')
        publisher = prometheus.PrometheusPublisher(self.CONF, parsed_url)

        res = requests.Response()
        res.status_code = 200
        with mock.patch.object(requests.Session, 'post',
                               return_value=res) as m_req:
            publisher.publish_samples(self.sample_data)

        data = """# TYPE alpha counter
alpha{resource_id="%s"} 1
beta{resource_id="%s"} 3
# TYPE gamma gauge
gamma{resource_id="%s"} 5
""" % (self.resource_id, self.resource_id, self.resource_id)

        expected = [
            mock.call('http://localhost:90/metrics/job/os',
                      auth=None,
                      cert=None,
                      data=data,
                      headers={'Content-type': 'plain/text'},
                      timeout=5,
                      verify=True)
        ]
        self.assertEqual(expected, m_req.mock_calls)

    def test_post_samples_ssl(self):
        """Test publisher post."""
        parsed_url = urlparse.urlparse(
            'prometheus://localhost:90/metrics/job/os?ssl=1')
        publisher = prometheus.PrometheusPublisher(self.CONF, parsed_url)

        res = requests.Response()
        res.status_code = 200
        with mock.patch.object(requests.Session, 'post',
                               return_value=res) as m_req:
            publisher.publish_samples(self.sample_data)

        data = """# TYPE alpha counter
alpha{resource_id="%s"} 1
beta{resource_id="%s"} 3
# TYPE gamma gauge
gamma{resource_id="%s"} 5
""" % (self.resource_id, self.resource_id, self.resource_id)

        expected = [
            mock.call('https://localhost:90/metrics/job/os',
                      auth=None,
                      cert=None,
                      data=data,
                      headers={'Content-type': 'plain/text'},
                      timeout=5,
                      verify=True)
        ]
        self.assertEqual(expected, m_req.mock_calls)
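
Note what the expected payload implies: cumulative samples are annotated as Prometheus counters and gauges as gauges, while the delta sample gets no # TYPE line at all. A sketch of that implied mapping (the dict name is illustrative):

# Mapping implied by the expected exposition payload.
CEILOMETER_TO_PROMETHEUS_TYPE = {
    sample.TYPE_CUMULATIVE: 'counter',  # emits "# TYPE <name> counter"
    sample.TYPE_GAUGE: 'gauge',         # emits "# TYPE <name> gauge"
    sample.TYPE_DELTA: None,            # no TYPE annotation emitted
}
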
Example #6
def main():
    cfg.CONF([], project='ceilometer')

    parser = argparse.ArgumentParser(description='generate metering data', )
    parser.add_argument(
        '--interval',
        default=10,
        type=int,
        help='the period between events, in minutes',
    )
    parser.add_argument(
        '--start',
        default=31,
        help='the number of days in the past to start timestamps',
    )
    parser.add_argument(
        '--end',
        default=2,
        help='the number of days into the future to continue timestamps',
    )
    parser.add_argument(
        '--type',
        choices=('gauge', 'cumulative'),
        default='gauge',
        help='counter type',
    )
    parser.add_argument(
        '--unit',
        default=None,
        help='counter unit',
    )
    parser.add_argument(
        '--project',
        help='project id of owner',
    )
    parser.add_argument(
        '--user',
        help='user id of owner',
    )
    parser.add_argument(
        'resource',
        help='the resource id for the meter data',
    )
    parser.add_argument(
        'counter',
        help='the counter name for the meter data',
    )
    parser.add_argument(
        'volume',
        help='the amount to attach to the meter',
        type=int,
        default=1,
    )
    args = parser.parse_args()

    # Set up logging to use the console
    console = logging.StreamHandler(sys.stderr)
    console.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(message)s')
    console.setFormatter(formatter)
    root_logger = logging.getLogger('')
    root_logger.addHandler(console)
    root_logger.setLevel(logging.DEBUG)

    # Connect to the metering database
    conn = storage.get_connection(cfg.CONF)

    # Find the user and/or project for a real resource
    if not (args.user or args.project):
        for r in conn.get_resources():
            if r['resource_id'] == args.resource:
                args.user = r['user_id']
                args.project = r['project_id']
                break

    # Compute start and end timestamps for the new data. Per the argument
    # help above, --start and --end are day offsets relative to now
    # (defaults: 31 days back, 2 days ahead).
    timestamp = timeutils.utcnow() - datetime.timedelta(days=int(args.start))
    end = timeutils.utcnow() + datetime.timedelta(days=int(args.end))
    increment = datetime.timedelta(minutes=args.interval)

    # Generate events
    n = 0
    while timestamp <= end:
        c = sample.Sample(
            name=args.counter,
            type=args.type,
            unit=args.unit,
            volume=args.volume,
            user_id=args.user,
            project_id=args.project,
            resource_id=args.resource,
            timestamp=timestamp,
            resource_metadata={},
            source='artificial',
        )
        data = rpc.meter_message_from_counter(
            c, cfg.CONF.publisher_rpc.metering_secret)
        conn.record_metering_data(data)
        n += 1
        timestamp = timestamp + increment

    print('Added %d new events' % n)

    return 0
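
A hypothetical invocation of this generator (script name and IDs are illustrative):

$ python make_samples.py --interval 10 --start 31 --end 2 \
    --type gauge --unit instance --project PROJECT_ID --user USER_ID \
    RESOURCE_ID instance 1
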
Example #7
class TestZaqarPublisher(base.BaseTestCase):

    resource_id = str(uuid.uuid4())

    sample_data = [
        sample.Sample(
            name='alpha',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='beta',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='gamma',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.now().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    event_data = [
        event.Event(message_id=str(uuid.uuid4()),
                    event_type='event_%d' % i,
                    generated=datetime.datetime.utcnow().isoformat(),
                    traits=[],
                    raw={'payload': {
                        'some': 'aa'
                    }}) for i in range(3)
    ]

    def setUp(self):
        super(TestZaqarPublisher, self).setUp()
        self.CONF = service.prepare_service([], [])

    def test_zaqar_publisher_config(self):
        """Test publisher config parameters."""
        parsed_url = urlparse.urlparse('zaqar://')
        self.assertRaises(ValueError, zaqar.ZaqarPublisher, self.CONF,
                          parsed_url)

        parsed_url = urlparse.urlparse('zaqar://?queue=foo&ttl=bar')
        self.assertRaises(ValueError, zaqar.ZaqarPublisher, self.CONF,
                          parsed_url)

        parsed_url = urlparse.urlparse('zaqar://?queue=foo&ttl=60')
        publisher = zaqar.ZaqarPublisher(self.CONF, parsed_url)
        self.assertEqual(60, publisher.ttl)

        parsed_url = urlparse.urlparse('zaqar://?queue=foo')
        publisher = zaqar.ZaqarPublisher(self.CONF, parsed_url)
        self.assertEqual(3600, publisher.ttl)
        self.assertEqual('foo', publisher.queue_name)

    @mock.patch('zaqarclient.queues.v2.queues.Queue')
    def test_zaqar_post_samples(self, mock_queue):
        """Test publisher post."""
        parsed_url = urlparse.urlparse('zaqar://?queue=foo')
        publisher = zaqar.ZaqarPublisher(self.CONF, parsed_url)
        mock_post = mock.Mock()
        mock_queue.return_value = mock_post

        publisher.publish_samples(self.sample_data)

        mock_queue.assert_called_once_with(mock.ANY, 'foo')
        self.assertEqual(3, len(mock_post.post.call_args_list[0][0][0]))
        self.assertEqual(mock_post.post.call_args_list[0][0][0][0]['body'],
                         self.sample_data[0].as_dict())

    @mock.patch('zaqarclient.queues.v2.queues.Queue')
    def test_zaqar_post_events(self, mock_queue):
        """Test publisher post."""
        parsed_url = urlparse.urlparse('zaqar://?queue=foo')
        publisher = zaqar.ZaqarPublisher(self.CONF, parsed_url)
        mock_post = mock.Mock()
        mock_queue.return_value = mock_post

        publisher.publish_events(self.event_data)

        mock_queue.assert_called_once_with(mock.ANY, 'foo')
        self.assertEqual(3, len(mock_post.post.call_args_list[0][0][0]))
        self.assertEqual(mock_post.post.call_args_list[0][0][0][0]['body'],
                         self.event_data[0].serialize())
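
The assertions imply that each sample is wrapped as one Zaqar message whose body is Sample.as_dict(); a minimal sketch of that shape (the ttl mirrors the publisher default exercised above):

# Message shape implied by the assertions.
messages = [{'body': s.as_dict(), 'ttl': 3600} for s in sample_data]
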
Example #8
class TestUDPPublisher(base.BaseTestCase):
    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test3',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
    ]

    @staticmethod
    def _make_fake_socket(published):
        def _fake_socket_socket(family, type):
            def record_data(msg, dest):
                published.append((msg, dest))

            udp_socket = mock.Mock()
            udp_socket.sendto = record_data
            return udp_socket

        return _fake_socket_socket

    def setUp(self):
        super(TestUDPPublisher, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF.publisher.telemetry_secret = 'not-so-secret'

    def _check_udp_socket(self, url, expected_addr_family):
        with mock.patch.object(socket, 'socket') as mock_socket:
            udp.UDPPublisher(self.CONF, netutils.urlsplit(url))
            mock_socket.assert_called_with(expected_addr_family,
                                           socket.SOCK_DGRAM)

    def test_publisher_udp_socket_ipv4(self):
        self._check_udp_socket('udp://127.0.0.1:4952',
                               socket.AF_INET)

    def test_publisher_udp_socket_ipv6(self):
        self._check_udp_socket('udp://[::1]:4952',
                               socket.AF_INET6)

    def test_publisher_udp_socket_ipv4_hostname(self):
        host = "ipv4.google.com"
        try:
            socket.getaddrinfo(host, None,
                               socket.AF_INET,
                               socket.SOCK_DGRAM)
        except socket.gaierror:
            self.skipTest("cannot resolve not running test")
        url = "udp://"+host+":4952"
        self._check_udp_socket(url, socket.AF_INET)

    def test_publisher_udp_socket_ipv6_hostname(self):
        host = "ipv6.google.com"
        try:
            socket.getaddrinfo(host, None,
                               socket.AF_INET6,
                               socket.SOCK_DGRAM)
        except socket.gaierror:
            self.skipTest("cannot resolve not running test")
        url = "udp://"+host+":4952"
        self._check_udp_socket(url, socket.AF_INET6)

    def test_published(self):
        self.data_sent = []
        with mock.patch('socket.socket',
                        self._make_fake_socket(self.data_sent)):
            publisher = udp.UDPPublisher(
                self.CONF,
                netutils.urlsplit('udp://somehost'))
        publisher.publish_samples(self.test_data)

        self.assertEqual(5, len(self.data_sent))

        sent_counters = []

        for data, dest in self.data_sent:
            counter = msgpack.loads(data, encoding="utf-8")
            sent_counters.append(counter)

            # Check destination
            self.assertEqual(('somehost',
                              self.CONF.collector.udp_port), dest)

        # Check that counters are equal
        def sort_func(counter):
            return counter['counter_name']

        counters = [utils.meter_message_from_counter(d, "not-so-secret")
                    for d in self.test_data]
        counters.sort(key=sort_func)
        sent_counters.sort(key=sort_func)
        self.assertEqual(counters, sent_counters)

    @staticmethod
    def _raise_ioerror(*args):
        raise IOError

    def _make_broken_socket(self, family, type):
        udp_socket = mock.Mock()
        udp_socket.sendto = self._raise_ioerror
        return udp_socket

    def test_publish_error(self):
        with mock.patch('socket.socket',
                        self._make_broken_socket):
            publisher = udp.UDPPublisher(
                self.CONF,
                netutils.urlsplit('udp://localhost'))
        publisher.publish_samples(self.test_data)
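
What travels over the wire is the signed meter message serialized with msgpack, one datagram per sample; a minimal sketch using the same helpers the test imports:

# Sketch of the datagram the UDP publisher is expected to send.
import msgpack

msg = utils.meter_message_from_counter(test_data[0], 'not-so-secret')
datagram = msgpack.dumps(msg)  # then socket.sendto(datagram, (host, port))
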
Example #9
class TestMonascaPublisher(base.BaseTestCase):

    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    field_mappings = {
        'dimensions': [
            'resource_id', 'project_id', 'user_id', 'geolocation', 'region',
            'availability_zone'
        ],
        'metadata': {
            'common':
            ['event_type', 'audit_period_beginning', 'audit_period_ending'],
            'image': ['size', 'status'],
            'image.delete': ['size', 'status'],
            'image.size': ['size', 'status'],
            'image.update': ['size', 'status'],
            'image.upload': ['size', 'status'],
            'instance': ['state', 'state_description'],
            'snapshot': ['status'],
            'snapshot.size': ['status'],
            'volume': ['status'],
            'volume.size': ['status'],
        }
    }

    @staticmethod
    def create_side_effect(exception_type, test_exception):
        def side_effect(*args, **kwargs):
            if test_exception.pop():
                raise exception_type
            else:
                return FakeResponse(204)

        return side_effect

    def setUp(self):
        super(TestMonascaPublisher, self).setUp()

        self.CONF = service.prepare_service([], [])
        self.parsed_url = mock.MagicMock()

    def tearDown(self):
        # For some reason, a required option named auth_url gets registered
        # on cfg.CONF after these tests run, which occasionally blocks test
        # case test_event_pipeline_endpoint_requeue_on_failure, so we
        # reset the configuration here.
        self.CONF.reset()
        # self.CONF.unregister_opt(cfg.StrOpt('service_auth_url'),
        #                          group='monasca')
        super(TestMonascaPublisher, self).tearDown()

    @mock.patch("ceilometer.publisher.monasca_data_filter."
                "MonascaDataFilter._get_mapping",
                side_effect=[field_mappings])
    def test_publisher_publish(self, mapping_patch):
        self.CONF.set_override('batch_mode', False, group='monasca')
        publisher = mon_publisher.MonascaPublisher(self.CONF, self.parsed_url)
        publisher.mon_client = mock.MagicMock()

        with mock.patch.object(publisher.mon_client,
                               'metrics_create') as mock_create:
            mock_create.return_value = FakeResponse(204)
            publisher.publish_samples(self.test_data)
            self.assertEqual(3, mock_create.call_count)
            self.assertEqual(1, mapping_patch.called)

    @mock.patch("ceilometer.publisher.monasca_data_filter."
                "MonascaDataFilter._get_mapping",
                side_effect=[field_mappings])
    def test_publisher_batch(self, mapping_patch):
        self.CONF.set_override('batch_mode', True, group='monasca')
        self.CONF.set_override('batch_count', 3, group='monasca')
        self.CONF.set_override('batch_polling_interval', 1, group='monasca')

        publisher = mon_publisher.MonascaPublisher(self.CONF, self.parsed_url)
        publisher.mon_client = mock.MagicMock()
        with mock.patch.object(publisher.mon_client,
                               'metrics_create') as mock_create:
            mock_create.return_value = FakeResponse(204)
            publisher.publish_samples(self.test_data)
            time.sleep(10)
            self.assertEqual(1, mock_create.call_count)
            self.assertEqual(1, mapping_patch.called)

    @mock.patch("ceilometer.publisher.monasca_data_filter."
                "MonascaDataFilter._get_mapping",
                side_effect=[field_mappings])
    def test_publisher_batch_retry(self, mapping_patch):
        self.CONF.set_override('batch_mode', True, group='monasca')
        self.CONF.set_override('batch_count', 3, group='monasca')
        self.CONF.set_override('batch_polling_interval', 1, group='monasca')
        self.CONF.set_override('retry_on_failure', True, group='monasca')
        # Constant in code for @periodicals, can't override
        # self.CONF.set_override('retry_interval', 2, group='monasca')
        self.CONF.set_override('batch_max_retries', 1, group='monasca')

        publisher = mon_publisher.MonascaPublisher(self.CONF, self.parsed_url)
        publisher.mon_client = mock.MagicMock()
        with mock.patch.object(publisher.mon_client,
                               'metrics_create') as mock_create:
            raise_http_error = [False, False, False, True]
            mock_create.side_effect = self.create_side_effect(
                mon_client.MonascaServiceException, raise_http_error)
            publisher.publish_samples(self.test_data)
            time.sleep(60)
            self.assertEqual(4, mock_create.call_count)
            self.assertEqual(1, mapping_patch.called)

    @mock.patch("ceilometer.publisher.monasca_data_filter."
                "MonascaDataFilter._get_mapping",
                side_effect=[field_mappings])
    def test_publisher_archival_on_failure(self, mapping_patch):
        self.CONF.set_override('archive_on_failure', True, group='monasca')
        self.CONF.set_override('batch_mode', False, group='monasca')
        self.fake_publisher = mock.Mock()

        self.useFixture(
            fixtures.MockPatch('ceilometer.publisher.file.FilePublisher',
                               return_value=self.fake_publisher))

        publisher = mon_publisher.MonascaPublisher(self.CONF, self.parsed_url)
        publisher.mon_client = mock.MagicMock()

        with mock.patch.object(publisher.mon_client,
                               'metrics_create') as mock_create:
            mock_create.side_effect = Exception
            metrics_archiver = self.fake_publisher.publish_samples
            publisher.publish_samples(self.test_data)
            self.assertEqual(1, metrics_archiver.called)
            self.assertEqual(3, metrics_archiver.call_count)
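
FakeResponse is a test helper not shown in this excerpt; a minimal sketch of the shape the calls above assume:

# Minimal stand-in for the FakeResponse helper (assumed shape).
class FakeResponse(object):
    def __init__(self, status_code):
        self.status_code = status_code
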
Example #10
    def get_samples(self, manager, cache, resources):
        # TODO: global conf
        sess = get_session(conf2)
        neutron = clientN.Client(session=sess)

        # initialize some variables:
        pool_size = 0
        alloc_ip = 0
        used_ip = 0
        subNet = list()
        subNetId = list()
        regionArray = list()
        metaD = dict()  # region metadata, filled in further below
        try:
            nL = neutron.list_networks()
        except Exception as e:
            LOG.error(e)
            raise

        netlist = cfg.CONF.region.netlist or list()
        LOG.debug("Netlist %s", netlist)

        # compute the size of the pool
        if nL and "networks" in nL:
            for nLi in nL["networks"]:
                if ("id" in nLi) and ("name"
                                      in nLi) and nLi["name"] in netlist and (
                                          "subnets" in nLi):
                    for sNi in nLi['subnets']:
                        sN = neutron.show_subnet(sNi)
                        if ("subnet" in sN) and ("cidr" in sN["subnet"]) and (
                                "allocation_pools" in sN["subnet"]):
                            subNetId.append(sN['subnet']['id'])
                            if sN["subnet"]["allocation_pools"] and len(
                                    sN["subnet"]["allocation_pools"]) > 0:
                                for pool in sN["subnet"]["allocation_pools"]:
                                    subNet.append(
                                        IPRange(pool["start"], pool["end"]))
                                    pool_size += IPRange(
                                        pool["start"], pool["end"]).size

        # compute the IP usage
        netF = neutron.list_floatingips()
        if netF and "floatingips" in netF:
            for netFi in netF["floatingips"]:
                for tmp_pool in subNet:
                    if "floating_ip_address" in netFi and IPAddress(
                            netFi["floating_ip_address"]) in tmp_pool:
                        alloc_ip += 1
                        if "fixed_ip_address" in netFi and netFi[
                                "fixed_ip_address"]:
                            used_ip += 1
                            break
                        break

        # check if some routers are using IPs
        r_L = neutron.list_routers()
        if "routers" in r_L:
            for r_li in r_L["routers"]:
                if "external_gateway_info" in r_li \
                        and r_li["external_gateway_info"] \
                        and "external_fixed_ips" in r_li["external_gateway_info"] \
                        and len(r_li["external_gateway_info"]["external_fixed_ips"]) > 0:
                    for tmp_r_id in r_li["external_gateway_info"][
                            "external_fixed_ips"]:
                        if "subnet_id" in tmp_r_id and tmp_r_id[
                                "subnet_id"] in subNetId:
                            alloc_ip += 1
                            used_ip += 1

        # create region Object
        # build metadata
        metaD["name"] = (cfg.CONF.service_credentials.region_name
                         if cfg.CONF.service_credentials.region_name else None)
        metaD["latitude"] = (cfg.CONF.region.latitude
                             if cfg.CONF.region.latitude else 0.0)
        metaD["longitude"] = (cfg.CONF.region.longitude
                              if cfg.CONF.region.longitude else 0.0)
        metaD["location"] = (cfg.CONF.region.location
                             if cfg.CONF.region.location else None)
        metaD["ram_allocation_ratio"] = (
            cfg.CONF.region.ram_allocation_ratio
            if cfg.CONF.region.ram_allocation_ratio else None)
        metaD["cpu_allocation_ratio"] = (
            cfg.CONF.region.cpu_allocation_ratio
            if cfg.CONF.region.cpu_allocation_ratio else None)

        # store components versions as metadata
        manager = OSVersionComponent.OpenStackComponentVersionManager()
        c_versions = manager.get_all_components_version()
        for c in c_versions:
            if c['isInstalled']:
                name = c['component'] + "_version"
                version = c['attributes']['version']
                metaD[name] = version
            else:
                LOG.debug("Component not intalled: %s " % (c))

        LOG.debug("Publish region metadata: %s " % (metaD))

        # build samples
        regionArray.append({
            'name': 'region.pool_ip',
            'unit': '#',
            'value': (pool_size if pool_size else 0)
        })
        regionArray.append({
            'name': 'region.allocated_ip',
            'unit': '#',
            'value': (alloc_ip if alloc_ip else 0)
        })
        regionArray.append({
            'name': 'region.used_ip',
            'unit': '#',
            'value': (used_ip if used_ip else 0)
        })

        # loop over the region Object
        for regionInfo in regionArray:
            my_sample = sample.Sample(
                name=regionInfo['name'],
                type="gauge",
                unit=regionInfo['unit'],
                volume=regionInfo['value'],
                user_id=None,
                project_id=None,
                resource_id=cfg.CONF.service_credentials.region_name,
                timestamp=timeutils.isotime(),
                resource_metadata=metaD)
            LOG.debug("Publish sample: %s" % (my_sample))
            yield my_sample
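
This pollster also depends on module-level context the excerpt omits (conf2, get_session, clientN, OSVersionComponent), and on netaddr for the address arithmetic. A short sketch of the netaddr pieces it uses:

# netaddr supplies the IP arithmetic used above.
from netaddr import IPAddress, IPRange

pool = IPRange('10.0.0.10', '10.0.0.20')
pool.size                         # 11 addresses in this allocation pool
IPAddress('10.0.0.15') in pool    # True, so it would count toward alloc_ip
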
Example #11
class TestFilePublisher(test.BaseTestCase):

    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    def test_file_publisher_maxbytes(self):
        # Test valid configurations
        tempdir = tempfile.mkdtemp()
        name = '%s/log_file' % tempdir
        parsed_url = utils.urlsplit('file://%s?max_bytes=50&backup_count=3' %
                                    name)
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_samples(None, self.test_data)

        handler = publisher.publisher_logger.handlers[0]
        self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
        self.assertEqual(
            [50, name, 3],
            [handler.maxBytes, handler.baseFilename, handler.backupCount])
        # The rotated file gets created since we only allow 50 bytes.
        self.assertTrue(os.path.exists('%s.1' % name))

    def test_file_publisher(self):
        # Test missing max bytes, backup count configurations
        tempdir = tempfile.mkdtemp()
        name = '%s/log_file_plain' % tempdir
        parsed_url = utils.urlsplit('file://%s' % name)
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_samples(None, self.test_data)

        handler = publisher.publisher_logger.handlers[0]
        self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
        self.assertEqual(
            [0, name, 0],
            [handler.maxBytes, handler.baseFilename, handler.backupCount])
        # Check the content is correctly saved in the file
        self.assertTrue(os.path.exists(name))
        with open(name, 'r') as f:
            content = f.read()
        for s in self.test_data:
            self.assertIn(s.id, content)
            self.assertIn(s.timestamp, content)

    def test_file_publisher_invalid(self):
        # Test invalid max bytes, backup count configurations
        tempdir = tempfile.mkdtemp()
        parsed_url = utils.urlsplit('file://%s/log_file_bad'
                                    '?max_bytes=yus&backup_count=5y' % tempdir)
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_samples(None, self.test_data)

        self.assertIsNone(publisher.publisher_logger)
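
The URL query parameters map directly onto logging.handlers.RotatingFileHandler settings, which is what the assertions above verify; a sketch of the equivalent handler (path is illustrative):

# Handler equivalent to file:///tmp/log_file?max_bytes=50&backup_count=3.
import logging.handlers

handler = logging.handlers.RotatingFileHandler(
    '/tmp/log_file', maxBytes=50, backupCount=3)
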
Example #12
    def setUp(self):
        super(TestListResourcesBase, self).setUp()

        for cnt in [
                sample.Sample(
                    'instance',
                    'cumulative',
                    '',
                    1,
                    'user-id',
                    'project-id',
                    'resource-id',
                    timestamp=datetime.datetime(2012, 7, 2, 10, 40),
                    resource_metadata={
                        'display_name': 'test-server',
                        'tag': 'self.counter'
                    },
                    source='test_list_resources',
                ),
                sample.Sample(
                    'instance',
                    'cumulative',
                    '',
                    1,
                    'user-id',
                    'project-id',
                    'resource-id-alternate',
                    timestamp=datetime.datetime(2012, 7, 2, 10, 41),
                    resource_metadata={
                        'display_name': 'test-server',
                        'tag': 'self.counter2'
                    },
                    source='test_list_resources',
                ),
                sample.Sample(
                    'instance',
                    'cumulative',
                    '',
                    1,
                    'user-id2',
                    'project-id2',
                    'resource-id2',
                    timestamp=datetime.datetime(2012, 7, 2, 10, 42),
                    resource_metadata={
                        'display_name': 'test-server',
                        'tag': 'self.counter3'
                    },
                    source='test_list_resources',
                ),
                sample.Sample(
                    'instance',
                    'cumulative',
                    '',
                    1,
                    'user-id',
                    'project-id',
                    'resource-id',
                    timestamp=datetime.datetime(2012, 7, 2, 10, 43),
                    resource_metadata={
                        'display_name': 'test-server',
                        'tag': 'self.counter4'
                    },
                    source='test_list_resources',
                )
        ]:
            msg = rpc.meter_message_from_counter(
                cnt, cfg.CONF.publisher_rpc.metering_secret)
            self.conn.record_metering_data(msg)
Example No. 13
def make_test_data(
    conn,
    name,
    meter_type,
    unit,
    volume,
    random_min,
    random_max,
    user_id,
    project_id,
    resource_id,
    start,
    end,
    interval,
    resource_metadata=None,
    source='artificial',
):
    # Avoid the mutable-default-argument pitfall.
    resource_metadata = resource_metadata or {}

    # Compute start and end timestamps for the new data.
    if isinstance(start, datetime.datetime):
        timestamp = start
    else:
        timestamp = timeutils.parse_strtime(start)

    if not isinstance(end, datetime.datetime):
        end = timeutils.parse_strtime(end)

    increment = datetime.timedelta(minutes=interval)

    print('Adding new samples for meter %s.' % name)
    # Generate samples
    n = 0
    total_volume = volume
    meter_names = ["meter" + name + str(i) for i in range(1, 50)]
    resource_ids = [
        "resource" + resource_id + str(i) for i in range(1, 500)
    ]

    thread_id = threading.current_thread().ident

    print("thread_id, curr_sample_count, samples_per_sec, timestamp")

    t0 = time.time()
    while timestamp <= end:
        if random_min >= 0 and random_max >= 0:
            # If a random element is defined, add it to the
            # user-given volume.
            if isinstance(random_min, int) and isinstance(random_max, int):
                total_volume += random.randint(random_min, random_max)
            else:
                total_volume += random.uniform(random_min, random_max)

        c = sample.Sample(
            name=random.choice(meter_names),
            type=meter_type,
            unit=unit,
            volume=total_volume,
            user_id=user_id,
            project_id=project_id,
            resource_id=random.choice(resource_ids),
            timestamp=timestamp,
            resource_metadata=resource_metadata,
            source=source,
        )
        data = utils.meter_message_from_counter(
            c, cfg.CONF.publisher.metering_secret)
        conn.record_metering_data(data)
        n += 1
        timestamp = timestamp + increment
        t1 = time.time()
        if not n % 1000:
            print("%d, %d, %f, %f" % (id, get_current_sample_count(conn),
                                      (n / (t1 - t0)), t1))

        if meter_type in ('gauge', 'delta'):
            # For delta and gauge, we don't want to increase the value
            # in time by random element. So we always set it back to
            # volume.
            total_volume = volume

    t1 = time.time()
    totaltime = t1 - t0
    print("%d, %d, %f, %f" % (id, get_current_sample_count(conn),
                              (n / (t1 - t0)), t1))

    print(
        'Thread %d added %d samples, total time %f sec, avg %f samples/sec, '
        'ts %f' % (thread_id, n, totaltime, (n / totaltime), t1))
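
A hypothetical invocation of make_test_data; the storage connection `conn`
is assumed to come from the configured storage driver and is not shown here:

make_test_data(
    conn=conn,                      # assumed: a storage driver connection
    name='cpu_util',
    meter_type='gauge',
    unit='%',
    volume=10,
    random_min=0,
    random_max=5,
    user_id='user-id',
    project_id='project-id',
    resource_id='resource-id',
    start=datetime.datetime(2014, 1, 1, 0, 0),
    end=datetime.datetime(2014, 1, 1, 6, 0),
    interval=10,                    # minutes between generated samples
)
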
Example No. 14
    def post(self, direct='', samples=None):
        """Post a list of new Samples to Telemetry.

        :param direct: a flag indicates whether the samples will be posted
                       directly to storage or not.
        :param samples: a list of samples within the request body.
        """
        rbac.enforce('create_samples', pecan.request)

        direct = strutils.bool_from_string(direct)
        if not samples:
            msg = _('Samples should be included in request body')
            raise base.ClientSideError(msg)

        now = timeutils.utcnow()
        auth_project = rbac.get_limited_to_project(pecan.request.headers)
        def_source = pecan.request.cfg.sample_source
        def_project_id = pecan.request.headers.get('X-Project-Id')
        def_user_id = pecan.request.headers.get('X-User-Id')

        published_samples = []
        for s in samples:
            if self.meter_name != s.counter_name:
                raise wsme.exc.InvalidInput('counter_name', s.counter_name,
                                            'should be %s' % self.meter_name)

            if s.message_id:
                raise wsme.exc.InvalidInput('message_id', s.message_id,
                                            'The message_id must not be set')

            if s.counter_type not in sample.TYPES:
                raise wsme.exc.InvalidInput(
                    'counter_type', s.counter_type,
                    'The counter type must be: ' + ', '.join(sample.TYPES))

            s.user_id = (s.user_id or def_user_id)
            s.project_id = (s.project_id or def_project_id)
            s.source = '%s:%s' % (s.project_id, (s.source or def_source))
            s.timestamp = (s.timestamp or now)

            if auth_project and auth_project != s.project_id:
                # non admin user trying to cross post to another project_id
                auth_msg = 'can not post samples to other projects'
                raise wsme.exc.InvalidInput('project_id', s.project_id,
                                            auth_msg)

            published_sample = sample.Sample(
                name=s.counter_name,
                type=s.counter_type,
                unit=s.counter_unit,
                volume=s.counter_volume,
                user_id=s.user_id,
                project_id=s.project_id,
                resource_id=s.resource_id,
                timestamp=s.timestamp.isoformat(),
                resource_metadata=utils.restore_nesting(s.resource_metadata,
                                                        separator='.'),
                source=s.source)
            s.message_id = published_sample.id

            sample_dict = publisher_utils.meter_message_from_counter(
                published_sample, pecan.request.cfg.publisher.telemetry_secret)
            if direct:
                ts = timeutils.parse_isotime(sample_dict['timestamp'])
                sample_dict['timestamp'] = timeutils.normalize_time(ts)
                pecan.request.storage_conn.record_metering_data(sample_dict)
            else:
                published_samples.append(sample_dict)
        if not direct:
            pecan.request.notifier.sample(
                {
                    'user': def_user_id,
                    'tenant': def_project_id,
                    'is_admin': True
                }, 'telemetry.api', {'samples': published_samples})

        return samples
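
For orientation, a sketch of the client side of this endpoint; the service
URL, port and token are placeholders, while the counter_* field names come
from the validation code above:

import json
import requests

body = [{
    'counter_name': 'cpu_util',   # must match the meter name in the URL
    'counter_type': 'gauge',      # must be one of sample.TYPES
    'counter_unit': '%',
    'counter_volume': 12.5,
    'resource_id': 'resource-id',
}]
resp = requests.post(
    'http://localhost:8777/v2/meters/cpu_util?direct=True',
    headers={'X-Auth-Token': 'ADMIN_TOKEN',
             'Content-Type': 'application/json'},
    data=json.dumps(body))
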
Example No. 15
class TestUDPPublisher(base.BaseTestCase):
    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test3',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
    ]

    @staticmethod
    def _make_fake_socket(published):
        def _fake_socket_socket(family, type):
            def record_data(msg, dest):
                published.append((msg, dest))

            udp_socket = mock.Mock()
            udp_socket.sendto = record_data
            return udp_socket

        return _fake_socket_socket

    def setUp(self):
        super(TestUDPPublisher, self).setUp()
        self.CONF = service.prepare_service([], [])
        self.CONF.publisher.telemetry_secret = 'not-so-secret'

    def test_published(self):
        self.data_sent = []
        with mock.patch('socket.socket',
                        self._make_fake_socket(self.data_sent)):
            publisher = udp.UDPPublisher(self.CONF,
                                         netutils.urlsplit('udp://somehost'))
        publisher.publish_samples(self.test_data)

        self.assertEqual(5, len(self.data_sent))

        sent_counters = []

        for data, dest in self.data_sent:
            counter = msgpack.loads(data, encoding="utf-8")
            sent_counters.append(counter)

            # Check destination
            self.assertEqual(('somehost', 4952), dest)

        # Check that counters are equal
        def sort_func(counter):
            return counter['counter_name']

        counters = [
            utils.meter_message_from_counter(d, "not-so-secret")
            for d in self.test_data
        ]
        counters.sort(key=sort_func)
        sent_counters.sort(key=sort_func)
        self.assertEqual(counters, sent_counters)

    @staticmethod
    def _raise_ioerror(*args):
        raise IOError

    def _make_broken_socket(self, family, type):
        udp_socket = mock.Mock()
        udp_socket.sendto = self._raise_ioerror
        return udp_socket

    def test_publish_error(self):
        with mock.patch('socket.socket', self._make_broken_socket):
            publisher = udp.UDPPublisher(self.CONF,
                                         netutils.urlsplit('udp://localhost'))
        publisher.publish_samples(self.test_data)
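
For reference, a minimal sketch of the behaviour the fake sockets above stand
in for: each meter message is msgpack-encoded and sent with sendto(), and
publish errors do not propagate (the port and error handling are inferred
from the assertions, not copied from the publisher source):

import socket
import msgpack


def publish_udp(meter_messages, host='somehost', port=4952):
    conn = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    for msg in meter_messages:
        try:
            conn.sendto(msgpack.dumps(msg), (host, port))
        except IOError:
            pass  # test_publish_error expects failures to be swallowed
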
class TestKafkaPublisher(tests_base.BaseTestCase):
    test_event_data = [
        event.Event(message_id=uuid.uuid4(),
                    event_type='event_%d' % i,
                    generated=datetime.datetime.utcnow(),
                    traits=[],
                    raw={}) for i in range(0, 5)
    ]

    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test3',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    def setUp(self):
        super(TestKafkaPublisher, self).setUp()

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit('kafka://127.0.0.1:9092?topic=ceilometer'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = mock.Mock()
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_without_options(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit('kafka://127.0.0.1:9092'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = mock.Mock()
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_without_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit('kafka://127.0.0.1:9092?topic=ceilometer'))
        self.assertEqual('default', publisher.policy)

        publisher = KafkaBrokerPublisher(
            netutils.urlsplit(
                'kafka://127.0.0.1:9092?topic=ceilometer&policy=test'))
        self.assertEqual('default', publisher.policy)

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_with_default_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit(
                'kafka://127.0.0.1:9092?topic=ceilometer&policy=default'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = TypeError
            self.assertRaises(TypeError, publisher.publish_samples,
                              mock.MagicMock(), self.test_data)
            self.assertEqual(100, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_with_drop_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit(
                'kafka://127.0.0.1:9092?topic=ceilometer&policy=drop'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = Exception("test")
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_with_queue_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit(
                'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.side_effect = Exception("test")
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(fake_send.mock_calls))
            self.assertEqual(1, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_down_host_with_default_queue_size(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit(
                'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))

        for i in range(0, 2000):
            for s in self.test_data:
                s.name = 'test-%d' % i
            publisher.publish_samples(mock.MagicMock(), self.test_data)

        self.assertEqual(1024, len(publisher.local_queue))
        self.assertEqual('test-976',
                         publisher.local_queue[0][0]['counter_name'])
        self.assertEqual('test-1999',
                         publisher.local_queue[1023][0]['counter_name'])

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_to_host_from_down_to_up_with_queue(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit(
                'kafka://127.0.0.1:9092?topic=ceilometer&policy=queue'))

        for i in range(0, 16):
            for s in self.test_data:
                s.name = 'test-%d' % i
            publisher.publish_samples(mock.MagicMock(), self.test_data)

        self.assertEqual(16, len(publisher.local_queue))

        with mock.patch.object(publisher, '_send') as fake_send:
            fake_send.return_value = mock.Mock()
            for s in self.test_data:
                s.name = 'test-%d' % 16
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(0, len(publisher.local_queue))

    @mock.patch.object(KafkaBrokerPublisher, '_get_client')
    def test_publish_event_with_default_policy(self, mock_method):
        publisher = KafkaBrokerPublisher(
            netutils.urlsplit('kafka://127.0.0.1:9092?topic=ceilometer'))

        with mock.patch.object(KafkaBrokerPublisher, '_send') as fake_send:
            publisher.publish_events(mock.MagicMock(), self.test_event_data)
            self.assertEqual(1, len(fake_send.mock_calls))

        with mock.patch.object(KafkaBrokerPublisher, '_send') as fake_send:
            fake_send.side_effect = TypeError
            self.assertRaises(TypeError, publisher.publish_events,
                              mock.MagicMock(), self.test_event_data)
            self.assertEqual(100, len(fake_send.mock_calls))
            self.assertEqual(0, len(publisher.local_queue))
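
The queue-policy tests above imply a bounded FIFO that keeps only the newest
1024 batches; a sketch of that behaviour, inferred from the assertions (e.g.
the 'test-976' batch surviving at index 0 after 2000 publishes):

def cap_queue(local_queue, max_length=1024):
    if len(local_queue) > max_length:
        # Drop the oldest batches, keep the most recent max_length ones.
        del local_queue[:len(local_queue) - max_length]
    return local_queue
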
Example No. 17
class TestPublish(tests_base.BaseTestCase):
    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test3',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    def setUp(self):
        super(TestPublish, self).setUp()
        self.CONF = self.useFixture(config.Config()).conf
        self.setup_messaging(self.CONF)
        self.published = []

    def test_published_no_mock(self):
        publisher = rpc.RPCPublisher(network_utils.urlsplit('rpc://'))

        endpoint = mock.MagicMock(['record_metering_data'])
        collector = messaging.get_rpc_server(
            self.transport, self.CONF.publisher_rpc.metering_topic, endpoint)
        endpoint.record_metering_data.side_effect = (
            lambda *args, **kwds: collector.stop())

        collector.start()
        eventlet.sleep()
        publisher.publish_samples(context.RequestContext(), self.test_data)
        collector.wait()

        class Matcher(object):
            @staticmethod
            def __eq__(data):
                for i, sample_item in enumerate(data):
                    if sample_item['counter_name'] != self.test_data[i].name:
                        return False
                return True

        endpoint.record_metering_data.assert_called_once_with(mock.ANY,
                                                              data=Matcher())

    def test_publish_target(self):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?target=custom_procedure_call'))
        cast_context = mock.MagicMock()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.return_value = cast_context
            publisher.publish_samples(mock.MagicMock(), self.test_data)

        prepare.assert_called_once_with(
            topic=self.CONF.publisher_rpc.metering_topic)
        cast_context.cast.assert_called_once_with(mock.ANY,
                                                  'custom_procedure_call',
                                                  data=mock.ANY)

    def test_published_with_per_meter_topic(self):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?per_meter_topic=1'))
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            publisher.publish_samples(mock.MagicMock(), self.test_data)

            class MeterGroupMatcher(object):
                def __eq__(self, meters):
                    return len(set(meter['counter_name']
                                   for meter in meters)) == 1

            topic = self.CONF.publisher_rpc.metering_topic
            expected = [
                mock.call(topic=topic),
                mock.call().cast(mock.ANY,
                                 'record_metering_data',
                                 data=mock.ANY),
                mock.call(topic=topic + '.test'),
                mock.call().cast(mock.ANY,
                                 'record_metering_data',
                                 data=MeterGroupMatcher()),
                mock.call(topic=topic + '.test2'),
                mock.call().cast(mock.ANY,
                                 'record_metering_data',
                                 data=MeterGroupMatcher()),
                mock.call(topic=topic + '.test3'),
                mock.call().cast(mock.ANY,
                                 'record_metering_data',
                                 data=MeterGroupMatcher())
            ]
            self.assertEqual(expected, prepare.mock_calls)

    def test_published_concurrency(self):
        """Test concurrent access to the local queue of the rpc publisher."""

        publisher = rpc.RPCPublisher(network_utils.urlsplit('rpc://'))
        cast_context = mock.MagicMock()

        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:

            def fake_prepare_go(topic):
                return cast_context

            def fake_prepare_wait(topic):
                prepare.side_effect = fake_prepare_go
                # Sleep to simulate concurrency and allow other threads to work
                eventlet.sleep(0)
                return cast_context

            prepare.side_effect = fake_prepare_wait

            job1 = eventlet.spawn(publisher.publish_samples, mock.MagicMock(),
                                  self.test_data)
            job2 = eventlet.spawn(publisher.publish_samples, mock.MagicMock(),
                                  self.test_data)

            job1.wait()
            job2.wait()

        self.assertEqual('default', publisher.policy)
        self.assertEqual(2, len(cast_context.cast.mock_calls))
        self.assertEqual(0, len(publisher.local_queue))

    @mock.patch('ceilometer.publisher.rpc.LOG')
    def test_published_with_no_policy(self, mylog):
        publisher = rpc.RPCPublisher(network_utils.urlsplit('rpc://'))
        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.side_effect = side_effect

            self.assertRaises(oslo.messaging._drivers.common.RPCException,
                              publisher.publish_samples, mock.MagicMock(),
                              self.test_data)
            self.assertTrue(mylog.info.called)
            self.assertEqual('default', publisher.policy)
            self.assertEqual(0, len(publisher.local_queue))
            prepare.assert_called_once_with(
                topic=self.CONF.publisher_rpc.metering_topic)

    @mock.patch('ceilometer.publisher.rpc.LOG')
    def test_published_with_policy_block(self, mylog):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=default'))
        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.side_effect = side_effect
            self.assertRaises(oslo.messaging._drivers.common.RPCException,
                              publisher.publish_samples, mock.MagicMock(),
                              self.test_data)
            self.assertTrue(mylog.info.called)
            self.assertEqual(0, len(publisher.local_queue))
            prepare.assert_called_once_with(
                topic=self.CONF.publisher_rpc.metering_topic)

    @mock.patch('ceilometer.publisher.rpc.LOG')
    def test_published_with_policy_incorrect(self, mylog):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=notexist'))
        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.side_effect = side_effect
            self.assertRaises(oslo.messaging._drivers.common.RPCException,
                              publisher.publish_samples, mock.MagicMock(),
                              self.test_data)
            self.assertTrue(mylog.warn.called)
            self.assertEqual('default', publisher.policy)
            self.assertEqual(0, len(publisher.local_queue))
            prepare.assert_called_once_with(
                topic=self.CONF.publisher_rpc.metering_topic)

    def test_published_with_policy_drop_and_rpc_down(self):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=drop'))
        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.side_effect = side_effect
            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(0, len(publisher.local_queue))
            prepare.assert_called_once_with(
                topic=self.CONF.publisher_rpc.metering_topic)

    def test_published_with_policy_queue_and_rpc_down(self):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=queue'))
        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.side_effect = side_effect

            publisher.publish_samples(mock.MagicMock(), self.test_data)
            self.assertEqual(1, len(publisher.local_queue))
            prepare.assert_called_once_with(
                topic=self.CONF.publisher_rpc.metering_topic)

    def test_published_with_policy_queue_and_rpc_down_up(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=queue'))

        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.side_effect = side_effect
            publisher.publish_samples(mock.MagicMock(), self.test_data)

            self.assertEqual(1, len(publisher.local_queue))

            prepare.side_effect = mock.MagicMock()
            publisher.publish_samples(mock.MagicMock(), self.test_data)

            self.assertEqual(0, len(publisher.local_queue))

            topic = self.CONF.publisher_rpc.metering_topic
            expected = [
                mock.call(topic=topic),
                mock.call(topic=topic),
                mock.call(topic=topic)
            ]
            self.assertEqual(expected, prepare.mock_calls)

    def test_published_with_policy_sized_queue_and_rpc_down(self):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=queue&max_queue_length=3'))

        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.side_effect = side_effect
            for i in range(0, 5):
                for s in self.test_data:
                    s.source = 'test-%d' % i
                publisher.publish_samples(mock.MagicMock(), self.test_data)

        self.assertEqual(3, len(publisher.local_queue))
        self.assertEqual('test-2', publisher.local_queue[0][2][0]['source'])
        self.assertEqual('test-3', publisher.local_queue[1][2][0]['source'])
        self.assertEqual('test-4', publisher.local_queue[2][2][0]['source'])

    def test_published_with_policy_default_sized_queue_and_rpc_down(self):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=queue'))

        side_effect = oslo.messaging._drivers.common.RPCException()
        with mock.patch.object(publisher.rpc_client, 'prepare') as prepare:
            prepare.side_effect = side_effect
            for i in range(0, 2000):
                for s in self.test_data:
                    s.source = 'test-%d' % i
                publisher.publish_samples(mock.MagicMock(), self.test_data)

        self.assertEqual(1024, len(publisher.local_queue))
        self.assertEqual('test-976', publisher.local_queue[0][2][0]['source'])
        self.assertEqual('test-1999',
                         publisher.local_queue[1023][2][0]['source'])
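
The rpc:// URLs in these tests carry all publisher options; a sketch of how
such query parameters could be read (option names and defaults are inferred
from the tests, the parsing helper itself is hypothetical):

from urllib.parse import parse_qs, urlsplit


def rpc_publisher_options(url):
    params = parse_qs(urlsplit(url).query)
    return {
        'policy': params.get('policy', ['default'])[0],
        'max_queue_length': int(params.get('max_queue_length', [1024])[0]),
        'per_meter_topic': bool(int(params.get('per_meter_topic', [0])[0])),
        'target': params.get('target', ['record_metering_data'])[0],
    }

# e.g. rpc_publisher_options('rpc://?policy=queue&max_queue_length=3')
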
Example No. 18
class TestPublish(base.TestCase):

    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test3',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    def faux_cast(self, context, topic, msg):
        if self.rpc_unreachable:
            # NOTE(sileht): ugly, but when rabbitmq is unreachable
            # and rabbitmq_max_retries is not 0,
            # oslo.rpc does a sys.exit(1), so we do the same
            # thing here until this is fixed in oslo.
            raise SystemExit(1)
        else:
            self.published.append((topic, msg))

    def setUp(self):
        super(TestPublish, self).setUp()
        self.published = []
        self.rpc_unreachable = False
        self.stubs.Set(oslo_rpc, 'cast', self.faux_cast)

    def test_published(self):
        publisher = rpc.RPCPublisher(network_utils.urlsplit('rpc://'))
        publisher.publish_samples(None, self.test_data)
        self.assertEqual(len(self.published), 1)
        self.assertEqual(self.published[0][0],
                         cfg.CONF.publisher_rpc.metering_topic)
        self.assertIsInstance(self.published[0][1]['args']['data'], list)
        self.assertEqual(self.published[0][1]['method'],
                         'record_metering_data')

    def test_publish_target(self):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?target=custom_procedure_call'))
        publisher.publish_samples(None, self.test_data)
        self.assertEqual(len(self.published), 1)
        self.assertEqual(self.published[0][0],
                         cfg.CONF.publisher_rpc.metering_topic)
        self.assertIsInstance(self.published[0][1]['args']['data'], list)
        self.assertEqual(self.published[0][1]['method'],
                         'custom_procedure_call')

    def test_published_with_per_meter_topic(self):
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?per_meter_topic=1'))
        publisher.publish_samples(None, self.test_data)
        self.assertEqual(len(self.published), 4)
        for topic, rpc_call in self.published:
            meters = rpc_call['args']['data']
            self.assertIsInstance(meters, list)
            if topic != cfg.CONF.publisher_rpc.metering_topic:
                self.assertEqual(
                    len(set(meter['counter_name'] for meter in meters)), 1,
                    "Meter are published grouped by name")

        topics = [topic for topic, meter in self.published]
        self.assertIn(cfg.CONF.publisher_rpc.metering_topic, topics)
        self.assertIn(cfg.CONF.publisher_rpc.metering_topic + '.' + 'test',
                      topics)
        self.assertIn(cfg.CONF.publisher_rpc.metering_topic + '.' + 'test2',
                      topics)
        self.assertIn(cfg.CONF.publisher_rpc.metering_topic + '.' + 'test3',
                      topics)

    def test_published_concurrency(self):
        """This test the concurrent access to the local queue
        of the rpc publisher
        """
        def faux_cast_go(context, topic, msg):
            self.published.append((topic, msg))

        def faux_cast_wait(context, topic, msg):
            self.stubs.Set(oslo_rpc, 'cast', faux_cast_go)
            # Sleep to simulate concurrency and allow other threads to work
            eventlet.sleep(0)
            self.published.append((topic, msg))

        self.stubs.Set(oslo_rpc, 'cast', faux_cast_wait)

        publisher = rpc.RPCPublisher(network_utils.urlsplit('rpc://'))
        job1 = eventlet.spawn(publisher.publish_samples, None, self.test_data)
        job2 = eventlet.spawn(publisher.publish_samples, None, self.test_data)

        job1.wait()
        job2.wait()

        self.assertEqual(publisher.policy, 'default')
        self.assertEqual(len(self.published), 2)
        self.assertEqual(len(publisher.local_queue), 0)

    def test_published_with_no_policy(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(network_utils.urlsplit('rpc://'))
        self.assertRaises(SystemExit, publisher.publish_samples, None,
                          self.test_data)
        self.assertEqual(publisher.policy, 'default')
        self.assertEqual(len(self.published), 0)
        self.assertEqual(len(publisher.local_queue), 0)

    def test_published_with_policy_block(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=default'))
        self.assertRaises(SystemExit, publisher.publish_samples, None,
                          self.test_data)
        self.assertEqual(len(self.published), 0)
        self.assertEqual(len(publisher.local_queue), 0)

    def test_published_with_policy_incorrect(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=notexist'))
        self.assertRaises(SystemExit, publisher.publish_samples, None,
                          self.test_data)
        self.assertEqual(publisher.policy, 'default')
        self.assertEqual(len(self.published), 0)
        self.assertEqual(len(publisher.local_queue), 0)

    def test_published_with_policy_drop_and_rpc_down(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=drop'))
        publisher.publish_samples(None, self.test_data)
        self.assertEqual(len(self.published), 0)
        self.assertEqual(len(publisher.local_queue), 0)

    def test_published_with_policy_queue_and_rpc_down(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=queue'))
        publisher.publish_samples(None, self.test_data)
        self.assertEqual(len(self.published), 0)
        self.assertEqual(len(publisher.local_queue), 1)

    def test_published_with_policy_queue_and_rpc_down_up(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=queue'))
        publisher.publish_samples(None, self.test_data)
        self.assertEqual(len(self.published), 0)
        self.assertEqual(len(publisher.local_queue), 1)

        self.rpc_unreachable = False
        publisher.publish_samples(None, self.test_data)

        self.assertEqual(len(self.published), 2)
        self.assertEqual(len(publisher.local_queue), 0)

    def test_published_with_policy_sized_queue_and_rpc_down(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=queue&max_queue_length=3'))
        for i in range(0, 5):
            for s in self.test_data:
                s.source = 'test-%d' % i
            publisher.publish_samples(None, self.test_data)
        self.assertEqual(len(self.published), 0)
        self.assertEqual(len(publisher.local_queue), 3)
        self.assertEqual(
            publisher.local_queue[0][2]['args']['data'][0]['source'], 'test-2')
        self.assertEqual(
            publisher.local_queue[1][2]['args']['data'][0]['source'], 'test-3')
        self.assertEqual(
            publisher.local_queue[2][2]['args']['data'][0]['source'], 'test-4')

    def test_published_with_policy_default_sized_queue_and_rpc_down(self):
        self.rpc_unreachable = True
        publisher = rpc.RPCPublisher(
            network_utils.urlsplit('rpc://?policy=queue'))
        for i in range(0, 2000):
            for s in self.test_data:
                s.source = 'test-%d' % i
            publisher.publish_samples(None, self.test_data)
        self.assertEqual(len(self.published), 0)
        self.assertEqual(len(publisher.local_queue), 1024)
        self.assertEqual(
            publisher.local_queue[0][2]['args']['data'][0]['source'],
            'test-976')
        self.assertEqual(
            publisher.local_queue[1023][2]['args']['data'][0]['source'],
            'test-1999')
Example No. 19
class BasePublisherTestCase(tests_base.BaseTestCase):
    test_event_data = [
        event.Event(message_id=uuid.uuid4(),
                    event_type='event_%d' % i,
                    generated=datetime.datetime.utcnow(),
                    traits=[], raw={})
        for i in range(0, 5)
    ]

    test_sample_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test3',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    def setUp(self):
        super(BasePublisherTestCase, self).setUp()
        self.CONF = service.prepare_service([], [])
        self.setup_messaging(self.CONF)
Example No. 20
class TestFilePublisher(base.TestCase):

    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    def test_file_publisher(self):
        # Test valid configurations
        parsed_url = urlsplit(
            'file:///tmp/log_file?max_bytes=50&backup_count=3')
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_counters(None, self.test_data)

        handler = publisher.publisher_logger.handlers[0]
        self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
        self.assertEqual(
            [handler.maxBytes, handler.baseFilename, handler.backupCount],
            [50, '/tmp/log_file', 3])
        # The rotated file gets created since only 50 bytes are allowed.
        self.assertTrue(os.path.exists('/tmp/log_file.1'))

        # Test missing max bytes, backup count configurations
        parsed_url = urlsplit('file:///tmp/log_file_plain')
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_counters(None, self.test_data)

        handler = publisher.publisher_logger.handlers[0]
        self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
        self.assertEqual(
            [handler.maxBytes, handler.baseFilename, handler.backupCount],
            [0, '/tmp/log_file_plain', 0])

        # The plain log file gets created, with no rotation configured.
        self.assertTrue(os.path.exists('/tmp/log_file_plain'))

        # Test invalid max bytes, backup count configurations
        parsed_url = urlsplit(
            'file:///tmp/log_file_bad?max_bytes=yus&backup_count=5y')
        publisher = file.FilePublisher(parsed_url)
        publisher.publish_counters(None, self.test_data)

        self.assertIsNone(publisher.publisher_logger)
Example No. 21
    def get_samples(manager, cache, resources):
        nt = client.Client(version='2', session=sess)

        for host in resources:
            LOG.debug(_('checking host %s'), host)
            try:
                info = nt.hosts.get(host)
                values = []
                if len(info) >= 3:
                    # info[0] = total, info[1] = used now, info[2] = used max
                    for entry, when in zip(info[:3], ('tot', 'now', 'max')):
                        values.append({'name': 'ram.%s' % when,
                                       'unit': 'MB',
                                       'value': entry.memory_mb or 0})
                        values.append({'name': 'disk.%s' % when,
                                       'unit': 'GB',
                                       'value': entry.disk_gb or 0})
                        values.append({'name': 'cpu.%s' % when,
                                       'unit': 'cpu',
                                       'value': entry.cpu or 0})

                for item in values:
                    my_sample = sample.Sample(
                        name="compute.node.%s" % item['name'],
                        type=sample.TYPE_GAUGE,
                        unit=item['unit'],
                        volume=item['value'],
                        user_id=None,
                        project_id=None,
                        resource_id="%s_%s" % (host, host),
                        timestamp=timeutils.isotime(),
                        resource_metadata={})
                    LOG.debug("Publish sample: %s", my_sample)
                    yield my_sample

            except Exception as err:
                LOG.exception(_('could not get info for host %(host)s: %(e)s'),
                              {'host': host, 'e': err})
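
The pollster above yields nine gauges per host, named
compute.node.{ram,disk,cpu}.{tot,now,max}. A hypothetical consumer, assuming
an authenticated session `sess` already exists in scope and using placeholder
manager/cache/host values:

for s in get_samples(manager=None, cache={}, resources=['compute-host-1']):
    print(s.name, s.unit, s.volume)
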
Example No. 22
class TestUDPPublisher(base.BaseTestCase):
    test_data = [
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test2',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
        sample.Sample(
            name='test3',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
            source=COUNTER_SOURCE,
        ),
    ]

    def _make_fake_socket(self, published):
        def _fake_socket_socket(family, type):
            def record_data(msg, dest):
                published.append((msg, dest))

            udp_socket = mock.Mock()
            udp_socket.sendto = record_data
            return udp_socket

        return _fake_socket_socket

    def setUp(self):
        super(TestUDPPublisher, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF.publisher.metering_secret = 'not-so-secret'

    def test_published(self):
        self.data_sent = []
        with mock.patch('socket.socket',
                        self._make_fake_socket(self.data_sent)):
            publisher = udp.UDPPublisher(netutils.urlsplit('udp://somehost'))
        publisher.publish_samples(None, self.test_data)

        self.assertEqual(5, len(self.data_sent))

        sent_counters = []

        for data, dest in self.data_sent:
            counter = msgpack.loads(data)
            sent_counters.append(counter)

            # Check destination
            self.assertEqual(('somehost', self.CONF.collector.udp_port), dest)

        # Check that counters are equal
        self.assertEqual(
            sorted([
                utils.meter_message_from_counter(d, "not-so-secret")
                for d in self.test_data
            ]), sorted(sent_counters))

    @staticmethod
    def _raise_ioerror(*args):
        raise IOError

    def _make_broken_socket(self, family, type):
        udp_socket = mock.Mock()
        udp_socket.sendto = self._raise_ioerror
        return udp_socket

    def test_publish_error(self):
        with mock.patch('socket.socket', self._make_broken_socket):
            publisher = udp.UDPPublisher(netutils.urlsplit('udp://localhost'))
        publisher.publish_samples(None, self.test_data)
Example No. 23
    def setUp(self):
        super(TestListMeters, self).setUp()
        self.messages = []
        for cnt in [
                sample.Sample('meter.test',
                              'cumulative',
                              '',
                              1,
                              'user-id',
                              'project-id',
                              'resource-id',
                              timestamp=datetime.datetime(2012, 7, 2, 10, 40),
                              resource_metadata={
                                  'display_name': 'test-server',
                                  'tag': 'self.sample',
                                  'size': 123,
                                  'util': 0.75,
                                  'is_public': True
                              },
                              source='test_source'),
                sample.Sample('meter.test',
                              'cumulative',
                              '',
                              3,
                              'user-id',
                              'project-id',
                              'resource-id',
                              timestamp=datetime.datetime(2012, 7, 2, 11, 40),
                              resource_metadata={
                                  'display_name': 'test-server',
                                  'tag': 'self.sample1',
                                  'size': 0,
                                  'util': 0.47,
                                  'is_public': False
                              },
                              source='test_source'),
                sample.Sample('meter.mine',
                              'gauge',
                              '',
                              1,
                              'user-id',
                              'project-id',
                              'resource-id2',
                              timestamp=datetime.datetime(2012, 7, 2, 10, 41),
                              resource_metadata={
                                  'display_name': 'test-server',
                                  'tag': 'self.sample2',
                                  'size': 456,
                                  'util': 0.64,
                                  'is_public': False
                              },
                              source='test_source'),
                sample.Sample('meter.test',
                              'cumulative',
                              '',
                              1,
                              'user-id2',
                              'project-id2',
                              'resource-id3',
                              timestamp=datetime.datetime(2012, 7, 2, 10, 42),
                              resource_metadata={
                                  'display_name': 'test-server',
                                  'tag': 'self.sample3',
                                  'size': 0,
                                  'util': 0.75,
                                  'is_public': False
                              },
                              source='test_source'),
                sample.Sample('meter.mine',
                              'gauge',
                              '',
                              1,
                              'user-id4',
                              'project-id2',
                              'resource-id4',
                              timestamp=datetime.datetime(2012, 7, 2, 10, 43),
                              resource_metadata={
                                  'display_name': 'test-server',
                                  'tag': 'self.sample4',
                                  'properties': {
                                      'prop_1': 'prop_value',
                                      'prop_2': {
                                          'sub_prop_1': 'sub_prop_value'
                                      }
                                  },
                                  'size': 0,
                                  'util': 0.58,
                                  'is_public': True
                              },
                              source='test_source1')
        ]:
            msg = utils.meter_message_from_counter(
                cnt, self.CONF.publisher.metering_secret)
            self.messages.append(msg)
            self.conn.record_metering_data(msg)
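
Both this example and the next sign each metering message with a shared secret before recording it. The signature is, roughly, an HMAC-SHA256 over the message fields. Here is a simplified sketch modeled on ceilometer.publisher.utils.compute_signature; the real implementation walks nested key pairs, so treat the details as an approximation:

import hashlib
import hmac

def compute_signature(message, secret):
    # Simplified sketch: HMAC-SHA256 over the sorted message items,
    # skipping the signature field itself.
    digest_maker = hmac.new(secret.encode('utf-8'), b'', hashlib.sha256)
    for name, value in sorted(message.items()):
        if name == 'message_signature':
            continue
        digest_maker.update(str(name).encode('utf-8'))
        digest_maker.update(str(value).encode('utf-8'))
    return digest_maker.hexdigest()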
Example No. 24
    def setUp(self):
        super(TestListMeters, self).setUp()

        for cnt in [
                sample.Sample(
                    'meter.test',
                    'cumulative',
                    '',
                    1,
                    'user-id',
                    'project-id',
                    'resource-id',
                    timestamp=datetime.datetime(2012, 7, 2, 10, 40),
                    resource_metadata={'display_name': 'test-server',
                                       'tag': 'self.counter'},
                    source='test_source'),
                sample.Sample(
                    'meter.test',
                    'cumulative',
                    '',
                    3,
                    'user-id',
                    'project-id',
                    'resource-id',
                    timestamp=datetime.datetime(2012, 7, 2, 11, 40),
                    resource_metadata={'display_name': 'test-server',
                                       'tag': 'self.counter1'},
                    source='test_source'),
                sample.Sample(
                    'meter.mine',
                    'gauge',
                    '',
                    1,
                    'user-id',
                    'project-id',
                    'resource-id2',
                    timestamp=datetime.datetime(2012, 7, 2, 10, 41),
                    resource_metadata={'display_name': 'test-server',
                                       'tag': 'self.counter2'},
                    source='test_source'),
                sample.Sample(
                    'meter.test',
                    'cumulative',
                    '',
                    1,
                    'user-id2',
                    'project-id2',
                    'resource-id3',
                    timestamp=datetime.datetime(2012, 7, 2, 10, 42),
                    resource_metadata={'display_name': 'test-server',
                                       'tag': 'self.counter3'},
                    source='test_source'),
                sample.Sample(
                    'meter.mine',
                    'gauge',
                    '',
                    1,
                    'user-id4',
                    'project-id2',
                    'resource-id4',
                    timestamp=datetime.datetime(2012, 7, 2, 10, 43),
                    resource_metadata={'display_name': 'test-server',
                                       'tag': 'self.counter4'},
                    source='test_source')]:
            msg = rpc.meter_message_from_counter(
                cnt,
                cfg.CONF.publisher_rpc.metering_secret)
            self.conn.record_metering_data(msg)
Example No. 25
class TestHttpPublisher(base.BaseTestCase):

    resource_id = str(uuid.uuid4())

    sample_data = [
        sample.Sample(
            name='alpha',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='beta',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
        sample.Sample(
            name='gamma',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id=resource_id,
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'name': 'TestPublish'},
        ),
    ]

    event_data = [
        event.Event(message_id=str(uuid.uuid4()),
                    event_type='event_%d' % i,
                    generated=datetime.datetime.utcnow().isoformat(),
                    traits=[],
                    raw={'payload': {
                        'some': 'aa'
                    }}) for i in range(3)
    ]

    def setUp(self):
        super(TestHttpPublisher, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf

    def test_http_publisher_config(self):
        """Test publisher config parameters."""
        # invalid hostname: the given url results in an empty hostname
        parsed_url = urlparse.urlparse('http:/aaa.bb/path')
        self.assertRaises(ValueError, http.HttpPublisher, self.CONF,
                          parsed_url)

        # invalid port
        parsed_url = urlparse.urlparse('http://aaa:bb/path')
        self.assertRaises(ValueError, http.HttpPublisher, self.CONF,
                          parsed_url)

    @mock.patch('ceilometer.publisher.http.LOG')
    def test_http_post_samples(self, thelog):
        """Test publisher post."""
        parsed_url = urlparse.urlparse('http://localhost:90/path1')
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        res = requests.Response()
        res.status_code = 200
        with mock.patch.object(requests.Session, 'post',
                               return_value=res) as m_req:
            publisher.publish_samples(self.sample_data)

        self.assertEqual(1, m_req.call_count)
        self.assertFalse(thelog.error.called)

    @mock.patch('ceilometer.publisher.http.LOG')
    def test_http_post_events(self, thelog):
        """Test publisher post."""
        parsed_url = urlparse.urlparse('http://localhost:90/path1')
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        res = requests.Response()
        res.status_code = 200
        with mock.patch.object(requests.Session, 'post',
                               return_value=res) as m_req:
            publisher.publish_events(self.event_data)

        self.assertEqual(1, m_req.call_count)
        self.assertFalse(thelog.error.called)

    @mock.patch('ceilometer.publisher.http.LOG')
    def test_http_post_empty_data(self, thelog):
        parsed_url = urlparse.urlparse('http://localhost:90/path1')
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        res = requests.Response()
        res.status_code = 200
        with mock.patch.object(requests.Session, 'post',
                               return_value=res) as m_req:
            publisher.publish_events([])

        self.assertEqual(0, m_req.call_count)
        self.assertTrue(thelog.debug.called)

    def _post_batch_control_test(self, method, data, batch):
        parsed_url = urlparse.urlparse('http://localhost:90/path1?'
                                       'batch=%s' % batch)
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        with mock.patch.object(requests.Session, 'post') as post:
            getattr(publisher, method)(data)
            self.assertEqual(1 if batch else 3, post.call_count)

    def test_post_batch_sample(self):
        self._post_batch_control_test('publish_samples', self.sample_data, 1)

    def test_post_no_batch_sample(self):
        self._post_batch_control_test('publish_samples', self.sample_data, 0)

    def test_post_batch_event(self):
        self._post_batch_control_test('publish_events', self.event_data, 1)

    def test_post_no_batch_event(self):
        self._post_batch_control_test('publish_events', self.event_data, 0)

    def test_post_verify_ssl_default(self):
        parsed_url = urlparse.urlparse('http://localhost:90/path1')
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        with mock.patch.object(requests.Session, 'post') as post:
            publisher.publish_samples(self.sample_data)
            self.assertTrue(post.call_args[1]['verify'])

    def test_post_verify_ssl_True(self):
        parsed_url = urlparse.urlparse('http://localhost:90/path1?'
                                       'verify_ssl=True')
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        with mock.patch.object(requests.Session, 'post') as post:
            publisher.publish_samples(self.sample_data)
            self.assertTrue(post.call_args[1]['verify'])

    def test_post_verify_ssl_False(self):
        parsed_url = urlparse.urlparse('http://localhost:90/path1?'
                                       'verify_ssl=False')
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        with mock.patch.object(requests.Session, 'post') as post:
            publisher.publish_samples(self.sample_data)
            self.assertFalse(post.call_args[1]['verify'])

    def test_post_verify_ssl_path(self):
        parsed_url = urlparse.urlparse('http://localhost:90/path1?'
                                       'verify_ssl=/path/to/cert.crt')
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        with mock.patch.object(requests.Session, 'post') as post:
            publisher.publish_samples(self.sample_data)
            self.assertEqual('/path/to/cert.crt', post.call_args[1]['verify'])

    def test_post_raw_only(self):
        parsed_url = urlparse.urlparse('http://localhost:90/path1?raw_only=1')
        publisher = http.HttpPublisher(self.CONF, parsed_url)

        with mock.patch.object(requests.Session, 'post') as post:
            publisher.publish_events(self.event_data)
            self.assertEqual(
                '[{"some": "aa"}, {"some": "aa"}, {"some": "aa"}]',
                post.call_args[1]['data'])
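
The batch and verify_ssl behaviour exercised above is driven by query parameters on the publisher URL. The following is an illustrative sketch of that kind of option parsing, not the actual HttpPublisher code:

import urllib.parse as urlparse

def parse_publisher_options(url):
    # Pull publisher options out of the URL's query string.
    params = urlparse.parse_qs(urlparse.urlparse(url).query)

    def last(name, default):
        return params.get(name, [default])[-1]

    batch = last('batch', '1').lower() not in ('0', 'false')
    verify = last('verify_ssl', 'true')
    if verify.lower() in ('1', 'true'):
        verify = True          # verify against the system CA bundle
    elif verify.lower() in ('0', 'false'):
        verify = False         # disable certificate verification
    # any other value is treated as a path to a CA certificate
    return {'batch': batch, 'verify': verify}

# parse_publisher_options('http://localhost:90/path1?verify_ssl=/path/to/cert.crt')
# -> {'batch': True, 'verify': '/path/to/cert.crt'}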
Example No. 26
    def post(self, samples):
        """Post a list of new Samples to Telemetry.

        :param samples: a list of samples within the request body.
        """

        rbac.enforce('create_samples', pecan.request)

        now = timeutils.utcnow()
        auth_project = rbac.get_limited_to_project(pecan.request.headers)
        def_source = pecan.request.cfg.sample_source
        def_project_id = pecan.request.headers.get('X-Project-Id')
        def_user_id = pecan.request.headers.get('X-User-Id')

        published_samples = []
        for s in samples:
            for p in pecan.request.pipeline_manager.pipelines:
                if p.support_meter(s.counter_name):
                    break
            else:
                message = _("The metric %s is not supported by metering "
                            "pipeline configuration.") % s.counter_name
                raise base.ClientSideError(message, status_code=409)

            if self.meter_name != s.counter_name:
                raise wsme.exc.InvalidInput('counter_name', s.counter_name,
                                            'should be %s' % self.meter_name)

            if s.message_id:
                raise wsme.exc.InvalidInput('message_id', s.message_id,
                                            'The message_id must not be set')

            if s.counter_type not in sample.TYPES:
                raise wsme.exc.InvalidInput(
                    'counter_type', s.counter_type,
                    'The counter type must be: ' + ', '.join(sample.TYPES))

            s.user_id = (s.user_id or def_user_id)
            s.project_id = (s.project_id or def_project_id)
            s.source = '%s:%s' % (s.project_id, (s.source or def_source))
            s.timestamp = (s.timestamp or now)

            if auth_project and auth_project != s.project_id:
                # non admin user trying to cross post to another project_id
                auth_msg = 'can not post samples to other projects'
                raise wsme.exc.InvalidInput('project_id', s.project_id,
                                            auth_msg)

            published_sample = sample.Sample(
                name=s.counter_name,
                type=s.counter_type,
                unit=s.counter_unit,
                volume=s.counter_volume,
                user_id=s.user_id,
                project_id=s.project_id,
                resource_id=s.resource_id,
                timestamp=s.timestamp.isoformat(),
                resource_metadata=utils.restore_nesting(s.resource_metadata,
                                                        separator='.'),
                source=s.source)
            published_samples.append(published_sample)

            s.message_id = published_sample.id

        with pecan.request.pipeline_manager.publisher(
                context.get_admin_context()) as publisher:
            publisher(published_samples)

        return samples
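
Because samples arrive over the API with flattened metadata keys, the handler above calls utils.restore_nesting before publishing. A rough, self-contained equivalent of that helper (an approximation, not the exact Ceilometer code):

def restore_nesting(flat, separator='.'):
    # Unflatten metadata: {'a.b': 1} becomes {'a': {'b': 1}}.
    nested = {}
    for key, value in flat.items():
        node = nested
        parts = key.split(separator)
        for part in parts[:-1]:
            node = node.setdefault(part, {})
        node[parts[-1]] = value
    return nested

# restore_nesting({'properties.prop_1': 'v'})
# -> {'properties': {'prop_1': 'v'}}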
Example No. 27
    def publish_sample(self, env, bytes_received, bytes_sent):
        path = urlparse.quote(env['PATH_INFO'])
        method = env['REQUEST_METHOD']
        headers = {}
        for header in env:
            if header.startswith('HTTP_') and env[header]:
                key = header[5:]
                if isinstance(env[header], six.text_type):
                    headers[key] = env[header].encode('utf-8')
                else:
                    headers[key] = str(env[header])

        try:
            container = obj = None
            version, account, remainder = path.replace('/', '',
                                                       1).split('/', 2)
            if not version or not account:
                raise ValueError('Invalid path: %s' % path)
            if remainder:
                if '/' in remainder:
                    container, obj = remainder.split('/', 1)
                else:
                    container = remainder
        except ValueError:
            return

        now = timeutils.utcnow().isoformat()

        resource_metadata = {
            "path": path,
            "version": version,
            "container": container,
            "object": obj,
        }

        for header in self.metadata_headers:
            if header.upper() in headers:
                resource_metadata['http_header_%s' % header] = headers.get(
                    header.upper())

        with self.pipeline_manager.publisher(
                context.get_admin_context()) as publisher:
            if bytes_received:
                publisher([
                    sample.Sample(name='storage.objects.incoming.bytes',
                                  type=sample.TYPE_DELTA,
                                  unit='B',
                                  volume=bytes_received,
                                  user_id=env.get('HTTP_X_USER_ID'),
                                  project_id=env.get('HTTP_X_TENANT_ID'),
                                  resource_id=account.partition(
                                      self.reseller_prefix)[2],
                                  timestamp=now,
                                  resource_metadata=resource_metadata)
                ])

            if bytes_sent:
                publisher([
                    sample.Sample(name='storage.objects.outgoing.bytes',
                                  type=sample.TYPE_DELTA,
                                  unit='B',
                                  volume=bytes_sent,
                                  user_id=env.get('HTTP_X_USER_ID'),
                                  project_id=env.get('HTTP_X_TENANT_ID'),
                                  resource_id=account.partition(
                                      self.reseller_prefix)[2],
                                  timestamp=now,
                                  resource_metadata=resource_metadata)
                ])

            # publish the event for each request
            # request method will be recorded in the metadata
            resource_metadata['method'] = method.lower()
            publisher([
                sample.Sample(name='storage.api.request',
                              type=sample.TYPE_DELTA,
                              unit='request',
                              volume=1,
                              user_id=env.get('HTTP_X_USER_ID'),
                              project_id=env.get('HTTP_X_TENANT_ID'),
                              resource_id=account.partition(
                                  self.reseller_prefix)[2],
                              timestamp=now,
                              resource_metadata=resource_metadata)
            ])
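
To make the path handling concrete, this is how the split in the try block behaves; note that a two-segment path such as '/v1/AUTH_test' yields only two values, so the unpacking raises ValueError and the request is silently skipped:

path = '/v1/AUTH_test/container/obj/name'
version, account, remainder = path.replace('/', '', 1).split('/', 2)
# version == 'v1', account == 'AUTH_test', remainder == 'container/obj/name'
container, obj = remainder.split('/', 1)
# container == 'container', obj == 'obj/name'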
Example No. 28
def make_test_data(name,
                   meter_type,
                   unit,
                   volume,
                   random_min,
                   random_max,
                   user_id,
                   project_id,
                   resource_id,
                   start,
                   end,
                   interval,
                   resource_metadata=None,
                   source='artificial'):
    resource_metadata = resource_metadata or {
        'display_name': 'toto',
        'host': 'tata',
        'image_ref_url': 'test',
        'instance_flavor_id': 'toto',
    }
    # Compute start and end timestamps for the new data.
    if isinstance(start, datetime.datetime):
        timestamp = start
    else:
        timestamp = timeutils.parse_strtime(start)

    if not isinstance(end, datetime.datetime):
        end = timeutils.parse_strtime(end)

    increment = datetime.timedelta(minutes=interval)

    print('Adding new events for meter %s.' % (name))
    # Generate events
    n = 0
    total_volume = volume
    while timestamp <= end:
        if (random_min >= 0 and random_max >= 0):
            # If there is a random element defined, we will add it to
            # user given volume.
            if isinstance(random_min, int) and isinstance(random_max, int):
                total_volume += random.randint(random_min, random_max)
            else:
                total_volume += random.uniform(random_min, random_max)

        c = sample.Sample(
            name=name,
            type=meter_type,
            unit=unit,
            volume=total_volume,
            user_id=user_id,
            project_id=project_id,
            resource_id=resource_id,
            timestamp=timestamp,
            resource_metadata=resource_metadata,
            source=source,
        )
        data = utils.meter_message_from_counter(
            c, cfg.CONF.publisher.telemetry_secret)

        yield data
        n += 1
        timestamp = timestamp + increment

        if (meter_type == 'gauge' or meter_type == 'delta'):
            # For delta and gauge, we don't want to increase the value
            # in time by random element. So we always set it back to
            # volume.
            total_volume = volume

    print('Added %d new events for meter %s.' % (n, name))
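
A hypothetical invocation of the generator (the meter name and IDs here are made up, and a configured publisher.telemetry_secret is assumed):

import datetime

for msg in make_test_data(
        name='cpu_util', meter_type='gauge', unit='%', volume=50,
        random_min=0, random_max=10,
        user_id='fake-user', project_id='fake-project',
        resource_id='fake-resource',
        start=datetime.datetime(2012, 7, 2, 10, 0),
        end=datetime.datetime(2012, 7, 2, 12, 0),
        interval=10):
    # Each yielded item is a signed metering message dict.
    print(msg['counter_name'], msg['counter_volume'])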
Example No. 29
class PublisherWorkflowTest(base.BaseTestCase,
                            testscenarios.TestWithScenarios):

    sample_scenarios = [
        ('cpu', dict(
            sample=sample.Sample(
                resource_id=str(uuid.uuid4()) + "_foobar",
                name='cpu',
                unit='ns',
                type=sample.TYPE_CUMULATIVE,
                volume=500,
                user_id='test_user',
                project_id='test_project',
                source='openstack',
                timestamp='2012-05-08 20:23:48.028195',
                resource_metadata={
                    'host': 'foo',
                    'image_ref': 'imageref!',
                    'instance_flavor_id': 1234,
                    'display_name': 'myinstance',
                },
            ),
            metric_attributes={
                "archive_policy_name": "ceilometer-low-rate",
                "unit": "ns",
                "measures": [{
                    'timestamp': '2012-05-08 20:23:48.028195',
                    'value': 500
                }]
            },
            postable_attributes={
                'user_id': 'test_user',
                'project_id': 'test_project',
            },
            patchable_attributes={
                'host': 'foo',
                'image_ref': 'imageref!',
                'flavor_id': 1234,
                'display_name': 'myinstance',
            },
            resource_type='instance')),
        ('disk.root.size', dict(
            sample=sample.Sample(
                resource_id=str(uuid.uuid4()) + "_foobar",
                name='disk.root.size',
                unit='GB',
                type=sample.TYPE_GAUGE,
                volume=2,
                user_id='test_user',
                project_id='test_project',
                source='openstack',
                timestamp='2012-05-08 20:23:48.028195',
                resource_metadata={
                    'host': 'foo',
                    'image_ref': 'imageref!',
                    'instance_flavor_id': 1234,
                    'display_name': 'myinstance',
                },
            ),
            metric_attributes={
                "archive_policy_name": "ceilometer-low",
                "unit": "GB",
                "measures": [{
                    'timestamp': '2012-05-08 20:23:48.028195',
                    'value': 2
                }]
            },
            postable_attributes={
                'user_id': 'test_user',
                'project_id': 'test_project',
            },
            patchable_attributes={
                'host': 'foo',
                'image_ref': 'imageref!',
                'flavor_id': 1234,
                'display_name': 'myinstance',
            },
            resource_type='instance')),
        ('hardware.ipmi.node.power', dict(
            sample=sample.Sample(
                resource_id=str(uuid.uuid4()) + "_foobar",
                name='hardware.ipmi.node.power',
                unit='W',
                type=sample.TYPE_GAUGE,
                volume=2,
                user_id='test_user',
                project_id='test_project',
                source='openstack',
                timestamp='2012-05-08 20:23:48.028195',
                resource_metadata={
                    'useless': 'not_used',
                },
            ),
            metric_attributes={
                "archive_policy_name": "ceilometer-low",
                "unit": "W",
                "measures": [{
                    'timestamp': '2012-05-08 20:23:48.028195',
                    'value': 2
                }]
            },
            postable_attributes={
                'user_id': 'test_user',
                'project_id': 'test_project',
            },
            patchable_attributes={
            },
            resource_type='ipmi')),
    ]

    default_workflow = dict(resource_exists=True,
                            post_measure_fail=False,
                            create_resource_fail=False,
                            create_resource_race=False,
                            update_resource_fail=False,
                            retry_post_measures_fail=False)
    workflow_scenarios = [
        ('normal_workflow', {}),
        ('new_resource', dict(resource_exists=False)),
        ('new_resource_compat', dict(resource_exists=False)),
        ('new_resource_fail', dict(resource_exists=False,
                                   create_resource_fail=True)),
        ('new_resource_race', dict(resource_exists=False,
                                   create_resource_race=True)),
        ('resource_update_fail', dict(update_resource_fail=True)),
        ('retry_fail', dict(resource_exists=False,
                            retry_post_measures_fail=True)),
        ('measure_fail', dict(post_measure_fail=True)),
    ]

    @classmethod
    def generate_scenarios(cls):
        workflow_scenarios = []
        for name, wf_change in cls.workflow_scenarios:
            wf = cls.default_workflow.copy()
            wf.update(wf_change)
            workflow_scenarios.append((name, wf))
        cls.scenarios = testscenarios.multiply_scenarios(cls.sample_scenarios,
                                                         workflow_scenarios)

    def setUp(self):
        super(PublisherWorkflowTest, self).setUp()
        conf = ceilometer_service.prepare_service(argv=[], config_files=[])
        self.conf = self.useFixture(config_fixture.Config(conf))
        ks_client = mock.Mock()
        ks_client.projects.find.return_value = mock.Mock(
            name='gnocchi', id='a2d42c23-d518-46b6-96ab-3fba2e146859')
        self.useFixture(fixtures.MockPatch(
            'ceilometer.keystone_client.get_client',
            return_value=ks_client))
        self.useFixture(fixtures.MockPatch(
            'ceilometer.keystone_client.get_session',
            return_value=ks_client))
        self.ks_client = ks_client

    @mock.patch('gnocchiclient.v1.client.Client')
    def test_delete_event_workflow(self, fakeclient_cls):
        url = netutils.urlsplit("gnocchi://")
        self.publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)

        fakeclient = fakeclient_cls.return_value

        fakeclient.resource.search.side_effect = [
            [{"id": "b26268d6-8bb5-11e6-baff-00224d8226cd",
              "type": "instance_disk",
              "instance_id": "9f9d01b9-4a58-4271-9e27-398b21ab20d1"}],
            [{"id": "b1c7544a-8bb5-11e6-850e-00224d8226cd",
              "type": "instance_network_interface",
              "instance_id": "9f9d01b9-4a58-4271-9e27-398b21ab20d1"}],
        ]

        search_params = {
            '=': {'instance_id': '9f9d01b9-4a58-4271-9e27-398b21ab20d1'}
        }

        now = timeutils.utcnow()
        self.useFixture(utils_fixture.TimeFixture(now))

        expected_calls = [
            mock.call.resource.search('instance_network_interface',
                                      search_params),
            mock.call.resource.search('instance_disk', search_params),
            mock.call.resource.update(
                'instance', '9f9d01b9-4a58-4271-9e27-398b21ab20d1',
                {'ended_at': now.isoformat()}),
            mock.call.resource.update(
                'instance_disk',
                'b26268d6-8bb5-11e6-baff-00224d8226cd',
                {'ended_at': now.isoformat()}),
            mock.call.resource.update(
                'instance_network_interface',
                'b1c7544a-8bb5-11e6-850e-00224d8226cd',
                {'ended_at': now.isoformat()}),
            mock.call.resource.update(
                'image', 'dc337359-de70-4044-8e2c-80573ba6e577',
                {'ended_at': now.isoformat()}),
            mock.call.resource.update(
                'volume', '6cc6e7dd-d17d-460f-ae79-7e08a216ce96',
                {'ended_at': now.isoformat()}),
            mock.call.resource.update(
                'network', '705e2c08-08e8-45cb-8673-5c5be955569b',
                {'ended_at': now.isoformat()})
        ]

        self.publisher.publish_events([INSTANCE_DELETE_START,
                                       IMAGE_DELETE_START,
                                       VOLUME_DELETE_END,
                                       FLOATINGIP_DELETE_END])
        self.assertEqual(8, len(fakeclient.mock_calls))
        for call in expected_calls:
            self.assertIn(call, fakeclient.mock_calls)

    @mock.patch('gnocchiclient.v1.client.Client')
    def test_create_event_workflow(self, fakeclient_cls):
        url = netutils.urlsplit("gnocchi://")
        self.publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)

        fakeclient = fakeclient_cls.return_value

        now = timeutils.utcnow()
        self.useFixture(utils_fixture.TimeFixture(now))

        expected_calls = [
            mock.call.resource.create(
                'instance',
                {'id': '9f9d01b9-4a58-4271-9e27-398b21ab20d1',
                 'user_id': '1e3ce043029547f1a61c1996d1a531a2',
                 'project_id': '7c150a59fe714e6f9263774af9688f0e',
                 'availability_zone': 'zone1',
                 'flavor_name': 'm1.tiny',
                 'flavor_id': '2',
                 'host': 'vagrant-precise'}),
        ]

        self.publisher.publish_events([INSTANCE_CREATE_END])
        self.assertEqual(1, len(fakeclient.mock_calls))
        for call in expected_calls:
            self.assertIn(call, fakeclient.mock_calls)

    @mock.patch('ceilometer.publisher.gnocchi.LOG')
    @mock.patch('gnocchiclient.v1.client.Client')
    def test_workflow(self, fakeclient_cls, logger):

        fakeclient = fakeclient_cls.return_value

        resource_id = self.sample.resource_id.replace("/", "_")
        metric_name = self.sample.name
        gnocchi_id = uuid.uuid4()

        expected_calls = [
            mock.call.archive_policy.create({"name": "ceilometer-low",
                                             "back_window": 0,
                                             "aggregation_methods": ["mean"],
                                             "definition": mock.ANY}),
            mock.call.archive_policy.create({"name": "ceilometer-low-rate",
                                             "back_window": 0,
                                             "aggregation_methods": [
                                                 "mean", "rate:mean"],
                                             "definition": mock.ANY}),
            mock.call.archive_policy.create({"name": "ceilometer-high",
                                             "back_window": 0,
                                             "aggregation_methods": ["mean"],
                                             "definition": mock.ANY}),
            mock.call.archive_policy.create({"name": "ceilometer-high-rate",
                                             "back_window": 0,
                                             "aggregation_methods": [
                                                 "mean", "rate:mean"],
                                             "definition": mock.ANY}),
            mock.call.metric.batch_resources_metrics_measures(
                {resource_id: {metric_name: self.metric_attributes}},
                create_metrics=True)
        ]
        expected_debug = [
            mock.call('filtered project found: %s',
                      'a2d42c23-d518-46b6-96ab-3fba2e146859'),
            mock.call('Processing sample [%s] for resource ID [%s].',
                      self.sample, resource_id),
        ]

        measures_posted = False
        batch_side_effect = []
        if self.post_measure_fail:
            batch_side_effect += [Exception('boom!')]
        elif not self.resource_exists:
            batch_side_effect += [
                gnocchi_exc.BadRequest(
                    400, {"cause": "Unknown resources",
                          'detail': [{
                              'resource_id': gnocchi_id,
                              'original_resource_id': resource_id}]})]

            attributes = self.postable_attributes.copy()
            attributes.update(self.patchable_attributes)
            attributes['id'] = self.sample.resource_id
            expected_calls.append(mock.call.resource.create(
                self.resource_type, attributes))

            if self.create_resource_fail:
                fakeclient.resource.create.side_effect = [Exception('boom!')]
            elif self.create_resource_race:
                fakeclient.resource.create.side_effect = [
                    gnocchi_exc.ResourceAlreadyExists(409)]
            else:  # not resource_exists
                expected_debug.append(mock.call(
                    'Resource %s created', self.sample.resource_id))

            if not self.create_resource_fail:
                expected_calls.append(
                    mock.call.metric.batch_resources_metrics_measures(
                        {resource_id: {metric_name: self.metric_attributes}},
                        create_metrics=True)
                )

                if self.retry_post_measures_fail:
                    batch_side_effect += [Exception('boom!')]
                else:
                    measures_posted = True

        else:
            measures_posted = True

        if measures_posted:
            batch_side_effect += [None]
            expected_debug.append(
                mock.call("%d measures posted against %d metrics through %d "
                          "resources", len(self.metric_attributes["measures"]),
                          1, 1)
            )

        if self.patchable_attributes:
            expected_calls.append(mock.call.resource.update(
                self.resource_type, resource_id,
                self.patchable_attributes))
            if self.update_resource_fail:
                fakeclient.resource.update.side_effect = [Exception('boom!')]
            else:
                expected_debug.append(mock.call(
                    'Resource %s updated', self.sample.resource_id))

        batch = fakeclient.metric.batch_resources_metrics_measures
        batch.side_effect = batch_side_effect

        url = netutils.urlsplit("gnocchi://")
        publisher = gnocchi.GnocchiPublisher(self.conf.conf, url)
        publisher.publish_samples([self.sample])

        # Check that the last log message is the expected one
        if (self.post_measure_fail
                or self.create_resource_fail
                or self.retry_post_measures_fail
                or (self.update_resource_fail and self.patchable_attributes)):
            logger.error.assert_called_with('boom!', exc_info=True)
        else:
            self.assertEqual(0, logger.error.call_count)
        self.assertEqual(expected_calls, fakeclient.mock_calls)
        self.assertEqual(expected_debug, logger.debug.mock_calls)
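
The generate_scenarios classmethod above relies on testscenarios to pair every sample scenario with every workflow scenario. A rough equivalent of what multiply_scenarios does (an approximation of the library behaviour):

import itertools

def multiply_scenarios(*scenario_lists):
    # Cross-product of scenario lists: names are joined with ',' and
    # the attribute dicts are merged left to right.
    result = []
    for combo in itertools.product(*scenario_lists):
        name = ','.join(n for n, _ in combo)
        attrs = {}
        for _, d in combo:
            attrs.update(d)
        result.append((name, attrs))
    return result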
Example No. 30
    def publish_sample(self, env, bytes_received, bytes_sent):
        req = REQUEST.Request(env)
        try:
            version, account, container, obj = split_path(req.path, 2, 4, True)
        except ValueError:
            return
        now = timeutils.utcnow().isoformat()

        resource_metadata = {
            "path": req.path,
            "version": version,
            "container": container,
            "object": obj,
        }

        for header in self.metadata_headers:
            if header.upper() in req.headers:
                resource_metadata['http_header_%s' % header] = req.headers.get(
                    header.upper())

        with self.pipeline_manager.publisher(
                context.get_admin_context()) as publisher:
            if bytes_received:
                publisher([
                    sample.Sample(name='storage.objects.incoming.bytes',
                                  type=sample.TYPE_DELTA,
                                  unit='B',
                                  volume=bytes_received,
                                  user_id=env.get('HTTP_X_USER_ID'),
                                  project_id=env.get('HTTP_X_TENANT_ID'),
                                  resource_id=account.partition(
                                      self.reseller_prefix)[2],
                                  timestamp=now,
                                  resource_metadata=resource_metadata)
                ])

            if bytes_sent:
                publisher([
                    sample.Sample(name='storage.objects.outgoing.bytes',
                                  type=sample.TYPE_DELTA,
                                  unit='B',
                                  volume=bytes_sent,
                                  user_id=env.get('HTTP_X_USER_ID'),
                                  project_id=env.get('HTTP_X_TENANT_ID'),
                                  resource_id=account.partition(
                                      self.reseller_prefix)[2],
                                  timestamp=now,
                                  resource_metadata=resource_metadata)
                ])

            # publish the event for each request
            # request method will be recorded in the metadata
            resource_metadata['method'] = req.method.lower()
            publisher([
                sample.Sample(name='storage.api.request',
                              type=sample.TYPE_DELTA,
                              unit='request',
                              volume=1,
                              user_id=env.get('HTTP_X_USER_ID'),
                              project_id=env.get('HTTP_X_TENANT_ID'),
                              resource_id=account.partition(
                                  self.reseller_prefix)[2],
                              timestamp=now,
                              resource_metadata=resource_metadata)
            ])
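
Unlike Example No. 27, this variant delegates path parsing to Swift's split_path, which also accepts account-level requests. With minsegs=2, maxsegs=4 and rest_with_last=True it behaves roughly like this (illustrative values):

# split_path('/v1/AUTH_test', 2, 4, True)
#     -> ['v1', 'AUTH_test', None, None]
# split_path('/v1/AUTH_test/cont', 2, 4, True)
#     -> ['v1', 'AUTH_test', 'cont', None]
# split_path('/v1/AUTH_test/cont/obj/with/slashes', 2, 4, True)
#     -> ['v1', 'AUTH_test', 'cont', 'obj/with/slashes']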