Example #1
def test_replica_set(self):
    self.CONF.set_override('connection',
                           str(tests_db.MongoDBFakeConnectionUrl()) +
                           '?replicaSet=foobar',
                           group='database')
    conn = impl_mongodb.Connection(self.CONF)
    self.assertTrue(conn.conn)
Example #2
def test_replica_set(self):
    self.CONF.set_override('connection',
                           self.db_manager.connection +
                           '?replicaSet=foobar',
                           group='database')
    conn = impl_mongodb.Connection(self.CONF)
    self.assertTrue(conn.conn)
Example #3
def test_replica_set(self):
    url = self.db_manager._url + '?replicaSet=foobar'
    conn = impl_mongodb.Connection(url)
    self.assertTrue(conn.conn)
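
All three variants exercise the same point: the driver should accept a MongoDB connection URL that carries a ?replicaSet= query option. As a quick illustration of the URL shape the tests build (the host, port and database name below are hypothetical, not values from these tests):

from urllib.parse import parse_qs, urlparse

# Hypothetical URL; the tests above get theirs from a fixture such as
# tests_db.MongoDBFakeConnectionUrl() or self.db_manager.
url = "mongodb://localhost:27017/ceilometer" + "?replicaSet=foobar"

parsed = urlparse(url)
assert parse_qs(parsed.query)["replicaSet"] == ["foobar"]
print(parsed.hostname, parsed.path, parsed.query)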
Example #4
def test_connection_pooling(self):
    test_conn = impl_mongodb.Connection(self.db_manager.url)
    self.assertEqual(self.conn.conn, test_conn.conn)
Example #5
def test_connection_pooling(self):
    self.assertEqual(self.conn.conn,
                     impl_mongodb.Connection(self.CONF).conn)
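
Both pooling tests assert that building a second Connection from the same configuration yields the same underlying client object. A minimal sketch of the client-caching pattern that assertion relies on; this is an illustration only, not Ceilometer's actual ConnectionPool code:

import pymongo

_clients = {}


def get_client(url):
    # Reuse one pymongo client per connection URL, so two Connection
    # objects built from the same URL end up sharing the same client,
    # which is what assertEqual(self.conn.conn, test_conn.conn) checks.
    if url not in _clients:
        _clients[url] = pymongo.MongoClient(url)
    return _clients[url]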
Example #6
import copy
import datetime
import multiprocessing
import random
import uuid

from oslo_config import cfg

from ceilometer.storage import impl_mongodb


# sample_dict, metadata and create_resources() are defined elsewhere in the
# surrounding script and are used here as-is.
def record_samples(samples_count=50000, resources_count=5000, conf=None):
    print('%s. %s. Start record samples' %
          (datetime.datetime.utcnow(), multiprocessing.current_process().name))
    # Load ceilometer.conf so cfg.CONF.database.connection is populated.
    cfg.CONF(["--config-file", "/etc/ceilometer/ceilometer.conf"],
             project='ceilometer')

    cl = impl_mongodb.Connection(cfg.CONF.database.connection)
    db = cl.db
    conf = conf or {}  # tolerate the default conf=None
    # Space the samples one interval apart, ending just before "now".
    one_second = datetime.timedelta(seconds=1) * (conf.get('interval') or 1)
    timestamp = datetime.datetime.utcnow() - one_second * (samples_count + 1)
    sample = copy.deepcopy(sample_dict)
    resource_ids = create_resources(resources_count)
    resources_timestamps = {}
    resource_metadatas = {}
    batch = []
    # Generate the samples, flushing them to the meter collection in batches.
    for i in range(samples_count):
        m = copy.deepcopy(metadata)
        sample['_id'] = uuid.uuid4().hex
        sample['message_id'] = uuid.uuid4().hex
        sample['timestamp'] = timestamp
        sample['counter_name'] = conf.get('name') or 'cpu_util'
        sample['counter_volume'] = random.randint(0, 1600)
        sample['counter_unit'] = conf.get('unit') or '%'
        sample['recorded_at'] = timestamp
        sample['project_id'] = conf.get('project')
        sample['user_id'] = conf.get('user')
        timestamp += one_second
        timestamp = timestamp.replace(microsecond=0)

        resource_index = random.randint(0, resources_count - 1)
        resource_id = resource_ids[resource_index]
        sample['resource_id'] = resource_id
        m['host'] = "host.%s" % resource_index
        sample['resource_metadata'] = m
        if resource_id not in resource_metadatas:
            resource_metadatas[resource_id] = m
        batch.append(copy.deepcopy(sample))
        if len(batch) >= 5000:
            db.meter.insert(batch)
            batch = []
        if not resources_timestamps.get(resource_id):
            resources_timestamps[resource_id] = [timestamp, timestamp]
        resources_timestamps[resource_id][1] = timestamp

    # Build one resource document per resource id seen in the loop above.
    resource_batch = []
    for resource, timestamps in resources_timestamps.items():
        resource_dict = {
            "_id": resource,
            "first_sample_timestamp": timestamps[0],
            "last_sample_timestamp": timestamps[0] + datetime.timedelta(
                seconds=random.randint(0, 1000)),
            "metadata": resource_metadatas[resource],
            "user_id": conf.get('user'),
            "project_id": conf.get('project'),
            "source": "jira",
            "meter": [
                {
                    "counter_name": conf.get('name', 'cpu_util'),
                    "counter_unit": conf.get('unit', '%'),
                    "counter_type": 'gauge'
                },
            ]
        }
        resource_batch.append(resource_dict)
    if batch:
        db.meter.insert(batch)
    if resource_batch:
        db.resource.insert(resource_batch)
    print("%s. %s. Wrote %s samples and %s resources" %
          (datetime.datetime.utcnow(), multiprocessing.current_process().name,
           samples_count, resources_count))
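
A sketch of how record_samples might be driven from several worker processes, which is what the multiprocessing.current_process() logging above suggests; the worker count and the conf values below are assumptions, only the keys read inside the function are taken from it:

if __name__ == '__main__':
    # Hypothetical driver configuration for the load generator.
    conf = {'name': 'cpu_util', 'unit': '%', 'interval': 1,
            'project': None, 'user': None}
    workers = [multiprocessing.Process(target=record_samples,
                                       kwargs={'samples_count': 50000,
                                               'resources_count': 5000,
                                               'conf': conf})
               for _ in range(4)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()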