def test_recursive_keypairs_with_list_of_dict(self):
    """Flatten a dict whose nested list contains a dict with colliding keys."""
    lo = 1
    hi = 1 << 64
    expected = [('a', 'A'), ('b', 'B'),
                ('nested:list', ['{%d: 99, %d: 42}' % (lo, hi)])]
    data = {'a': 'A', 'b': 'B', 'nested': {'list': [{lo: 99, hi: 42}]}}
    pairs = list(utils.recursive_keypairs(data))
    self.assertEqual(len(expected), len(pairs))
    # the keys 1 and 1<<64 cause a hash collision on 64bit platforms
    # so the inner dict may repr() in any of these forms.
    acceptable = [[('{%d: 99, %d: 42}' % (lo, hi)).encode('ascii')],
                  [('{%d: 99, %dL: 42}' % (lo, hi)).encode('ascii')],
                  [('{%d: 42, %d: 99}' % (hi, lo)).encode('ascii')],
                  [('{%dL: 42, %d: 99}' % (hi, lo)).encode('ascii')]]
    for key, value in pairs:
        if key == 'nested:list':
            self.assertIn(value, acceptable)
        else:
            self.assertIn((key, value), expected)
def test_recursive_keypairs(self):
    """A one-level nested dict flattens to colon-joined key paths."""
    data = {'a': 'A', 'b': 'B', 'nested': {'a': 'A', 'b': 'B'}}
    flattened = list(utils.recursive_keypairs(data))
    expected = [('a', 'A'), ('b', 'B'), ('nested:a', 'A'), ('nested:b', 'B')]
    self.assertEqual(expected, flattened)
def _flatten_metadata(metadata):
    """Return flattened resource metadata without nested structures and
    with all values converted to unicode strings.
    """
    if not metadata:
        return {}
    # Drop list/set values entirely; everything else is stringified.
    return dict((key, unicode(value))
                for key, value in utils.recursive_keypairs(metadata,
                                                           separator='.')
                if type(value) not in (list, set))
def test_recursive_keypairs_with_list_of_dict(self):
    """Both insertion orders of the colliding keys flatten identically."""
    lo = 1
    hi = 1 << 64
    expected = [('a', 'A'), ('b', 'B'),
                ('nested:list', ['{%d: 99, %dL: 42}' % (lo, hi)])]
    # the keys 1 and 1<<64 cause a hash collision on 64bit platforms
    for inner in ({lo: 99, hi: 42}, {hi: 42, lo: 99}):
        data = {'a': 'A', 'b': 'B', 'nested': {'list': [inner]}}
        self.assertEqual(expected, list(utils.recursive_keypairs(data)))
def _flatten_metadata(metadata):
    """Return flattened resource metadata without nested structures and
    with all values converted to unicode strings.
    """
    if metadata:
        pairs = utils.recursive_keypairs(metadata, separator='.')
        # list/set values are skipped; scalars become unicode strings.
        return dict((k, unicode(v)) for k, v in pairs
                    if type(v) not in set([list, set]))
    return {}
def compute_signature(message, secret):
    """Return the HMAC-SHA256 signature for a message dictionary.

    :param message: dict whose flattened key/value pairs are hashed
    :param secret: shared secret used as the HMAC key
    :returns: hex digest string
    """
    digest_maker = hmac.new(secret, '', hashlib.sha256)
    for name, value in utils.recursive_keypairs(message):
        if name == 'message_signature':
            # Skip any existing signature value, which would not have
            # been part of the original message.
            continue
        # hmac.update() requires bytes: encode the key the same way as
        # the value. Passing the raw key would raise TypeError on
        # Python 3 (or UnicodeEncodeError for non-ASCII unicode keys).
        # For plain ASCII keys the resulting digest is unchanged.
        digest_maker.update(six.text_type(name).encode('utf-8'))
        digest_maker.update(six.text_type(value).encode('utf-8'))
    return digest_maker.hexdigest()
def compute_signature(message, secret):
    """Return the signature for a message dictionary."""
    hasher = hmac.new(secret, '', hashlib.sha256)
    for key, value in utils.recursive_keypairs(message):
        # Skip any existing signature value, which would not have
        # been part of the original message.
        if key == 'message_signature':
            continue
        hasher.update(six.text_type(key).encode('utf-8'))
        hasher.update(six.text_type(value).encode('utf-8'))
    return hasher.hexdigest()
def test_recursive_keypairs():
    """Nested keys are joined with the default ':' separator."""
    data = {'a': 'A', 'b': 'B', 'nested': {'a': 'A', 'b': 'B'}}
    expected = [('a', 'A'), ('b', 'B'), ('nested:a', 'A'), ('nested:b', 'B')]
    assert list(utils.recursive_keypairs(data)) == expected
def test_recursive_keypairs_with_separator(self):
    """A custom separator replaces ':' when joining nested keys."""
    separator = '.'
    data = {'a': 'A', 'b': 'B', 'nested': {'a': 'A', 'b': 'B'}}
    flattened = list(utils.recursive_keypairs(data, separator))
    self.assertEqual([('a', 'A'), ('b', 'B'),
                      ('nested.a', 'A'), ('nested.b', 'B')],
                     flattened)
def test_recursive_keypairs_with_separator(self):
    """Passing '.' as the separator yields dot-joined key paths."""
    separator = '.'
    data = {'a': 'A', 'b': 'B', 'nested': {'a': 'A', 'b': 'B'}}
    expected = [('a', 'A'), ('b', 'B'), ('nested.a', 'A'), ('nested.b', 'B')]
    self.assertEqual(list(utils.recursive_keypairs(data, separator)),
                     expected)
def test_recursive_keypairs():
    """Flattening joins nested keys with ':' by default."""
    sample = {'a': 'A', 'b': 'B', 'nested': {'a': 'A', 'b': 'B'}}
    assert list(utils.recursive_keypairs(sample)) == [
        ('a', 'A'),
        ('b', 'B'),
        ('nested:a', 'A'),
        ('nested:b', 'B'),
    ]
def process_notification(self, message):
    """Return one 'instance.scheduled' delta counter built from *message*."""
    payload = message['payload']
    scheduled = counter.Counter(
        name='instance.scheduled',
        type=counter.TYPE_DELTA,
        volume=1,
        unit='instance',
        user_id=None,
        project_id=payload['request_spec']
        ['instance_properties']['project_id'],
        resource_id=payload['instance_id'],
        timestamp=message['timestamp'],
        # Flatten the whole payload into the counter's metadata.
        resource_metadata=dict(utils.recursive_keypairs(payload)),
    )
    return [scheduled]
def process_notification(self, message):
    """Return one 'instance.scheduled' delta counter built from *message*."""
    instance_props = (message['payload']['request_spec']
                      ['instance_properties'])
    return [counter.Counter(
        name='instance.scheduled',
        type=counter.TYPE_DELTA,
        volume=1,
        unit='instance',
        user_id=None,
        project_id=instance_props['project_id'],
        resource_id=message['payload']['instance_id'],
        timestamp=message['timestamp'],
        # Flatten the whole payload into the counter's metadata.
        resource_metadata=dict(utils.recursive_keypairs(message['payload'])),
    )]
def flatten_metadata(metadata):
    """Return flattened resource metadata.

    Metadata is returned with flattened nested structures (except nested
    sets) and with all values converted to unicode strings.
    """
    if not metadata:
        return {}
    # After changing recursive_keypairs` output we need to keep
    # flattening output unchanged.
    # Example: recursive_keypairs({'a': {'b':{'c':'d'}}}, '.')
    # output before: a.b:c=d
    # output now: a.b.c=d
    # So to keep the first variant just replace all dots except the first
    flattened = {}
    for key, value in utils.recursive_keypairs(metadata, separator='.'):
        if type(value) is set:
            continue
        fixed_key = key.replace('.', ':').replace(':', '.', 1)
        flattened[fixed_key] = six.text_type(value)
    return flattened
def flatten_metadata(metadata):
    """Return flattened resource metadata.

    Metadata is returned with flattened nested structures (except nested
    sets) and with all values converted to unicode strings.
    """
    if metadata:
        # After changing recursive_keypairs` output we need to keep
        # flattening output unchanged.
        # Example: recursive_keypairs({'a': {'b':{'c':'d'}}}, '.')
        # output before: a.b:c=d
        # output now: a.b.c=d
        # So to keep the first variant just replace all dots except the
        # first
        pairs = utils.recursive_keypairs(metadata, separator='.')
        return dict((key.replace('.', ':').replace(':', '.', 1),
                     six.text_type(value))
                    for key, value in pairs
                    if type(value) is not set)
    return {}
def _flatten_capabilities(capabilities):
    """Return the capabilities tree flattened into a single-level dict."""
    # recursive_keypairs already yields (key, value) pairs, so they can
    # be fed straight to dict().
    return dict(utils.recursive_keypairs(capabilities))
def record_metering_data(self, data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
                 ceilometer.meter.meter_message_from_counter
    """
    project_table = self.conn.table(self.PROJECT_TABLE)
    user_table = self.conn.table(self.USER_TABLE)
    resource_table = self.conn.table(self.RESOURCE_TABLE)
    meter_table = self.conn.table(self.METER_TABLE)

    # store metadata fields with prefix "r_"
    resource_metadata = {}
    res_meta_copy = data['resource_metadata']
    if res_meta_copy:
        for key, v in utils.recursive_keypairs(res_meta_copy,
                                               separator='.'):
            resource_metadata['f:r_%s' % key] = unicode(v)

    # Make sure we know about the user and project
    if data['user_id']:
        user = user_table.row(data['user_id'])
        sources = _load_hbase_list(user, 's')
        # Update if source is new
        if data['source'] not in sources:
            user['f:s_%s' % data['source']] = "1"
            user_table.put(data['user_id'], user)

    project = project_table.row(data['project_id'])
    sources = _load_hbase_list(project, 's')
    # Update if source is new
    if data['source'] not in sources:
        project['f:s_%s' % data['source']] = "1"
        project_table.put(data['project_id'], project)

    rts = reverse_timestamp(data['timestamp'])

    # Build the resource row: identity columns plus one "m_<meter>"
    # marker column per known meter, plus the "r_" metadata columns.
    resource = resource_table.row(data['resource_id'])
    new_meter = _format_meter_reference(
        data['counter_name'], data['counter_type'], data['counter_unit'])
    new_resource = {'f:resource_id': data['resource_id'],
                    'f:project_id': data['project_id'],
                    'f:user_id': data['user_id'],
                    'f:source': data["source"],
                    # store meters with prefix "m_"
                    'f:m_%s' % new_meter: "1"
                    }
    new_resource.update(resource_metadata)

    # Update if resource has new information
    if new_resource != resource:
        meters = _load_hbase_list(resource, 'm')
        if new_meter not in meters:
            new_resource['f:m_%s' % new_meter] = "1"
        resource_table.put(data['resource_id'], new_resource)

    # Rowkey consists of reversed timestamp, meter and an md5 of
    # user+resource+project for purposes of uniqueness
    m = hashlib.md5()
    m.update("%s%s%s" % (data['user_id'],
                         data['resource_id'],
                         data['project_id']))
    # We use reverse timestamps in rowkeys as they are sorted
    # alphabetically.
    row = "%s_%d_%s" % (data['counter_name'], rts, m.hexdigest())

    # Convert timestamp to string as json.dumps won't
    ts = timeutils.strtime(data['timestamp'])

    record = {'f:timestamp': ts,
              'f:counter_name': data['counter_name'],
              'f:counter_type': data['counter_type'],
              'f:counter_volume': str(data['counter_volume']),
              'f:counter_unit': data['counter_unit'],
              # TODO(shengjie) consider using QualifierFilter
              # keep dimensions as column qualifier for quicker look up
              # TODO(shengjie) extra dimensions need to be added as CQ
              'f:user_id': data['user_id'],
              'f:project_id': data['project_id'],
              'f:message_id': data['message_id'],
              'f:resource_id': data['resource_id'],
              'f:source': data['source'],
              # add in reversed_ts here for time range scan
              'f:rts': str(rts)
              }
    # Need to record resource_metadata for more robust filtering.
    record.update(resource_metadata)

    # Don't want to be changing the original data object.
    data = copy.copy(data)
    data['timestamp'] = ts
    # Save original meter.
    record['f:message'] = json.dumps(data)
    meter_table.put(row, record)
def record_metering_data(self, data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
                 ceilometer.meter.meter_message_from_counter
    """
    project_table = self.conn.table(self.PROJECT_TABLE)
    user_table = self.conn.table(self.USER_TABLE)
    resource_table = self.conn.table(self.RESOURCE_TABLE)
    meter_table = self.conn.table(self.METER_TABLE)

    # store metadata fields with prefix "r_"
    resource_metadata = {}
    res_meta_copy = data['resource_metadata']
    if res_meta_copy:
        for key, v in utils.recursive_keypairs(res_meta_copy,
                                               separator='.'):
            resource_metadata['f:r_%s' % key] = unicode(v)

    # Make sure we know about the user and project
    if data['user_id']:
        user = user_table.row(data['user_id'])
        sources = _load_hbase_list(user, 's')
        # Update if source is new
        if data['source'] not in sources:
            user['f:s_%s' % data['source']] = "1"
            user_table.put(data['user_id'], user)

    project = project_table.row(data['project_id'])
    sources = _load_hbase_list(project, 's')
    # Update if source is new
    if data['source'] not in sources:
        project['f:s_%s' % data['source']] = "1"
        project_table.put(data['project_id'], project)

    rts = reverse_timestamp(data['timestamp'])

    # Build the resource row: identity columns plus one "m_<meter>"
    # marker column per known meter, plus the "r_" metadata columns.
    resource = resource_table.row(data['resource_id'])
    new_meter = _format_meter_reference(
        data['counter_name'], data['counter_type'], data['counter_unit'])
    new_resource = {'f:resource_id': data['resource_id'],
                    'f:project_id': data['project_id'],
                    'f:user_id': data['user_id'],
                    'f:source': data["source"],
                    # store meters with prefix "m_"
                    'f:m_%s' % new_meter: "1"
                    }
    new_resource.update(resource_metadata)

    # Update if resource has new information
    if new_resource != resource:
        meters = _load_hbase_list(resource, 'm')
        if new_meter not in meters:
            new_resource['f:m_%s' % new_meter] = "1"
        resource_table.put(data['resource_id'], new_resource)

    # Rowkey consists of reversed timestamp, meter and an md5 of
    # user+resource+project for purposes of uniqueness
    m = hashlib.md5()
    m.update("%s%s%s" % (data['user_id'],
                         data['resource_id'],
                         data['project_id']))
    # We use reverse timestamps in rowkeys as they are sorted
    # alphabetically.
    row = "%s_%d_%s" % (data['counter_name'], rts, m.hexdigest())

    recorded_at = timeutils.utcnow()
    # Convert timestamp to string as json.dumps won't
    ts = timeutils.strtime(data['timestamp'])
    recorded_at_ts = timeutils.strtime(recorded_at)

    record = {'f:timestamp': ts,
              'f:counter_name': data['counter_name'],
              'f:counter_type': data['counter_type'],
              'f:counter_volume': str(data['counter_volume']),
              'f:counter_unit': data['counter_unit'],
              # TODO(shengjie) consider using QualifierFilter
              # keep dimensions as column qualifier for quicker look up
              # TODO(shengjie) extra dimensions need to be added as CQ
              'f:user_id': data['user_id'],
              'f:project_id': data['project_id'],
              'f:message_id': data['message_id'],
              'f:resource_id': data['resource_id'],
              'f:source': data['source'],
              # NOTE(review): this stores the datetime object itself,
              # while data['recorded_at'] below gets the string form
              # (recorded_at_ts) -- confirm the backend accepts a
              # datetime value here, otherwise recorded_at_ts looks
              # like the intended value.
              'f:recorded_at': recorded_at,
              # add in reversed_ts here for time range scan
              'f:rts': str(rts)
              }
    # Need to record resource_metadata for more robust filtering.
    record.update(resource_metadata)

    # Don't want to be changing the original data object.
    data = copy.copy(data)
    data['timestamp'] = ts
    data['recorded_at'] = recorded_at_ts
    # Save original meter.
    record['f:message'] = json.dumps(data)
    meter_table.put(row, record)