def _iter_flow(extractor, data):
    """Yield one extracted sample per flow statistic in *data*.

    Each statistic's flow attributes are flattened into resource
    metadata (dots in keys rewritten to underscores) before the
    statistic is handed to *extractor*.
    """
    for node_stats in data["flow"]["flowStatistics"]:
        node_id = node_stats["node"]["id"]
        for stat in node_stats["flowStatistic"]:
            resource_meta = {
                "flow_id": stat["flow"]["id"],
                "table_id": stat["tableId"],
            }
            flattened = utils.dict_to_keyval(stat["flow"], "flow")
            resource_meta.update(
                (key.replace(".", "_"), value) for key, value in flattened)
            yield extractor(stat, node_id, resource_meta)
def upgrade(migrate_engine):
    """Create the typed metadata tables and backfill them from ``meter``.

    For every (name, column type, nullable) triple in ``tables`` a table
    keyed on (meter id, meta_key) is created; each meter row's JSON
    ``resource_metadata`` blob is then flattened and every leaf value is
    inserted into the table matching its Python type.
    """
    meta = MetaData(bind=migrate_engine)
    meter = Table("meter", meta, autoload=True)

    meta_tables = {}
    for table_name, value_type, value_nullable in tables:
        new_table = Table(
            table_name,
            meta,
            Column("id", Integer, ForeignKey("meter.id"), primary_key=True),
            Column("meta_key", String(255), index=True, primary_key=True),
            Column("value", value_type, nullable=value_nullable),
            mysql_engine="InnoDB",
            mysql_charset="utf8",
        )
        new_table.create()
        meta_tables[table_name] = new_table

    for row in select([meter]).execute():
        if not row["resource_metadata"]:
            continue
        meter_id = row["id"]
        flattened = utils.dict_to_keyval(json.loads(row["resource_metadata"]))
        for key, value in flattened:
            # Route each value to the table for its type; values of any
            # other type are skipped entirely.  bool is tested before the
            # integer check because bool is a subclass of int.
            if isinstance(value, six.string_types) or value is None:
                table_key = "metadata_text"
            elif isinstance(value, bool):
                table_key = "metadata_bool"
            elif isinstance(value, six.integer_types):
                table_key = "metadata_int"
            elif isinstance(value, float):
                table_key = "metadata_float"
            else:
                table_key = None
            if table_key is not None:
                meta_tables[table_key].insert().values(
                    id=meter_id, meta_key=key, value=value).execute()
def upgrade(migrate_engine):
    """Create the typed metadata tables and backfill them from ``meter``.

    Each meter row's JSON ``resource_metadata`` blob is flattened and
    every leaf value is inserted into the metadata table matching its
    Python type (text/bool/int/float); other types are skipped.
    """
    meta = MetaData(bind=migrate_engine)
    meter = Table('meter', meta, autoload=True)
    meta_tables = {}
    for t_name, t_type, t_nullable in tables:
        meta_tables[t_name] = Table(
            t_name, meta,
            Column('id', Integer, ForeignKey('meter.id'), primary_key=True),
            Column('meta_key', String(255), index=True, primary_key=True),
            Column('value', t_type, nullable=t_nullable),
            mysql_engine='InnoDB',
            mysql_charset='utf8',
        )
        meta_tables[t_name].create()
    for row in select([meter]).execute():
        if row['resource_metadata']:
            meter_id = row['id']
            rmeta = json.loads(row['resource_metadata'])
            for key, v in utils.dict_to_keyval(rmeta):
                ins = None
                if isinstance(v, six.string_types) or v is None:
                    ins = meta_tables['metadata_text'].insert()
                elif isinstance(v, bool):
                    ins = meta_tables['metadata_bool'].insert()
                # BUG FIX: `long` does not exist on Python 3 and raised
                # NameError there; six.integer_types equals (int, long)
                # on Python 2 and (int,) on Python 3, matching the other
                # migration variants in this file.
                elif isinstance(v, six.integer_types):
                    ins = meta_tables['metadata_int'].insert()
                elif isinstance(v, float):
                    ins = meta_tables['metadata_float'].insert()
                if ins is not None:
                    ins.values(id=meter_id, meta_key=key, value=v).execute()
def upgrade(migrate_engine):
    """Create typed metadata tables and populate them from meter rows.

    Flattens every meter row's JSON ``resource_metadata`` and stores each
    leaf value in the table corresponding to its Python type.
    """
    meta = MetaData(bind=migrate_engine)
    meter = Table('meter', meta, autoload=True)

    meta_tables = {}
    for name, col_type, nullable in tables:
        tbl = Table(
            name, meta,
            Column('id', Integer, ForeignKey('meter.id'), primary_key=True),
            Column('meta_key', String(255), index=True, primary_key=True),
            Column('value', col_type, nullable=nullable),
            mysql_engine='InnoDB',
            mysql_charset='utf8',
        )
        tbl.create()
        meta_tables[name] = tbl

    def _target_table(value):
        # strings/None win first; bool must be tested before the integer
        # check because bool is a subclass of int.
        if isinstance(value, six.string_types) or value is None:
            return meta_tables['metadata_text']
        if isinstance(value, bool):
            return meta_tables['metadata_bool']
        if isinstance(value, six.integer_types):
            return meta_tables['metadata_int']
        if isinstance(value, float):
            return meta_tables['metadata_float']
        return None

    for row in select([meter]).execute():
        if not row['resource_metadata']:
            continue
        rmeta = json.loads(row['resource_metadata'])
        for key, value in utils.dict_to_keyval(rmeta):
            target = _target_table(value)
            if target is not None:
                target.insert().values(
                    id=row['id'], meta_key=key, value=value).execute()
def record_metering_data(self, data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
                 ceilometer.meter.meter_message_from_counter
    """
    session = self._engine_facade.get_session()
    with session.begin():
        # Record the raw data for the sample.
        rmetadata = data["resource_metadata"]
        meter = self._create_meter(session,
                                   data["counter_name"],
                                   data["counter_type"],
                                   data["counter_unit"])
        sample = models.Sample(meter_id=meter.id)
        session.add(sample)
        # (sample attribute, incoming message key) pairs, applied in order.
        for attr, msg_key in (("resource_id", "resource_id"),
                              ("project_id", "project_id"),
                              ("user_id", "user_id"),
                              ("timestamp", "timestamp"),
                              ("resource_metadata", "resource_metadata"),
                              ("volume", "counter_volume"),
                              ("message_signature", "message_signature"),
                              ("message_id", "message_id"),
                              ("source_id", "source")):
            setattr(sample, attr, data[msg_key])
        session.flush()

        if rmetadata and isinstance(rmetadata, dict):
            # Store each flattened metadata pair in the table matching
            # the value's type; unmapped types are logged and skipped.
            for meta_key, meta_value in utils.dict_to_keyval(rmetadata):
                try:
                    model_cls = sql_utils.META_TYPE_MAP[type(meta_value)]
                except KeyError:
                    LOG.warn(_("Unknown metadata type. Key (%s) will "
                               "not be queryable."), meta_key)
                else:
                    session.add(model_cls(id=sample.id,
                                          meta_key=meta_key,
                                          value=meta_value))
def _create_resource(conn, res_id, user_id, project_id, source_id, rmeta):
    # TODO(gordc): implement lru_cache to improve performance
    try:
        res = models.Resource.__table__
        m_hash = jsonutils.dumps(rmeta, sort_keys=True)
        if six.PY3:
            # hashlib requires bytes on Python 3.
            m_hash = m_hash.encode('utf-8')
        m_hash = hashlib.md5(m_hash).hexdigest()
        trans = conn.begin_nested()
        if conn.dialect.name == 'sqlite':
            trans = conn.begin()
        with trans:
            # Look for an existing resource with identical identity and
            # metadata hash; reuse its internal id if present.
            lookup = sa.select([res.c.internal_id]).where(
                sa.and_(res.c.resource_id == res_id,
                        res.c.user_id == user_id,
                        res.c.project_id == project_id,
                        res.c.source_id == source_id,
                        res.c.metadata_hash == m_hash))
            existing = conn.execute(lookup).first()
            internal_id = existing[0] if existing else None
            if internal_id is None:
                inserted = conn.execute(res.insert(),
                                        resource_id=res_id,
                                        user_id=user_id,
                                        project_id=project_id,
                                        source_id=source_id,
                                        resource_metadata=rmeta,
                                        metadata_hash=m_hash)
                internal_id = inserted.inserted_primary_key[0]
                if rmeta and isinstance(rmeta, dict):
                    # Batch metadata rows per target model so each table
                    # gets a single multi-row insert.
                    grouped = {}
                    for meta_key, meta_value in utils.dict_to_keyval(rmeta):
                        try:
                            model = sql_utils.META_TYPE_MAP[type(meta_value)]
                        except KeyError:
                            LOG.warning(
                                _("Unknown metadata type. Key "
                                  "(%s) will not be queryable."), meta_key)
                        else:
                            grouped.setdefault(model, []).append(
                                {'id': internal_id,
                                 'meta_key': meta_key,
                                 'value': meta_value})
                    for model, rows in grouped.items():
                        conn.execute(model.__table__.insert(), rows)
    except dbexc.DBDuplicateEntry:
        # retry function to pick up duplicate committed object
        internal_id = Connection._create_resource(conn, res_id, user_id,
                                                  project_id, source_id,
                                                  rmeta)
    return internal_id
def _iter_flow(extractor, data):
    """Generate extracted samples from a flow-statistics payload.

    Flattens each statistic's flow attributes into its resource metadata
    ('.' in keys rewritten to '_') and yields the extractor's result.
    """
    for flow_stats in data['flow']['flowStatistics']:
        for entry in flow_stats['flowStatistic']:
            meta = dict(flow_id=entry['flow']['id'],
                        table_id=entry['tableId'])
            for k, v in utils.dict_to_keyval(entry['flow'], 'flow'):
                meta[k.replace('.', '_')] = v
            yield extractor(entry, flow_stats['node']['id'], meta)
def _create_resource(conn, res_id, user_id, project_id, source_id, rmeta):
    # TODO(gordc): implement lru_cache to improve performance
    try:
        resource_table = models.Resource.__table__
        serialized = jsonutils.dumps(rmeta, sort_keys=True)
        if six.PY3:
            # md5 needs a bytes payload on Python 3.
            serialized = serialized.encode('utf-8')
        metadata_hash = hashlib.md5(serialized).hexdigest()
        trans = conn.begin_nested()
        if conn.dialect.name == 'sqlite':
            trans = conn.begin()
        with trans:
            match = conn.execute(
                sa.select([resource_table.c.internal_id]).where(sa.and_(
                    resource_table.c.resource_id == res_id,
                    resource_table.c.user_id == user_id,
                    resource_table.c.project_id == project_id,
                    resource_table.c.source_id == source_id,
                    resource_table.c.metadata_hash == metadata_hash))
            ).first()
            internal_id = match[0] if match else None
            if internal_id is None:
                created = conn.execute(resource_table.insert(),
                                       resource_id=res_id,
                                       user_id=user_id,
                                       project_id=project_id,
                                       source_id=source_id,
                                       resource_metadata=rmeta,
                                       metadata_hash=metadata_hash)
                internal_id = created.inserted_primary_key[0]
                if rmeta and isinstance(rmeta, dict):
                    # Group the flattened metadata rows by model so each
                    # metadata table receives one bulk insert.
                    per_model = {}
                    for meta_key, meta_value in utils.dict_to_keyval(rmeta):
                        try:
                            model = sql_utils.META_TYPE_MAP[type(meta_value)]
                        except KeyError:
                            LOG.warning(_("Unknown metadata type. Key "
                                          "(%s) will not be queryable."),
                                        meta_key)
                        else:
                            per_model.setdefault(model, []).append(
                                {'id': internal_id,
                                 'meta_key': meta_key,
                                 'value': meta_value})
                    for model, rows in per_model.items():
                        conn.execute(model.__table__.insert(), rows)
    except dbexc.DBDuplicateEntry:
        # retry function to pick up duplicate committed object
        internal_id = Connection._create_resource(
            conn, res_id, user_id, project_id, source_id, rmeta)
    return internal_id
def _real_record_metering_data(cls, data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
                 ceilometer.meter.meter_message_from_counter
    """
    session = sqlalchemy_session.get_session()
    with session.begin():
        # Record the updated resource metadata
        rmetadata = data['resource_metadata']
        source = cls._create_or_update(session, models.Source,
                                       data['source'])
        user = cls._create_or_update(session, models.User,
                                     data['user_id'], source)
        project = cls._create_or_update(session, models.Project,
                                        data['project_id'], source)
        resource = cls._create_or_update(session, models.Resource,
                                         data['resource_id'], source,
                                         user=user, project=project,
                                         resource_metadata=rmetadata)

        # Record the raw data for the meter.
        meter = models.Meter(counter_type=data['counter_type'],
                             counter_unit=data['counter_unit'],
                             counter_name=data['counter_name'],
                             resource=resource)
        session.add(meter)
        # BUG FIX: on Python 3 filter() returns a lazy iterator which is
        # always truthy, so `if not filter(...)` could never append the
        # source.  any() has the intended semantics on both 2 and 3.
        if not any(x.id == source.id for x in meter.sources):
            meter.sources.append(source)
        meter.project = project
        meter.user = user
        meter.timestamp = data['timestamp']
        meter.resource_metadata = rmetadata
        meter.counter_volume = data['counter_volume']
        meter.message_signature = data['message_signature']
        meter.message_id = data['message_id']

        session.flush()

        if rmetadata:
            if isinstance(rmetadata, dict):
                # Store each flattened metadata pair in the typed table
                # for its value; unmapped types are logged and skipped.
                for key, v in utils.dict_to_keyval(rmetadata):
                    try:
                        _model = META_TYPE_MAP[type(v)]
                    except KeyError:
                        LOG.warn(_("Unknown metadata type. Key (%s) will "
                                   "not be queryable."), key)
                    else:
                        session.add(_model(id=meter.id, meta_key=key,
                                           value=v))
def record_metering_data(self, data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
                 ceilometer.meter.meter_message_from_counter
    """
    session = self._get_db_session()
    with session.begin():
        # Record the updated resource metadata
        rmetadata = data["resource_metadata"]
        source = self._create_or_update(session, models.Source,
                                        data["source"])
        user = self._create_or_update(session, models.User,
                                      data["user_id"], source)
        project = self._create_or_update(session, models.Project,
                                         data["project_id"], source)
        resource = self._create_or_update(
            session,
            models.Resource,
            data["resource_id"],
            source,
            user=user,
            project=project,
            resource_metadata=rmetadata,
        )

        # Record the raw data for the meter.
        meter = models.Meter(
            counter_type=data["counter_type"],
            counter_unit=data["counter_unit"],
            counter_name=data["counter_name"],
            resource=resource,
        )
        session.add(meter)
        # BUG FIX: on Python 3 filter() returns a lazy iterator which is
        # always truthy, so `if not filter(...)` could never append the
        # source.  any() has the intended semantics on both 2 and 3.
        if not any(x.id == source.id for x in meter.sources):
            meter.sources.append(source)
        meter.project = project
        meter.user = user
        meter.timestamp = data["timestamp"]
        meter.resource_metadata = rmetadata
        meter.counter_volume = data["counter_volume"]
        meter.message_signature = data["message_signature"]
        meter.message_id = data["message_id"]

        session.flush()

        if rmetadata:
            if isinstance(rmetadata, dict):
                # Store each flattened metadata pair in the typed table
                # for its value; unmapped types are logged and skipped.
                for key, v in utils.dict_to_keyval(rmetadata):
                    try:
                        _model = META_TYPE_MAP[type(v)]
                    except KeyError:
                        LOG.warn(_("Unknown metadata type. Key (%s) will "
                                   "not be queryable."), key)
                    else:
                        session.add(_model(id=meter.id, meta_key=key,
                                           value=v))
def test_dict_to_kv(self):
    """dict_to_keyval flattens nested dicts and lists into dotted keys."""
    data = {'a': 'A',
            'b': 'B',
            'nested': {'a': 'A',
                       'b': 'B', },
            'nested2': [{'c': 'A'}, {'c': 'B'}]
            }
    pairs = list(utils.dict_to_keyval(data))
    # BUG FIX: the previous assertion depended on dict iteration order,
    # which is not guaranteed across interpreters/hash seeds; sort the
    # pairs before comparing, and pass the expected value first per the
    # assertEqual(expected, actual) convention used elsewhere.
    self.assertEqual([('a', 'A'),
                      ('b', 'B'),
                      ('nested.a', 'A'),
                      ('nested.b', 'B'),
                      ('nested2[0].c', 'A'),
                      ('nested2[1].c', 'B')],
                     sorted(pairs, key=lambda x: x[0]))
def test_dict_to_kv(self):
    """dict_to_keyval flattens nested dicts and lists into dotted keys."""
    data = {'a': 'A',
            'b': 'B',
            'nested': {'a': 'A', 'b': 'B'},
            'nested2': [{'c': 'A'}, {'c': 'B'}]}
    expected = [('a', 'A'),
                ('b', 'B'),
                ('nested.a', 'A'),
                ('nested.b', 'B'),
                ('nested2[0].c', 'A'),
                ('nested2[1].c', 'B')]
    # Sort so the comparison is independent of dict iteration order.
    actual = sorted(utils.dict_to_keyval(data), key=lambda pair: pair[0])
    self.assertEqual(expected, actual)
def _create_resource(session, res_id, user_id, project_id, source_id, rmeta):
    # TODO(gordc): implement lru_cache to improve performance
    try:
        # sqlite needs subtransactions instead of SAVEPOINT nesting.
        nested = session.connection().dialect.name != 'sqlite'
        m_hash = jsonutils.dumps(rmeta, sort_keys=True)
        # BUG FIX: hashlib.md5 requires bytes on Python 3; encode the
        # serialized metadata first (matches the other _create_resource
        # variants in this file).  Hoisting the digest also avoids
        # recomputing it inside the query expression.
        if six.PY3:
            m_hash = m_hash.encode('utf-8')
        m_hash = hashlib.md5(m_hash).hexdigest()
        with session.begin(nested=nested, subtransactions=not nested):
            # Reuse an existing resource row with the same identity and
            # metadata hash, if one exists.
            obj = (session.query(models.Resource.internal_id)
                   .filter(models.Resource.resource_id == res_id)
                   .filter(models.Resource.user_id == user_id)
                   .filter(models.Resource.project_id == project_id)
                   .filter(models.Resource.source_id == source_id)
                   .filter(models.Resource.metadata_hash == m_hash)
                   .first())
            obj_id = obj[0] if obj else None
            if obj_id is None:
                obj = models.Resource(resource_id=res_id,
                                      user_id=user_id,
                                      project_id=project_id,
                                      source_id=source_id,
                                      resource_metadata=rmeta)
                session.add(obj)
                session.flush()
                obj_id = obj.internal_id
                if rmeta and isinstance(rmeta, dict):
                    # Batch metadata rows per model for bulk insert.
                    meta_map = {}
                    for key, v in utils.dict_to_keyval(rmeta):
                        try:
                            _model = sql_utils.META_TYPE_MAP[type(v)]
                            if meta_map.get(_model) is None:
                                meta_map[_model] = []
                            meta_map[_model].append(
                                {'id': obj_id, 'meta_key': key,
                                 'value': v})
                        except KeyError:
                            LOG.warn(_("Unknown metadata type. Key (%s) "
                                       "will not be queryable."), key)
                    for _model in meta_map.keys():
                        session.execute(_model.__table__.insert(),
                                        meta_map[_model])
    except dbexc.DBDuplicateEntry:
        # retry function to pick up duplicate committed object
        obj_id = Connection._create_resource(session, res_id, user_id,
                                             project_id, source_id, rmeta)
    return obj_id
def record_metering_data(self, data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
                 ceilometer.meter.meter_message_from_counter
    """
    session = self._engine_facade.get_session()
    with session.begin():
        # Record the raw data for the sample.
        metadata = data['resource_metadata']
        meter = self._create_meter(session,
                                   data['counter_name'],
                                   data['counter_type'],
                                   data['counter_unit'])
        sample = models.Sample(meter_id=meter.id)
        session.add(sample)
        sample.resource_id = data['resource_id']
        sample.project_id = data['project_id']
        sample.user_id = data['user_id']
        sample.timestamp = data['timestamp']
        sample.resource_metadata = metadata
        sample.volume = data['counter_volume']
        sample.message_signature = data['message_signature']
        sample.message_id = data['message_id']
        sample.source_id = data['source']
        session.flush()

        # Nothing further to do unless there is dict-shaped metadata.
        if not (metadata and isinstance(metadata, dict)):
            return
        for key, value in utils.dict_to_keyval(metadata):
            try:
                model = sql_utils.META_TYPE_MAP[type(value)]
            except KeyError:
                LOG.warn(_("Unknown metadata type. Key (%s) will "
                           "not be queryable."), key)
            else:
                session.add(model(id=sample.id, meta_key=key,
                                  value=value))
def dump_metadata(meta):
    """Return *meta* flattened into a single-level key/value dict."""
    return dict(utils.dict_to_keyval(meta))
def record_metering_data(data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
                 ceilometer.meter.meter_message_from_counter
    """
    session = sqlalchemy_session.get_session()
    with session.begin():
        if data['source']:
            source = session.query(Source).get(data['source'])
            if not source:
                source = Source(id=data['source'])
                session.add(source)
        else:
            source = None

        # create/update user && project, add/update their sources list
        if data['user_id']:
            user = session.merge(User(id=str(data['user_id'])))
            # BUG FIX: `if not filter(...)` is always False on Python 3
            # because filter() returns a (truthy) lazy iterator; any()
            # behaves the same on Python 2 and 3.  Applied to every
            # sources-membership check below.
            if not any(x.id == source.id for x in user.sources):
                user.sources.append(source)
        else:
            user = None

        if data['project_id']:
            project = session.merge(Project(id=str(data['project_id'])))
            if not any(x.id == source.id for x in project.sources):
                project.sources.append(source)
        else:
            project = None

        # Record the updated resource metadata
        rmetadata = data['resource_metadata']
        resource = session.merge(Resource(id=str(data['resource_id'])))
        if not any(x.id == source.id for x in resource.sources):
            resource.sources.append(source)
        resource.project = project
        resource.user = user
        # Current metadata being used and when it was last updated.
        resource.resource_metadata = rmetadata

        # Record the raw data for the meter.
        meter = Meter(counter_type=data['counter_type'],
                      counter_unit=data['counter_unit'],
                      counter_name=data['counter_name'],
                      resource=resource)
        session.add(meter)
        if not any(x.id == source.id for x in meter.sources):
            meter.sources.append(source)
        meter.project = project
        meter.user = user
        meter.timestamp = data['timestamp']
        meter.resource_metadata = rmetadata
        meter.counter_volume = data['counter_volume']
        meter.message_signature = data['message_signature']
        meter.message_id = data['message_id']

        session.flush()

        if rmetadata:
            if isinstance(rmetadata, dict):
                for key, v in utils.dict_to_keyval(rmetadata):
                    try:
                        _model = META_TYPE_MAP[type(v)]
                    except KeyError:
                        # BUG FIX: the log message contained a stray
                        # literal newline that broke the string constant;
                        # restored to match the sibling implementations.
                        LOG.warn(_("Unknown metadata type. Key (%s) will "
                                   "not be queryable."), key)
                    else:
                        session.add(_model(id=meter.id, meta_key=key,
                                           value=v))
        session.flush()
def record_metering_data(data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
                 ceilometer.meter.meter_message_from_counter
    """
    session = sqlalchemy_session.get_session()
    with session.begin():
        if data['source']:
            source = session.query(Source).get(data['source'])
            if not source:
                source = Source(id=data['source'])
                session.add(source)
        else:
            source = None

        # create/update user && project, add/update their sources list
        if data['user_id']:
            user = session.merge(User(id=str(data['user_id'])))
            # BUG FIX: on Python 3 filter() yields a lazy, always-truthy
            # iterator, so `if not filter(...)` never appended the
            # source; any() restores the intended membership test on
            # both Python versions (applied to all checks below).
            if not any(x.id == source.id for x in user.sources):
                user.sources.append(source)
        else:
            user = None

        if data['project_id']:
            project = session.merge(Project(id=str(data['project_id'])))
            if not any(x.id == source.id for x in project.sources):
                project.sources.append(source)
        else:
            project = None

        # Record the updated resource metadata
        rmetadata = data['resource_metadata']
        resource = session.merge(Resource(id=str(data['resource_id'])))
        if not any(x.id == source.id for x in resource.sources):
            resource.sources.append(source)
        resource.project = project
        resource.user = user
        # Current metadata being used and when it was last updated.
        resource.resource_metadata = rmetadata

        # Record the raw data for the meter.
        meter = Meter(counter_type=data['counter_type'],
                      counter_unit=data['counter_unit'],
                      counter_name=data['counter_name'],
                      resource=resource)
        session.add(meter)
        if not any(x.id == source.id for x in meter.sources):
            meter.sources.append(source)
        meter.project = project
        meter.user = user
        meter.timestamp = data['timestamp']
        meter.resource_metadata = rmetadata
        meter.counter_volume = data['counter_volume']
        meter.message_signature = data['message_signature']
        meter.message_id = data['message_id']

        session.flush()

        if rmetadata:
            if isinstance(rmetadata, dict):
                for key, v in utils.dict_to_keyval(rmetadata):
                    try:
                        _model = META_TYPE_MAP[type(v)]
                    except KeyError:
                        # BUG FIX: removed the stray literal newline that
                        # split this string constant across lines.
                        LOG.warn(_("Unknown metadata type. Key (%s) will "
                                   "not be queryable."), key)
                    else:
                        session.add(
                            _model(id=meter.id, meta_key=key, value=v))
        session.flush()