def record_events(self, event_models):
    """Write the events to Hbase.

    :param event_models: a list of models.Event objects.
    :raises Exception: re-raises the last storage error encountered,
        after attempting to write every event in the batch.
    """
    error = None
    with self.conn_pool.connection() as conn:
        events_table = conn.table(self.EVENT_TABLE)
        for event_model in event_models:
            # Row key consists of timestamp and message_id from
            # models.Event for purposes of storage events sorted by
            # timestamp in the database.
            ts = event_model.generated
            row = hbase_utils.prepare_key(
                hbase_utils.timestamp(ts, reverse=False),
                event_model.message_id)
            event_type = event_model.event_type
            traits = {}
            if event_model.traits:
                for trait in event_model.traits:
                    key = hbase_utils.prepare_key(trait.name, trait.dtype)
                    traits[key] = trait.value
            record = hbase_utils.serialize_entry(traits,
                                                 event_type=event_type,
                                                 timestamp=ts,
                                                 raw=event_model.raw)
            try:
                events_table.put(row, record)
            except Exception as ex:
                # Pass the exception as a lazy logging argument instead of
                # eager %-formatting, so interpolation happens only when
                # the record is actually emitted.
                LOG.exception(_LE("Failed to record event: %s"), ex)
                error = ex
    if error:
        # Only the last failure is re-raised; earlier ones were logged.
        raise error
def record_events(self, event_models):
    """Write the events to Hbase.

    :param event_models: a list of models.Event objects.
    :raises Exception: re-raises the last storage error encountered,
        after attempting to write every event in the batch.
    """
    error = None
    with self.conn_pool.connection() as conn:
        events_table = conn.table(self.EVENT_TABLE)
        for event_model in event_models:
            # Row key consists of timestamp and message_id from
            # models.Event for purposes of storage events sorted by
            # timestamp in the database.
            ts = event_model.generated
            row = hbase_utils.prepare_key(
                hbase_utils.timestamp(ts, reverse=False),
                event_model.message_id)
            event_type = event_model.event_type
            traits = {}
            if event_model.traits:
                for trait in event_model.traits:
                    key = hbase_utils.prepare_key(trait.name, trait.dtype)
                    traits[key] = trait.value
            record = hbase_utils.serialize_entry(traits,
                                                 event_type=event_type,
                                                 timestamp=ts,
                                                 raw=event_model.raw)
            try:
                events_table.put(row, record)
            except Exception as ex:
                # Use lazy argument interpolation rather than eager
                # %-formatting in the log call.
                LOG.exception(_LE("Failed to record event: %s"), ex)
                error = ex
    if error:
        # Only the last failure is re-raised; all failures were logged.
        raise error
def record_metering_data(self, data): """Write the data to the backend storage system. :param data: a dictionary such as returned by ceilometer.publisher.utils.meter_message_from_counter """ # We must not record thing. data.pop("monotonic_time", None) with self.conn_pool.connection() as conn: resource_table = conn.table(self.RESOURCE_TABLE) meter_table = conn.table(self.METER_TABLE) resource_metadata = data.get('resource_metadata', {}) # Determine the name of new meter rts = hbase_utils.timestamp(data['timestamp']) new_meter = hbase_utils.prepare_key(rts, data['source'], data['counter_name'], data['counter_type'], data['counter_unit']) # TODO(nprivalova): try not to store resource_id resource = hbase_utils.serialize_entry( **{ 'source': data['source'], 'meter': { new_meter: data['timestamp'] }, 'resource_metadata': resource_metadata, 'resource_id': data['resource_id'], 'project_id': data['project_id'], 'user_id': data['user_id'] }) # Here we put entry in HBase with our own timestamp. This is needed # when samples arrive out-of-order # If we use timestamp=data['timestamp'] the newest data will be # automatically 'on the top'. It is needed to keep metadata # up-to-date: metadata from newest samples is considered as actual. ts = int(time.mktime(data['timestamp'].timetuple()) * 1000) resource_table.put(hbase_utils.encode_unicode(data['resource_id']), resource, ts) # Rowkey consists of reversed timestamp, meter and a # message uuid for purposes of uniqueness row = hbase_utils.prepare_key(data['counter_name'], rts, data['message_id']) record = hbase_utils.serialize_entry( data, **{ 'source': data['source'], 'rts': rts, 'message': data, 'recorded_at': timeutils.utcnow() }) meter_table.put(row, record)
def migrate_resource_table(conn, table):
    """Migrate table 'resource' in HBase.

    Change qualifiers format from "%s+%s+%s!%s!%s" %
    (rts, source, counter_name, counter_type, counter_unit)
    in columns with meters f:m_* to new separator format
    "%s:%s:%s:%s:%s" % (rts, source, counter_name, counter_type,
    counter_unit).

    :param conn: open HBase connection (happybase-style).
    :param table: name of the resource table to migrate in place.
    """
    resource_table = conn.table(table)
    # Server-side filter matching only old-format meter qualifiers.
    # NOTE(review): this string is evaluated as a Java regex by HBase, not
    # by Python, so it is kept byte-identical.
    resource_filter = ("QualifierFilter(=, "
                       "'regexstring:m_\\d{19}\\+"
                       "[\\w-\\._]*\\+[\\w-\\._!]')")
    gen = resource_table.scan(filter=resource_filter)
    for row, data in gen:
        columns = []
        # Use a dict literal instead of dict() per idiom.
        updated_columns = {}
        column_prefix = "f:"
        for column, value in data.items():
            if column.startswith('f:m_'):
                columns.append(column)
                # Split "rts+source+name!type!unit" into its five parts,
                # then re-join them with the prepare_key separator.
                parts = column[2:].split("+", 2)
                parts.extend(parts.pop(2).split("!"))
                column = hbase_utils.prepare_key(*parts)
                updated_columns[column_prefix + column] = value
        # Write new-format columns first, then drop the old ones.
        resource_table.put(row, updated_columns)
        resource_table.delete(row, columns)
def migrate_resource_table(conn, table):
    """Rewrite old-format meter qualifiers in the 'resource' table.

    Old qualifier layout: "%s+%s+%s!%s!%s" % (rts, source, counter_name,
    counter_type, counter_unit).  The migration re-joins the same five
    parts with the separator produced by hbase_utils.prepare_key:
    "%s:%s:%s:%s:%s".
    """
    table_handle = conn.table(table)
    # HBase-side filter selecting only rows that still carry the
    # old "+"/"!"-separated meter qualifiers.
    old_qualifier_filter = ("QualifierFilter(=, "
                            "'regexstring:m_\\d{19}\\+"
                            "[\\w-\\._]*\\+[\\w-\\._!]')")
    prefix = "f:"
    for row_key, cells in table_handle.scan(filter=old_qualifier_filter):
        stale_columns = []
        fresh_columns = dict()
        for qualifier, cell_value in cells.items():
            if not qualifier.startswith('f:m_'):
                continue
            stale_columns.append(qualifier)
            # "rts+source+name!type!unit" -> five separate parts.
            pieces = qualifier[2:].split("+", 2)
            pieces.extend(pieces.pop(2).split("!"))
            new_qualifier = prefix + hbase_utils.prepare_key(*pieces)
            fresh_columns[new_qualifier] = cell_value
        table_handle.put(row_key, fresh_columns)
        table_handle.delete(row_key, stale_columns)
def record_alarm_change(self, alarm_change):
    """Record alarm change event."""
    # Fall back to the current time when the change carries no timestamp.
    # NOTE(review): datetime.now() is naive local time — confirm UTC is
    # not expected here.
    change_ts = alarm_change.get('timestamp') or datetime.datetime.now()
    reversed_ts = hbase_utils.timestamp(change_ts)
    row_key = hbase_utils.prepare_key(alarm_change.get('alarm_id'),
                                      reversed_ts)
    payload = hbase_utils.serialize_entry(alarm_change)
    with self.conn_pool.connection() as conn:
        history = conn.table(self.ALARM_HISTORY_TABLE)
        history.put(row_key, payload)
def record_alarm_change(self, alarm_change):
    """Record alarm change event."""
    serialized = hbase_utils.serialize_entry(alarm_change)
    # Use the change's own timestamp when present; otherwise the current
    # time (NOTE(review): naive local time — verify UTC is not required).
    when = alarm_change.get('timestamp') or datetime.datetime.now()
    reversed_stamp = hbase_utils.timestamp(when)
    with self.conn_pool.connection() as conn:
        conn.table(self.ALARM_HISTORY_TABLE).put(
            hbase_utils.prepare_key(alarm_change.get('alarm_id'),
                                    reversed_stamp),
            serialized)
def record_metering_data(self, data):
    """Write the data to the backend storage system.

    :param data: a dictionary such as returned by
        ceilometer.meter.meter_message_from_counter
    """
    with self.conn_pool.connection() as conn:
        resources = conn.table(self.RESOURCE_TABLE)
        meters = conn.table(self.METER_TABLE)
        metadata = data.get("resource_metadata", {})
        # Reversed timestamp: newest samples sort first in scans.
        rts = hbase_utils.timestamp(data["timestamp"])
        meter_name = hbase_utils.prepare_key(
            rts, data["source"], data["counter_name"],
            data["counter_type"], data["counter_unit"])
        # TODO(nprivalova): try not to store resource_id
        resource_entry = hbase_utils.serialize_entry(
            source=data["source"],
            meter={meter_name: data["timestamp"]},
            resource_metadata=metadata,
            resource_id=data["resource_id"],
            project_id=data["project_id"],
            user_id=data["user_id"])
        # Write the resource entry with an explicit HBase timestamp so
        # out-of-order samples cannot clobber newer metadata: the cell
        # from the newest sample stays 'on the top'.
        put_ts = int(time.mktime(data["timestamp"].timetuple()) * 1000)
        resources.put(hbase_utils.encode_unicode(data["resource_id"]),
                      resource_entry, put_ts)
        # Row key = counter name + reversed timestamp + message uuid;
        # the uuid guarantees uniqueness.
        meter_row = hbase_utils.prepare_key(
            data["counter_name"], rts, data["message_id"])
        meter_record = hbase_utils.serialize_entry(
            data,
            source=data["source"],
            rts=rts,
            message=data,
            recorded_at=timeutils.utcnow())
        meters.put(meter_row, meter_record)
def migrate_alarm_history_table(conn, table):
    """Migrate table 'alarm_h' in HBase.

    Change row format from "%s_%s" % (alarm_id, rts)
    to new separator format "%s:%s" % (alarm_id, rts).
    """
    history = conn.table(table)
    # Match only rows that still use the old "_"-separated key format.
    old_row_filter = "RowFilter(=, 'regexstring:\\w*_\\d{19}')"
    for old_row, cells in history.scan(filter=old_row_filter):
        # rsplit keeps any '_' inside the alarm id intact.
        alarm_id, rts = old_row.rsplit('_', 1)
        history.put(hbase_utils.prepare_key(alarm_id, rts), cells)
        history.delete(old_row)
def migrate_alarm_history_table(conn, table):
    """Migrate table 'alarm_h' in HBase.

    Change row format from "%s_%s" % (alarm_id, rts)
    to new separator format "%s:%s" % (alarm_id, rts).
    """
    alarm_history = conn.table(table)
    legacy_filter = "RowFilter(=, 'regexstring:\\w*_\\d{19}')"
    scanner = alarm_history.scan(filter=legacy_filter)
    for legacy_row, payload in scanner:
        # Split off the trailing 19-digit reversed timestamp only;
        # underscores inside the alarm id are preserved.
        key_parts = legacy_row.rsplit('_', 1)
        new_key = hbase_utils.prepare_key(*key_parts)
        alarm_history.put(new_key, payload)
        alarm_history.delete(legacy_row)
def migrate_meter_table(conn, table):
    """Migrate table 'meter' in HBase.

    Change row format from "%s_%d_%s" % (counter_name, rts,
    message_signature) to new separator format
    "%s:%s:%s" % (counter_name, rts, message_signature).
    """
    meters = conn.table(table)
    # Select only rows still using the legacy "_"-separated key.
    legacy_row_filter = ("RowFilter(=, "
                         "'regexstring:[\\w\\._-]*_\\d{19}_\\w*')")
    for legacy_row, payload in meters.scan(filter=legacy_row_filter):
        # rsplit from the right: counter names may themselves contain '_'.
        name, rts, signature = legacy_row.rsplit('_', 2)
        meters.put(hbase_utils.prepare_key(name, rts, signature), payload)
        meters.delete(legacy_row)
def migrate_meter_table(conn, table):
    """Migrate table 'meter' in HBase.

    Change row format from "%s_%d_%s" % (counter_name, rts,
    message_signature) to new separator format
    "%s:%s:%s" % (counter_name, rts, message_signature).
    """
    meter_table = conn.table(table)
    old_key_filter = ("RowFilter(=, "
                      "'regexstring:[\\w\\._-]*_\\d{19}_\\w*')")
    scanner = meter_table.scan(filter=old_key_filter)
    for old_key, cells in scanner:
        # Right-split twice so '_' characters inside the counter name
        # stay part of the name.
        segments = old_key.rsplit("_", 2)
        meter_table.put(hbase_utils.prepare_key(*segments), cells)
        meter_table.delete(old_key)
def record_events(self, event_models):
    """Write the events to Hbase.

    :param event_models: a list of models.Event objects.
    :return problem_events: a list of events that could not be saved in a
        (reason, event) tuple. From the reasons that are enumerated in
        storage.models.Event only the UNKNOWN_PROBLEM is applicable here.
    """
    problem_events = []
    with self.conn_pool.connection() as conn:
        events_table = conn.table(self.EVENT_TABLE)
        for event_model in event_models:
            # Row key consists of timestamp and message_id from
            # models.Event for purposes of storage events sorted by
            # timestamp in the database.
            ts = event_model.generated
            row = hbase_utils.prepare_key(
                hbase_utils.timestamp(ts, reverse=False),
                event_model.message_id)
            event_type = event_model.event_type
            traits = {}
            if event_model.traits:
                for trait in event_model.traits:
                    key = hbase_utils.prepare_key(trait.name, trait.dtype)
                    traits[key] = trait.value
            record = hbase_utils.serialize_entry(traits,
                                                 event_type=event_type,
                                                 timestamp=ts,
                                                 raw=event_model.raw)
            try:
                events_table.put(row, record)
            except Exception as ex:
                # Lazy interpolation: the message is only formatted when
                # debug logging is actually enabled.
                LOG.debug(_("Failed to record event: %s"), ex)
                problem_events.append((models.Event.UNKNOWN_PROBLEM,
                                       event_model))
    return problem_events
def record_events(self, event_models):
    """Write the events to Hbase.

    :param event_models: a list of models.Event objects.
    :return problem_events: a list of events that could not be saved in a
        (reason, event) tuple. From the reasons that are enumerated in
        storage.models.Event only the UNKNOWN_PROBLEM is applicable here.
    """
    problem_events = []
    with self.conn_pool.connection() as conn:
        events_table = conn.table(self.EVENT_TABLE)
        for event_model in event_models:
            # Row key consists of timestamp and message_id from
            # models.Event for purposes of storage events sorted by
            # timestamp in the database.
            ts = event_model.generated
            row = hbase_utils.prepare_key(
                hbase_utils.timestamp(ts, reverse=False),
                event_model.message_id)
            event_type = event_model.event_type
            traits = {}
            if event_model.traits:
                for trait in event_model.traits:
                    key = hbase_utils.prepare_key(trait.name, trait.dtype)
                    traits[key] = trait.value
            record = hbase_utils.serialize_entry(traits,
                                                 event_type=event_type,
                                                 timestamp=ts,
                                                 raw=event_model.raw)
            try:
                events_table.put(row, record)
            except Exception as ex:
                # Pass the exception as a lazy log argument rather than
                # eagerly %-formatting the message.
                LOG.debug(_("Failed to record event: %s"), ex)
                problem_events.append(
                    (models.Event.UNKNOWN_PROBLEM, event_model))
    return problem_events
def migrate_event_table(conn, table):
    """Migrate table 'event' in HBase.

    Change row format from "%d_%s" % (timestamp, event_id)
    to new separator format "%s:%s" % (timestamp, event_id).
    Also change trait columns from "%s+%s" % (trait.name, trait.dtype)
    to "%s:%s" % (trait.name, trait.dtype).

    :param conn: open HBase connection (happybase-style).
    :param table: name of the event table to migrate in place.
    """
    event_table = conn.table(table)
    event_filter = "RowFilter(=, 'regexstring:\\d*_\\w*')"
    gen = event_table.scan(filter=event_filter)
    # Raw string literal: "\w", "\-" and "\+" are invalid escape
    # sequences in a plain string (warning since Python 3.6, error on
    # 3.12+); the pattern itself is unchanged.
    trait_pattern = re.compile(r"f:[\w\-_]*\+\w")
    column_prefix = "f:"
    for row, data in gen:
        row_parts = row.split("_", 1)
        update_data = {}
        for column, value in data.items():
            if trait_pattern.match(column):
                # "name+dtype" -> prepare_key(name, dtype)
                trait_parts = column[2:].rsplit('+', 1)
                column = hbase_utils.prepare_key(*trait_parts)
                update_data[column_prefix + column] = value
        new_row = hbase_utils.prepare_key(*row_parts)
        event_table.put(new_row, update_data)
        event_table.delete(row)
def migrate_event_table(conn, table):
    """Migrate table 'event' in HBase.

    Change row format from "%d_%s" % (timestamp, event_id)
    to new separator format "%s:%s" % (timestamp, event_id).
    Also change trait columns from "%s+%s" % (trait.name, trait.dtype)
    to "%s:%s" % (trait.name, trait.dtype).

    :param conn: open HBase connection (happybase-style).
    :param table: name of the event table to migrate in place.
    """
    event_table = conn.table(table)
    event_filter = "RowFilter(=, 'regexstring:\\d*_\\w*')"
    gen = event_table.scan(filter=event_filter)
    # Use a raw string for the regex: "\w", "\-" and "\+" are invalid
    # escapes in a normal string literal (SyntaxError on Python 3.12+).
    trait_pattern = re.compile(r"f:[\w\-_]*\+\w")
    column_prefix = "f:"
    for row, data in gen:
        row_parts = row.split("_", 1)
        update_data = {}
        for column, value in data.items():
            if trait_pattern.match(column):
                # Split "name+dtype" once from the right.
                trait_parts = column[2:].rsplit('+', 1)
                column = hbase_utils.prepare_key(*trait_parts)
                update_data[column_prefix + column] = value
        new_row = hbase_utils.prepare_key(*row_parts)
        event_table.put(new_row, update_data)
        event_table.delete(row)