class ResourcesDefinition(object):
    """A Gnocchi resource-type definition loaded from configuration.

    Validates the mandatory configuration fields, pre-compiles any
    JSONPath attribute expressions, and maps metric names to archive
    policy names.
    """

    # Keys every definition must carry, with their expected types.
    MANDATORY_FIELDS = {'resource_type': six.string_types,
                        'metrics': list}

    JSONPATH_RW_PARSER = parser.ExtentedJsonPathParser()

    def __init__(self, definition_cfg, default_archive_policy,
                 legacy_archive_policy_definition):
        self._default_archive_policy = default_archive_policy
        self._legacy_archive_policy_definition = \
            legacy_archive_policy_definition
        self.cfg = definition_cfg

        self._check_mandatory_fields()
        self._field_getter = self._build_attribute_getters()

    def _check_mandatory_fields(self):
        # Reject configs that miss a required key or carry a wrong type.
        for field, field_type in self.MANDATORY_FIELDS.items():
            if field not in self.cfg:
                raise ResourcesDefinitionException(
                    _LE("Required field %s not specified") % field, self.cfg)
            if not isinstance(self.cfg[field], field_type):
                raise ResourcesDefinitionException(
                    _LE("Required field %(field)s should be a %(type)s") %
                    {'field': field, 'type': field_type}, self.cfg)

    def _build_attribute_getters(self):
        # Integer attributes are literals used verbatim; anything else is
        # a JSONPath compiled once here and evaluated per sample later.
        getters = {}
        for attr_name, spec in self.cfg.get('attributes', {}).items():
            if isinstance(spec, six.integer_types):
                getters[attr_name] = spec
                continue
            try:
                compiled = self.JSONPATH_RW_PARSER.parse(spec)
            except Exception as e:
                raise ResourcesDefinitionException(
                    _LE("Parse error in JSONPath specification "
                        "'%(jsonpath)s': %(err)s")
                    % dict(jsonpath=spec, err=e), self.cfg)
            getters[attr_name] = functools.partial(
                self._parse_jsonpath_field, compiled)
        return getters

    @staticmethod
    def _parse_jsonpath_field(parts, sample):
        """Return the first non-None JSONPath match in *sample*, else None."""
        for match in parts.find(sample):
            if match.value is not None:
                return match.value
        return None

    def match(self, metric_name):
        """True when *metric_name* matches any configured metric pattern."""
        return any(utils.match(metric_name, pattern)
                   for pattern in self.cfg['metrics'])

    def attributes(self, sample):
        """Resolve every configured attribute against *sample*.

        Attributes whose getter yields None are omitted from the result.
        """
        resolved = {}
        for attr, getter in self._field_getter.items():
            value = getter(sample) if callable(getter) else getter
            if value is not None:
                resolved[attr] = value
        return resolved

    def metrics(self):
        """Map each metric name to its archive policy name.

        Precedence: an explicit 'archive_policy' key in the definition,
        then the legacy per-metric definition, then the global default.
        """
        result = {}
        for name in self.cfg['metrics']:
            if 'archive_policy' in self.cfg:
                policy = self.cfg['archive_policy']
            else:
                policy = self._legacy_archive_policy_definition.get(name)
            result[name] = dict(
                archive_policy_name=policy or self._default_archive_policy)
        return result
class Definition(object):
    """A named value-extraction rule driven by JSONPath.

    A definition maps a name to either a literal integer or one or more
    JSONPath expressions evaluated against a payload, optionally
    post-processed by a plugin.
    """

    JSONPATH_RW_PARSER = parser.ExtentedJsonPathParser()

    # Compiled JSONPath 'find' callables, cached per expression string and
    # shared by all Definition instances (class-level cache).
    GETTERS_CACHE = {}

    def __init__(self, name, cfg, plugin_manager):
        """Validate *cfg* and compile its JSONPath getter.

        :param name: definition name (used in error messages)
        :param cfg: a plain JSONPath string / literal int, or a dict with
                    a required 'fields' key and an optional 'plugin' key
        :param plugin_manager: mapping of plugin name -> extension, used
                               to instantiate the optional plugin
        :raises DefinitionException: on a malformed configuration
        """
        self.cfg = cfg
        self.name = name
        self.plugin = None
        if isinstance(cfg, dict):
            if 'fields' not in cfg:
                raise DefinitionException(
                    _("The field 'fields' is required for %s") % name,
                    self.cfg)

            if 'plugin' in cfg:
                plugin_cfg = cfg['plugin']
                if isinstance(plugin_cfg, six.string_types):
                    # Shorthand: plugin given as a bare name, no parameters.
                    plugin_name = plugin_cfg
                    plugin_params = {}
                else:
                    try:
                        plugin_name = plugin_cfg['name']
                    except KeyError:
                        raise DefinitionException(
                            _('Plugin specified, but no plugin name supplied '
                              'for %s') % name, self.cfg)
                    plugin_params = plugin_cfg.get('parameters')
                    if plugin_params is None:
                        plugin_params = {}
                try:
                    plugin_ext = plugin_manager[plugin_name]
                except KeyError:
                    raise DefinitionException(
                        _('No plugin named %(plugin)s available for '
                          '%(name)s') % dict(plugin=plugin_name, name=name),
                        self.cfg)
                plugin_class = plugin_ext.plugin
                self.plugin = plugin_class(**plugin_params)

            fields = cfg['fields']
        else:
            # Simple definition "foobar: jsonpath"
            fields = cfg

        if isinstance(fields, list):
            # NOTE(mdragon): if not a string, we assume a list.
            if len(fields) == 1:
                fields = fields[0]
            else:
                # Multiple paths are OR-ed into one JSONPath expression.
                fields = '|'.join('(%s)' % path for path in fields)

        if isinstance(fields, six.integer_types):
            # Literal integer: parse() returns it verbatim.
            self.getter = fields
        else:
            try:
                self.getter = self.make_getter(fields)
            except Exception as e:
                raise DefinitionException(
                    _("Parse error in JSONPath specification "
                      "'%(jsonpath)s' for %(name)s: %(err)s")
                    % dict(jsonpath=fields, name=name, err=e), self.cfg)

    def _get_path(self, match):
        # Recursively walk the match contexts to rebuild the dotted path
        # of *match*, yielding elements from the root downwards.  Note
        # that a root match (context is None) yields nothing.
        if match.context is not None:
            for path_element in self._get_path(match.context):
                yield path_element
            yield str(match.path)

    def parse(self, obj, return_all_values=False):
        """Extract the configured value(s) from *obj*.

        Returns the stored literal when the definition was an integer;
        otherwise the first matched value, or a list of all values when
        *return_all_values* is True, passed through the plugin when one
        is configured.  Returns None when nothing matches.
        """
        if callable(self.getter):
            values = self.getter(obj)
        else:
            # Literal (integer) definition: nothing to look up.
            return self.getter

        # Unless the caller wants every match, drop null-valued ones.
        values = [match for match in values
                  if return_all_values or match.value is not None]

        if self.plugin is not None:
            if return_all_values and not self.plugin.support_return_all_values:
                raise DefinitionException("Plugin %s don't allows to "
                                          "return multiple values" %
                                          self.cfg["plugin"]["name"],
                                          self.cfg)
            # Plugins receive (dotted-path, value) pairs.
            values_map = [('.'.join(self._get_path(match)), match.value)
                          for match in values]
            values = [v for v in self.plugin.trait_values(values_map)
                      if v is not None]
        else:
            values = [match.value for match in values if match is not None]
        if return_all_values:
            return values
        else:
            return values[0] if values else None

    def make_getter(self, fields):
        """Return a cached, compiled JSONPath ``find`` callable for *fields*."""
        if fields in self.GETTERS_CACHE:
            return self.GETTERS_CACHE[fields]
        else:
            getter = self.JSONPATH_RW_PARSER.parse(fields).find
            self.GETTERS_CACHE[fields] = getter
            return getter
class CeiloscaMappingDefinition(object):
    """A mapping from a Monasca metric to a Ceilometer sample.

    Each configuration entry describes how a sample field is built: a
    literal value, a JSONPath (a string starting with '$') evaluated
    against the incoming message, or — for 'resource_metadata' — a dict
    of per-key JSONPaths.
    """

    JSONPATH_RW_PARSER = parser.ExtentedJsonPathParser()

    # Every mapping definition must provide non-empty values for these.
    REQUIRED_FIELDS = ['name', 'monasca_metric_name', 'type', 'unit',
                       'source', 'resource_metadata', 'resource_id',
                       'project_id', 'user_id', 'region']

    def __init__(self, definition_cfg):
        """Validate *definition_cfg* and pre-compile its JSONPath getters.

        :raises CeiloscaMappingDefinitionException: when a required field
            is missing or empty, the sample type is invalid, or a
            JSONPath fails to parse.
        """
        self.cfg = definition_cfg
        missing = [field for field in self.REQUIRED_FIELDS
                   if not self.cfg.get(field)]
        if missing:
            raise CeiloscaMappingDefinitionException(
                "Required fields %s not specified" % missing, self.cfg)

        self._monasca_metric_name = self.cfg.get('monasca_metric_name')
        # Normalize to a list so one definition may cover several metrics.
        if isinstance(self._monasca_metric_name, six.string_types):
            self._monasca_metric_name = [self._monasca_metric_name]

        # 'type' may be resolved dynamically via 'lookup'; otherwise it
        # must be one of the known sample types.
        if ('type' not in self.cfg.get('lookup', []) and
                self.cfg['type'] not in sample.TYPES):
            raise CeiloscaMappingDefinitionException(
                "Invalid type %s specified" % self.cfg['type'], self.cfg)

        self._field_getter = {}
        for name, field in self.cfg.items():
            if name in ["monasca_metric_name", "lookup"] or not field:
                # Handled separately (or empty): no getter needed.
                continue
            elif isinstance(field, six.integer_types):
                # Literal integer value.
                self._field_getter[name] = field
            elif isinstance(field, six.string_types) and not \
                    field.startswith('$'):
                # Literal string; only '$...' strings are JSONPaths.
                self._field_getter[name] = field
            elif isinstance(field, dict) and name == 'resource_metadata':
                # Per-key JSONPath getters for the metadata dict.
                meta = {}
                for key, val in field.items():
                    parts = self.parse_jsonpath(val)
                    meta[key] = functools.partial(self._parse_jsonpath_field,
                                                  parts)
                self._field_getter['resource_metadata'] = meta
            else:
                parts = self.parse_jsonpath(field)
                self._field_getter[name] = functools.partial(
                    self._parse_jsonpath_field, parts)

    def parse_jsonpath(self, field):
        """Compile *field* as a JSONPath, wrapping parse errors."""
        try:
            parts = self.JSONPATH_RW_PARSER.parse(field)
        except Exception as e:
            raise CeiloscaMappingDefinitionException(
                "Parse error in JSONPath specification "
                "'%(jsonpath)s': %(err)s" % dict(jsonpath=field, err=e),
                self.cfg)
        return parts

    def parse_fields(self, field, message, all_values=False):
        """Resolve the configured *field* against *message*.

        Returns a literal, a single JSONPath value (or all values when
        *all_values* is True), a dict of resolved metadata values, or
        None when the field has no getter.
        """
        getter = self._field_getter.get(field)
        if not getter:
            return
        elif isinstance(getter, dict):
            dict_val = {}
            for key, val in getter.items():
                dict_val[key] = val(message, all_values)
            return dict_val
        elif callable(getter):
            return getter(message, all_values)
        else:
            return getter

    @staticmethod
    def _parse_jsonpath_field(parts, message, all_values):
        # Evaluate a pre-compiled JSONPath: first non-None match, all
        # matches when all_values is True, or None (implicitly) when
        # nothing matched.
        values = [match.value for match in parts.find(message)
                  if match.value is not None]
        if values:
            if not all_values:
                return values[0]
            return values
class TraitDefinition(object):
    """Extracts a single trait from an event notification body."""

    JSONPATH_RW_PARSER = parser.ExtentedJsonPathParser()

    def __init__(self, name, trait_cfg, plugin_manager):
        self.cfg = trait_cfg
        self.name = name

        type_name = trait_cfg.get('type', 'text')

        if 'plugin' in trait_cfg:
            self.plugin = self._load_plugin(name, trait_cfg['plugin'],
                                            plugin_manager)
        else:
            self.plugin = None

        if 'fields' not in trait_cfg:
            raise EventDefinitionException(
                _("Required field in trait definition not specified: "
                  "'%s'") % 'fields',
                self.cfg)

        fields = trait_cfg['fields']
        if not isinstance(fields, six.string_types):
            # NOTE(mdragon): if not a string, we assume a list; several
            # paths are OR-ed into one JSONPath expression.
            fields = (fields[0] if len(fields) == 1
                      else '|'.join('(%s)' % path for path in fields))

        try:
            self.fields = self.JSONPATH_RW_PARSER.parse(fields)
        except Exception as e:
            raise EventDefinitionException(
                _("Parse error in JSONPath specification "
                  "'%(jsonpath)s' for %(trait)s: %(err)s")
                % dict(jsonpath=fields, trait=name, err=e), self.cfg)

        self.trait_type = models.Trait.get_type_by_name(type_name)
        if self.trait_type is None:
            raise EventDefinitionException(
                _("Invalid trait type '%(type)s' for trait %(trait)s")
                % dict(type=type_name, trait=name), self.cfg)

    def _load_plugin(self, name, plugin_cfg, plugin_manager):
        # A plugin may be configured as a bare name or as a dict with a
        # 'name' key and optional 'parameters'.
        if isinstance(plugin_cfg, six.string_types):
            plugin_name = plugin_cfg
            plugin_params = {}
        else:
            try:
                plugin_name = plugin_cfg['name']
            except KeyError:
                raise EventDefinitionException(
                    _('Plugin specified, but no plugin name supplied for '
                      'trait %s') % name, self.cfg)
            plugin_params = plugin_cfg.get('parameters')
            if plugin_params is None:
                plugin_params = {}
        try:
            plugin_ext = plugin_manager[plugin_name]
        except KeyError:
            raise EventDefinitionException(
                _('No plugin named %(plugin)s available for '
                  'trait %(trait)s') % dict(plugin=plugin_name,
                                            trait=name), self.cfg)
        return plugin_ext.plugin(**plugin_params)

    def _get_path(self, match):
        """Yield the dotted-path elements leading to *match*, root first."""
        if match.context is not None:
            for element in self._get_path(match.context):
                yield element
            yield str(match.path)

    def to_trait(self, notification_body):
        """Build a Trait from *notification_body*, or None when absent."""
        matches = [m for m in self.fields.find(notification_body)
                   if m.value is not None]

        if self.plugin is None:
            value = matches[0].value if matches else None
        else:
            # Plugins receive (dotted-path, value) pairs for every match.
            value_map = [('.'.join(self._get_path(m)), m.value)
                         for m in matches]
            value = self.plugin.trait_value(value_map)

        if value is None:
            return None

        # NOTE(mdragon): some openstack projects (mostly Nova) emit ''
        # for null fields for things like dates.
        if self.trait_type != models.Trait.TEXT_TYPE and value == '':
            return None

        return models.Trait(self.name, self.trait_type,
                            models.Trait.convert_value(self.trait_type,
                                                       value))
class MonascaDataFilter(object):
    """Converts Ceilometer samples into Monasca metric dicts.

    The dimension and value_meta mappings are loaded once, at
    construction time, from the YAML file named by the
    ``[monasca] monasca_mappings`` configuration option.
    """

    JSONPATH_RW_PARSER = parser.ExtentedJsonPathParser()

    def __init__(self, conf):
        self.conf = conf
        self._mapping = self._get_mapping()

    def _get_mapping(self):
        """Load the mapping YAML file.

        :returns: the parsed mapping document
        :raises UnableToLoadMappings: if the file is not valid YAML
        """
        with open(self.conf.monasca.monasca_mappings, 'r') as f:
            try:
                return yaml.safe_load(f)
            except yaml.YAMLError as err:
                if hasattr(err, 'problem_mark'):
                    mark = err.problem_mark
                    errmsg = ("Invalid YAML syntax in Monasca Data "
                              "Filter file %(file)s at line: "
                              "%(line)s, column: %(column)s."
                              % dict(file=self.conf.monasca.monasca_mappings,
                                     line=mark.line + 1,
                                     column=mark.column + 1))
                else:
                    errmsg = ("YAML error reading Monasca Data Filter "
                              "file %(file)s"
                              % dict(
                                  file=self.conf.monasca.monasca_mappings))
                LOG.error(errmsg)
                # Bug fix: exception objects have no '.message' attribute
                # on Python 3 (err.message raised AttributeError here);
                # stringify the exception portably instead.
                raise UnableToLoadMappings(six.text_type(err))

    def _convert_timestamp(self, timestamp):
        """Return *timestamp* as integer milliseconds since the epoch.

        Accepts either a datetime or an ISO8601 string.
        """
        if isinstance(timestamp, datetime.datetime):
            ts = timestamp
        else:
            ts = timeutils.parse_isotime(timestamp)
        tdelta = (ts - datetime.datetime(1970, 1, 1, tzinfo=ts.tzinfo))
        # convert timestamp to milli seconds as Monasca expects
        return int(tdelta.total_seconds() * 1000)

    def _convert_to_sample(self, s):
        """Normalize a 'counter_*'-keyed dict to the Sample dict layout."""
        return sample_util.Sample(
            name=s['counter_name'],
            type=s['counter_type'],
            unit=s['counter_unit'],
            volume=s['counter_volume'],
            user_id=s['user_id'],
            project_id=s['project_id'],
            resource_id=s['resource_id'],
            timestamp=s['timestamp'],
            resource_metadata=s['resource_metadata'],
            source=s.get('source')).as_dict()

    def get_value_for_nested_dictionary(self, lst, dct):
        """Follow the key path *lst* into nested dict *dct*.

        Returns the value at the end of the path, or None (implicitly)
        as soon as a key is missing or an intermediate value is not a
        dict.
        """
        val = dct
        for element in lst:
            if isinstance(val, dict) and element in val:
                val = val.get(element)
            else:
                return
        return val

    def parse_jsonpath(self, field):
        """Compile *field* as a JSONPath, wrapping parse errors."""
        try:
            parts = self.JSONPATH_RW_PARSER.parse(field)
        except Exception as e:
            # NOTE(review): other call sites pass a second (definition)
            # argument to this exception; confirm it accepts a single
            # argument here.
            raise CeiloscaMappingDefinitionException(
                "Parse error in JSONPath specification "
                "'%(jsonpath)s': %(err)s" % dict(jsonpath=field, err=e))
        return parts

    def _get_value_metadata_for_key(self, sample_meta, meta_key):
        """Get the data for the given key, supporting JSONPath.

        *meta_key* is either a plain string key (with a dotted-path
        fallback) or a one-entry dict ``{monasca_key: jsonpath}``.
        Returns a (monasca_key, value) pair; missing values map to the
        literal string 'None'.
        """
        if isinstance(meta_key, dict):
            # extract key and jsonpath
            # If following convention, dict will have one and only one
            # element of the form <monasca key>: <json path>
            if len(meta_key.keys()) == 1:
                mon_key = list(meta_key.keys())[0]
            else:
                # If no keys or more keys than one
                raise CeiloscaMappingDefinitionException(
                    "Field definition format mismatch, should "
                    "have only one key:value pair. %(meta_key)s"
                    % {'meta_key': meta_key}, meta_key)

            json_path = meta_key[mon_key]
            parts = self.parse_jsonpath(json_path)
            val_matches = parts.find(sample_meta)
            if len(val_matches) > 0:
                # resolve the find to the first match and get value
                val = val_matches[0].value
                if not isinstance(val, (str, six.text_type)) \
                        and not isinstance(val, int):
                    # Don't support lists or dicts or ...
                    raise CeiloscaMappingDefinitionException(
                        "Metadata format mismatch, value "
                        "should be a simple string. %(valuev)s"
                        % {'valuev': val}, meta_key)
            else:
                val = 'None'
            return mon_key, val
        else:
            # simple string
            val = sample_meta.get(meta_key, None)
            if val is not None:
                return meta_key, val
            else:
                # one more attempt using a dotted notation
                # TODO(joadavis) Deprecate this . notation code
                # in favor of jsonpath
                if len(meta_key.split('.')) > 1:
                    val = self.get_value_for_nested_dictionary(
                        meta_key.split('.'), sample_meta)
                    if val is not None:
                        return meta_key, val
                    else:
                        return meta_key, 'None'
                else:
                    return meta_key, 'None'

    def process_sample_for_monasca(self, sample_obj):
        """Build a Monasca metric dict from a Ceilometer sample.

        :param sample_obj: a Sample instance or a sample-shaped dict
            (either 'counter_*'-keyed or already normalized)
        :returns: dict with name, timestamp, value, dimensions and
            value_meta keys
        :raises NoMappingsFound: if no mapping file was loaded
        """
        if not self._mapping:
            raise NoMappingsFound("Unable to process the sample")

        dimensions = {}
        dimensions['datasource'] = 'ceilometer'
        # control_plane, cluster and cloud_name can be None, but we use
        # literal 'None' for such case
        dimensions['control_plane'] = self.conf.monasca.control_plane or 'None'
        dimensions['cluster'] = self.conf.monasca.cluster or 'None'
        dimensions['cloud_name'] = self.conf.monasca.cloud_name or 'None'
        if isinstance(sample_obj, sample_util.Sample):
            sample = sample_obj.as_dict()
        elif isinstance(sample_obj, dict):
            if 'counter_name' in sample_obj:
                sample = self._convert_to_sample(sample_obj)
            else:
                sample = sample_obj
        # NOTE(review): any other input type leaves 'sample' unbound and
        # raises NameError below — confirm callers only pass the above.

        for dim in self._mapping['dimensions']:
            # Falsy sample values (None, '', 0) become the literal 'None'.
            val = sample.get(dim, None)
            if val:
                dimensions[dim] = val
            else:
                dimensions[dim] = 'None'

        sample_meta = sample.get('resource_metadata', None)
        value_meta = {}

        meter_name = sample.get('name') or sample.get('counter_name')
        if sample_meta:
            # Common metadata first, then meter-specific overrides.
            for meta_key in self._mapping['metadata']['common']:
                monasca_key, val = self._get_value_metadata_for_key(
                    sample_meta, meta_key)
                value_meta[monasca_key] = val

            if meter_name in self._mapping['metadata'].keys():
                for meta_key in self._mapping['metadata'][meter_name]:
                    monasca_key, val = self._get_value_metadata_for_key(
                        sample_meta, meta_key)
                    value_meta[monasca_key] = val

        meter_value = sample.get('volume') or sample.get('counter_volume')
        if meter_value is None:
            meter_value = 0

        metric = dict(
            name=meter_name,
            timestamp=self._convert_timestamp(sample['timestamp']),
            value=meter_value,
            dimensions=dimensions,
            value_meta=value_meta,
        )
        LOG.debug("Generated metric with name %(name)s,"
                  " timestamp %(timestamp)s, value %(value)s,"
                  " dimensions %(dimensions)s" %
                  {'name': metric['name'],
                   'timestamp': metric['timestamp'],
                   'value': metric['value'],
                   'dimensions': metric['dimensions']})
        return metric
def parse(path):
    """Parse a JSONPath expression with the shared module-level parser.

    The parser is created lazily on first use and cached in the module
    global ``PARSER`` so repeated calls reuse a single instance.
    """
    global PARSER
    PARSER = PARSER or parser.ExtentedJsonPathParser()
    return PARSER.parse(path)