def _send_metrics(descriptors, points):
  """Calls 'timeseries:write' API to push metrics to Cloud Monitoring.

  Args:
    descriptors: dict {name -> Descriptor object}.
    points: list of dicts {'desc': <name>, 'labels': <list>, 'point': <dict>}
        where point dict is in a format of _Metric.to_dict().
  """
  if not points:
    return

  conf = MonitoringConfig.cached()
  project_id = conf.project_id or app_identity.get_application_id()
  service_account_key = conf.service_account_key

  # Register all metrics (in parallel).
  reg_futures = [
    _register_metric(descriptors[name], service_account_key, project_id)
    for name in sorted(descriptors)
  ]
  ndb.Future.wait_all(reg_futures)
  for fut in reg_futures:
    fut.check_success()

  def make_point_dict(p):
    # See https://cloud.google.com/monitoring/v2beta2/timeseries/write.
    desc = descriptors[p['desc']]
    point = p['point']
    desc.validate_value(point['value'])
    # Map each declared label key (first element of a (key, description)
    # pair) to the corresponding reported label value, with the custom
    # metric prefix the API expects.
    labels = {}
    for lbl, value in zip(desc.labels, p['labels']):
      labels['custom.cloudmonitoring.googleapis.com/%s' % lbl[0]] = value
    return {
      'timeseriesDesc': {
        'metric': 'custom.cloudmonitoring.googleapis.com/%s' % desc.name,
        'labels': labels,
      },
      'point': {
        'start': utils.timestamp_to_datetime(
            point['start']).strftime(_TS_FORMAT),
        'end': utils.timestamp_to_datetime(
            point['end']).strftime(_TS_FORMAT),
        '%sValue' % desc.value_type: point['value'],
      },
    }

  # Split points in batches and send each batch in parallel.
  rpc_futures = []
  for batch in _split_in_batches(points, _MAX_BATCH_SIZE):
    payload = {'timeseries': [make_point_dict(p) for p in batch]}
    rpc_futures.append(net.json_request_async(
        url=_WRITE_URL.format(project_id=project_id),
        method='POST',
        payload=payload,
        scopes=[_MONITORING_SCOPE],
        service_account_key=service_account_key))
  ndb.Future.wait_all(rpc_futures)
  for fut in rpc_futures:
    fut.check_success()
def _register_metric(descriptor, service_account_key, project_id):
  """Registers a metric if it is not registered yet."""
  # Use datastore (and memcache via NDB) to keep "already registered" flag. Do
  # not bother with transactions since Monitoring API call below is idempotent.
  key = ndb.Key(_MonitoringMetric, '%s:%s' % (project_id, descriptor.name))
  cached = yield key.get_async()
  if cached is not None and cached.descriptor == descriptor.to_dict():
    return

  # See https://cloud.google.com/monitoring/v2beta2/metricDescriptors/create.
  # Monitoring API doesn't mind when the metric is updated with modified
  # labels.
  label_specs = []
  for label_key, label_description in descriptor.labels:
    label_specs.append({
      'key': 'custom.cloudmonitoring.googleapis.com/%s' % label_key,
      'description': label_description,
    })
  resp = yield net.json_request_async(
      url=_CREATE_URL.format(project_id=project_id),
      method='POST',
      payload={
        'name': 'custom.cloudmonitoring.googleapis.com/%s' % descriptor.name,
        'description': descriptor.description,
        'labels': label_specs,
        'typeDescriptor': {
          'metricType': descriptor.metric_type,
          'valueType': descriptor.value_type,
        },
      },
      scopes=[_MONITORING_SCOPE],
      service_account_key=service_account_key)

  # Remember the registered descriptor so the next call can skip the RPC.
  yield _MonitoringMetric(key=key, descriptor=descriptor.to_dict()).put_async()
  logging.info('Metric %s is updated: %s', descriptor.name, resp)
def _validate_by_service_async(service, config_set, path, content, ctx):
  """Validates a config with an external service."""
  # Fetch the service's dynamic metadata; it declares the validation endpoint.
  try:
    metadata = yield services.get_metadata_async(service.id)
  except services.DynamicMetadataError as ex:
    logging.error("Could not load dynamic metadata for %s: %s", service.id, ex)
    return
  assert metadata and metadata.validation
  url = metadata.validation.url
  if not url:
    # Service does not expose a validation endpoint; nothing to do.
    return

  # The service validates only configs matching one of its declared
  # (config_set, path) pattern pairs; skip everything else.
  match = False
  for p in metadata.validation.patterns:
    # TODO(nodir): optimize if necessary.
    if validation.compile_pattern(p.config_set)(config_set) and validation.compile_pattern(p.path)(path):
      match = True
      break
  if not match:
    return

  # |res| is closed over by report_error below; it stays None until the
  # validation request succeeds.
  res = None

  def report_error(text):
    text = ("Error during external validation: %s\n"
            "url: %s\n"
            "config_set: %s\n"
            "path: %s\n"
            "response: %r") % (
        text,
        url,
        config_set,
        path,
        res,
    )
    logging.error(text)
    ctx.critical(text)

  try:
    # Content is base64-encoded since it may be arbitrary bytes.
    req = {"config_set": config_set, "path": path, "content": base64.b64encode(content)}
    res = yield net.json_request_async(url, method="POST", payload=req, scopes=net.EMAIL_SCOPE)
  except net.Error as ex:
    report_error("Net error: %s" % ex)
    return

  # Forward each returned message into |ctx|, dispatching on severity.
  # Malformed entries are reported but do not abort processing of the rest.
  try:
    for msg in res.get("messages", []):
      if not isinstance(msg, dict):
        report_error("invalid response: message is not a dict: %r" % msg)
        continue
      severity = msg.get("severity") or "INFO"
      if severity not in service_config_pb2.ValidationResponseMessage.Severity.keys():
        report_error("invalid response: unexpected message severity: %s" % severity)
        continue
      # It is safe because we've validated |severity|.
      func = getattr(ctx, severity.lower())
      func(msg.get("text") or "")
  except Exception as ex:
    report_error(ex)
def _api_call_async(self, path, allow_not_found=True, **kwargs):
  """Sends a request to the config service API and returns parsed JSON.

  Args:
    path: endpoint path relative to /_ah/api/config/v1/.
    allow_not_found: if True, a 404 response yields None instead of raising.
    **kwargs: passed through to net.json_request_async; 'scopes' defaults
        to net.EMAIL_SCOPE.
  """
  assert path
  kwargs.setdefault('scopes', net.EMAIL_SCOPE)
  url = 'https://%s/_ah/api/config/v1/%s' % (self.service_hostname, path)
  try:
    response = yield net.json_request_async(url, **kwargs)
  except net.NotFoundError as ex:
    if allow_not_found:
      raise ndb.Return(None)
    # Log the body only when the 404 is propagated to the caller.
    logging.warning('404 response: %s', ex.response)
    raise
  raise ndb.Return(response)
def _call_async(method, endpoint, payload=None):
  """Makes HTTP request to Pub/Sub service.

  Args:
    method: HTTP verb, such as 'GET' or 'PUT'.
    endpoint: URL of the endpoint, relative to pubsub.googleapis.com/v1/.
    payload: Body of the request to send as JSON.
  """
  pubsub_scope = 'https://www.googleapis.com/auth/pubsub'
  return net.json_request_async(
      url='https://pubsub.googleapis.com/v1/%s' % endpoint,
      method=method,
      payload=payload,
      scopes=[pubsub_scope])
def get_metadata_async(service_id):
  """Returns service dynamic metadata.

  Memcaches results for 1 min. Never returns None.

  Raises:
    ServiceNotFoundError if service |service_id| is not found.
    DynamicMetadataError if metadata endpoint response is bad.
  """
  service = yield get_service_async(service_id)
  if service is None:
    # Bug fix: the original passed ('Service "%s" not found', service_id) as
    # two separate exception args; exceptions do not lazily %-format their
    # arguments the way logging calls do, so the id was never interpolated.
    raise ServiceNotFoundError('Service "%s" not found' % service_id)
  if not service.metadata_url:
    # No metadata endpoint configured: return empty (default) metadata.
    raise ndb.Return(service_config_pb2.ServiceDynamicMetadata())
  try:
    res = yield net.json_request_async(
        service.metadata_url, scopes=net.EMAIL_SCOPE)
  except net.Error as ex:
    # Format the exception itself rather than the deprecated .message
    # attribute; consistent with the other error messages in this file.
    raise DynamicMetadataError('Net error: %s' % ex)
  raise ndb.Return(_dict_to_dynamic_metadata(res))
def _validate_by_service_async(service, config_set, path, content, ctx):
  """Validates a config with an external service."""
  try:
    metadata = yield services.get_metadata_async(service.id)
  except services.DynamicMetadataError as ex:
    logging.error('Could not load dynamic metadata for %s: %s', service.id, ex)
    return
  assert metadata and metadata.validation

  url = metadata.validation.url
  if not url:
    # Service does not expose a validation endpoint.
    return

  # TODO(nodir): optimize if necessary.
  matched = any(
      validation.compile_pattern(p.config_set)(config_set) and
      validation.compile_pattern(p.path)(path)
      for p in metadata.validation.patterns)
  if not matched:
    return

  # Closed over by report_error; None until the request below succeeds.
  res = None

  def report_error(text):
    text = (
        'Error during external validation: %s\n'
        'url: %s\n'
        'config_set: %s\n'
        'path: %s\n'
        'response: %r') % (text, url, config_set, path, res)
    logging.error(text)
    ctx.critical(text)

  # Content is base64-encoded since it may be arbitrary bytes.
  request_body = {
    'config_set': config_set,
    'path': path,
    'content': base64.b64encode(content),
  }
  try:
    res = yield net.json_request_async(
        url, method='POST', payload=request_body, scopes=net.EMAIL_SCOPE)
  except net.Error as ex:
    report_error('Net error: %s' % ex)
    return

  # Forward each returned message into |ctx|, dispatching on severity.
  try:
    known_severities = (
        service_config_pb2.ValidationResponseMessage.Severity.keys())
    for msg in res.get('messages', []):
      if not isinstance(msg, dict):
        report_error('invalid response: message is not a dict: %r' % msg)
        continue
      severity = msg.get('severity') or 'INFO'
      if severity not in known_severities:
        report_error(
            'invalid response: unexpected message severity: %s' % severity)
        continue
      # It is safe because we've validated |severity|.
      getattr(ctx, severity.lower())(msg.get('text') or '')
  except Exception as ex:
    report_error(ex)