def _get_usage_info(service, date, verbose=False): """date should be a YYYY-MM-DD string. Returns a list of dicts.""" # The info we want is at, e.g. # https://console.developers.google.com/m/cloudstorage/b/ka_billing_export/o/khanacademy.org-2015-07-27.csv bucketname = 'ka_billing_export' filename = 'khanacademy.org-%s.csv' % date if verbose: print 'Fetching %s from bucket %s' % (filename, bucketname) try: csv_contents = cloudmonitoring_util.execute_with_retries( service.objects().get_media(bucket=bucketname, object=filename)) except apiclient.errors.HttpError as e: if e.resp['status'] == '404': raise UsageRecordNotFound(date) raise return list(csv.DictReader(cStringIO.StringIO(csv_contents)))
def _get_usage_info(service, date, verbose=False): """date should be a YYYY-MM-DD string. Returns a list of dicts.""" # The info we want is at, e.g. # https://console.developers.google.com/m/cloudstorage/b/ka_billing_export/o/khanacademy.org-2015-07-27.csv bucketname = "ka_billing_export" filename = "khanacademy.org-%s.csv" % date if verbose: print "Fetching %s from bucket %s" % (filename, bucketname) try: csv_contents = cloudmonitoring_util.execute_with_retries( service.objects().get_media(bucket=bucketname, object=filename) ) except apiclient.errors.HttpError as e: if e.resp["status"] == "404": raise UsageRecordNotFound(date) raise return list(csv.DictReader(cStringIO.StringIO(csv_contents)))
def _get_timeseries(metric, project_id, start_time_t, end_time_t):
    """service.timeseries().list(), but with caching and auto-paging.

    Merges all result pages into one response-shaped dict and memoizes
    it in _TIMESERIES_CACHE, keyed on (project, metric, time-range).

    NOTE(review): `_get_timeseries` is re-defined further down this file
    against the v3 monitoring API; this v2-shaped definition is shadowed.
    """
    global _TIMESERIES
    cache_key = (project_id, metric, start_time_t, end_time_t)
    if cache_key in _TIMESERIES_CACHE:
        return _TIMESERIES_CACHE[cache_key]

    if _TIMESERIES is None:
        service = cloudmonitoring_util.get_cloudmonitoring_service()
        _TIMESERIES = service.timeseries()

    # These are loop-invariant, so convert once up front.
    oldest = cloudmonitoring_util.to_rfc3339(start_time_t)
    youngest = cloudmonitoring_util.to_rfc3339(end_time_t)

    merged = {'timeseries': []}
    page_token = None
    while True:
        # TODO(csilvers): do I want to set 'window'?
        page = cloudmonitoring_util.execute_with_retries(
            _TIMESERIES.list(project=project_id,
                             metric=metric,
                             oldest=oldest,
                             youngest=youngest,
                             pageToken=page_token,
                             count=10000))
        # Fold this page's fields into the merged response.
        merged['kind'] = page['kind']
        # Luckily, RFC3339 dates can be lexicographically compared, so
        # string min/max widens the merged time range correctly.
        if 'youngest' not in merged or page['youngest'] > merged['youngest']:
            merged['youngest'] = page['youngest']
        if 'oldest' not in merged or page['oldest'] < merged['oldest']:
            merged['oldest'] = page['oldest']
        merged['timeseries'].extend(page.get('timeseries', []))
        # Advance to the next page, if there is one.
        if 'nextPageToken' not in page:
            break
        page_token = page['nextPageToken']

    _TIMESERIES_CACHE[cache_key] = merged
    return _TIMESERIES_CACHE[cache_key]
def _get_serial_port_output_lines_from_cloud_compute(service, project_id,
                                                     gce_instance):
    """Return a list of serial port output lines for the gce instance.

    The lines are ordered least-recent first, one list entry per output
    line. Returns [] if the output cannot be fetched (e.g. the instance
    is still spinning up).

    Documentation: cloud.google.com/compute/docs/reference/latest/instances

    Examples of expected serial_port_output lines:

    Failed instance:
        gcm-StatusUpdate:TIME=1467830173000;STATUS=HEALTH_CHECK_UNHEALTHY;
            STATUS_MESSAGE=0
        gcm-Heartbeat:1467830173000
        gcm-StatusUpdate:TIME=1467830178000;STATUS=HEALTH_CHECK_UNHEALTHY;
            STATUS_MESSAGE=0
        gcm-StatusUpdate:TIME=1467830183000;STATUS=HEALTH_CHECK_UNHEALTHY;
            STATUS_MESSAGE=0

    Healthy instance:
        gcm-StatusUpdate:TIME=1467826034000;STATUS=ALL_COMMANDS_SUCCEEDED
        gcm-Heartbeat:1467830669000
        gcm-StatusUpdate:TIME=1467826034000;STATUS=ALL_COMMANDS_SUCCEEDED
        gcm-Heartbeat:1467830699000
        gcm-StatusUpdate:TIME=1467826034000;STATUS=ALL_COMMANDS_SUCCEEDED
    """
    request = service.instances().getSerialPortOutput(
        project=project_id,
        zone=gce_instance.zone_name,
        instance=gce_instance.instance_name)
    try:
        response = cloudmonitoring_util.execute_with_retries(request)
    except apiclient.errors.HttpError:
        # This can fail, for example when an instance is spinning up;
        # treat that as "no output yet" rather than an error.
        return []
    return response['contents'].split('\n')
def _get_timeseries(metric, project_id, start_time_t, end_time_t):
    """service.projects().timeSeries().list() plus caching and auto-paging.

    Pages through all results for `metric` in the given time window and
    memoizes the combined {'timeSeries': [...]} dict in _TIMESERIES_CACHE.

    NOTE(review): the file contains another, near-identical definition of
    `_get_timeseries` (differing only in how the service is obtained);
    whichever appears later in the file wins.
    """
    global _TIMESERIES
    cache_key = (project_id, metric, start_time_t, end_time_t)
    if cache_key in _TIMESERIES_CACHE:
        return _TIMESERIES_CACHE[cache_key]

    if _TIMESERIES is None:
        service = cloudmonitoring_util.get_cloud_service('monitoring', 'v3')
        _TIMESERIES = service.projects().timeSeries()

    # Loop-invariant: convert the time window to RFC3339 once.
    start_rfc3339 = cloudmonitoring_util.to_rfc3339(start_time_t)
    end_rfc3339 = cloudmonitoring_util.to_rfc3339(end_time_t)

    all_series = []
    page_token = None
    while True:
        # TODO(csilvers): do I want to set 'window'?
        page = cloudmonitoring_util.execute_with_retries(
            _TIMESERIES.list(
                name='projects/%s' % project_id,
                filter='metric.type = "%s"' % metric,
                interval_startTime=start_rfc3339,
                interval_endTime=end_rfc3339,
                pageToken=page_token,
                pageSize=10000))
        all_series.extend(page.get('timeSeries', []))
        # Keep going until the API stops handing us page tokens.
        if 'nextPageToken' not in page:
            break
        page_token = page['nextPageToken']

    _TIMESERIES_CACHE[cache_key] = {'timeSeries': all_series}
    return _TIMESERIES_CACHE[cache_key]
def _get_timeseries(metric, project_id, start_time_t, end_time_t):
    """service.projects().timeSeries().list() plus caching and auto-paging.

    Results are memoized in _TIMESERIES_CACHE keyed on
    (project_id, metric, start_time_t, end_time_t); repeated calls with
    the same arguments do no API work.

    NOTE(review): this is a near-duplicate of an earlier `_get_timeseries`
    definition (only the service-construction call differs); the later
    definition in the file is the effective one.
    """
    global _TIMESERIES
    cache_key = (project_id, metric, start_time_t, end_time_t)
    if cache_key in _TIMESERIES_CACHE:
        return _TIMESERIES_CACHE[cache_key]

    if _TIMESERIES is None:
        _TIMESERIES = (cloudmonitoring_util.get_cloudmonitoring_service()
                       .projects().timeSeries())

    combined = {'timeSeries': []}
    next_token = None
    while True:
        # TODO(csilvers): do I want to set 'window'?
        response = cloudmonitoring_util.execute_with_retries(
            _TIMESERIES.list(
                name='projects/%s' % project_id,
                filter='metric.type = "%s"' % metric,
                interval_startTime=cloudmonitoring_util.to_rfc3339(
                    start_time_t),
                interval_endTime=cloudmonitoring_util.to_rfc3339(end_time_t),
                pageToken=next_token,
                pageSize=10000))
        combined['timeSeries'].extend(response.get('timeSeries', []))
        # Stop once the API gives no further page token.
        if 'nextPageToken' in response:
            next_token = response['nextPageToken']
        else:
            break

    _TIMESERIES_CACHE[cache_key] = combined
    return _TIMESERIES_CACHE[cache_key]
def _get_instances_list_from_cloud_compute(service, project_id):
    """Get the aggregated GCE instances list via cloud compute API.

    Returns the raw instances().aggregatedList() response for
    `project_id`, fetched with retries.
    """
    return cloudmonitoring_util.execute_with_retries(
        service.instances().aggregatedList(project=project_id))