Example #1
async def fetch_zones(context: MetricsContext, project_id: str) -> List[str]:
    """Fetch the names of all Compute Engine zones available to the project."""
    headers = context.create_gcp_request_headers(project_id)

    # No raise_for_status here: the status is checked below so that the
    # error message can include the decoded response body.
    resp = await context.gcp_session.request(
        "GET",
        url=f"{_GCP_COMPUTE_ENDPOINT}/compute/v1/projects/{project_id}/zones",
        headers=headers)

    response_json = await resp.json()
    if resp.status != 200:
        raise Exception(
            f"Failed to fetch available zones, response is {response_json}")

    zone_items = response_json.get("items", [])
    return [zone["name"] for zone in zone_items]
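
The snippet assumes a MetricsContext that supplies both the aiohttp session and the authorization headers. For readers without that wrapper, a minimal standalone sketch of the same call follows; the endpoint constant's value and the bearer-token handling are assumptions, not part of the original module.

from typing import List

import aiohttp

# Assumed value of the module constant _GCP_COMPUTE_ENDPOINT.
_GCP_COMPUTE_ENDPOINT = "https://compute.googleapis.com"

async def fetch_zones_standalone(token: str, project_id: str) -> List[str]:
    # Hypothetical standalone variant: the original gets the session and
    # headers from MetricsContext instead of building them here.
    headers = {"Authorization": f"Bearer {token}"}
    async with aiohttp.ClientSession() as session:
        async with session.get(
                f"{_GCP_COMPUTE_ENDPOINT}/compute/v1/projects/{project_id}/zones",
                headers=headers) as resp:
            response_json = await resp.json()
            if resp.status != 200:
                raise Exception(
                    f"Failed to fetch available zones, response is {response_json}")
            return [zone["name"] for zone in response_json.get("items", [])]

# Run with: asyncio.run(fetch_zones_standalone("<access-token>", "my-project"))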
Example #2
async def generic_paging(
        project_id: str, url: Text, ctx: MetricsContext,
        mapper: Callable[[Dict[Any, Any]], List[Entity]]) -> List[Entity]:
    """Apply mapper function on any page returned by gcp api url."""
    headers = ctx.create_gcp_request_headers(project_id)

    get_page = True
    params: Dict[Text, Text] = {}
    entities: List[Entity] = []
    while get_page:
        resp = await ctx.gcp_session.request("GET",
                                             params=params,
                                             url=url,
                                             headers=headers)

        try:
            page = await resp.json()
        except Exception:
            error_message = await resp.text()
            error_message = ' '.join(error_message.split())
            ctx.log(project_id, f'Failed to decode JSON. {url} {error_message}')
            return entities

        if resp.status >= 400:
            ctx.log(
                project_id,
                f'Failed to retrieve information from googleapis. {url} {page}'
            )
            return entities

        try:
            entities.extend(mapper(page))
        except Exception as ex:
            ctx.log(project_id,
                    f"Failed to map response from googleapis. {url} {ex}")
            return entities

        get_page = "nextPageToken" in page
        if get_page:
            params["pageToken"] = page["nextPageToken"]

    return entities
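
generic_paging is parameterized by a mapper that turns one JSON page into Entity objects. A hedged sketch of such a mapper for the zones endpoint follows; the Entity stand-in and its fields are assumptions, since the real type is defined elsewhere in the module.

from typing import Any, Dict, List, NamedTuple

class Entity(NamedTuple):
    # Minimal stand-in for the module's Entity type (assumed shape).
    id: str
    name: str

def zones_to_entities(page: Dict[Any, Any]) -> List[Entity]:
    """Map one zones-list page to Entity objects (illustrative only)."""
    return [Entity(id=str(item["id"]), name=item["name"])
            for item in page.get("items", [])]

# entities = await generic_paging(project_id, zones_url, ctx, zones_to_entities)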
Example #3
async def get_all_disabled_apis(context: MetricsContext, project_id: str):
    base_url = f"{GCP_SERVICE_USAGE_URL}{project_id}/services?filter=state:DISABLED"
    headers = context.create_gcp_request_headers(project_id)
    disabled_apis = set()
    try:
        url = base_url
        while url:
            response = await context.gcp_session.get(url, headers=headers, raise_for_status=True)
            disabled_services_json = await response.json()
            disabled_services = disabled_services_json.get("services", [])
            disabled_apis.update({disabled_service.get("config", {}).get("name", "")
                                  for disabled_service in disabled_services})
            next_page_token = disabled_services_json.get("nextPageToken")
            url = f"{base_url}&pageToken={next_page_token}" if next_page_token else None
        return disabled_apis
    except ClientResponseError as e:
        context.log(project_id, f'Disabled APIs call returned failed status code. {e}')
        return disabled_apis
    except Exception as e:
        context.log(project_id, f'Cannot get disabled APIs: {base_url}. {e}')
        return disabled_apis
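
A plausible way to use get_all_disabled_apis is to intersect its result with the APIs the integration depends on. The caller and the service names below are illustrative only, not part of the original module.

# Hypothetical caller: warn when APIs the integration depends on are off.
REQUIRED_SERVICES = {"compute.googleapis.com", "monitoring.googleapis.com"}

async def warn_about_disabled_apis(context: MetricsContext, project_id: str) -> None:
    disabled = await get_all_disabled_apis(context, project_id)
    missing = REQUIRED_SERVICES & disabled
    if missing:
        context.log(project_id, f"Required APIs are disabled: {sorted(missing)}")
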
async def fetch_metric(context: MetricsContext, project_id: str,
                       service: GCPService,
                       metric: Metric) -> List[IngestLine]:
    """Fetch one metric's time series from the GCP Monitoring API and
    convert each data point into an IngestLine."""
    end_time = context.execution_time - metric.ingest_delay
    start_time = end_time - context.execution_interval

    reducer = 'REDUCE_SUM'
    aligner = 'ALIGN_SUM'

    if metric.value_type.lower() == 'bool':
        aligner = 'ALIGN_COUNT_TRUE'
    elif metric.google_metric_kind.lower().startswith('cumulative'):
        aligner = 'ALIGN_DELTA'

    # Query parameters for the Monitoring timeSeries.list request;
    # per-dimension groupByFields entries are appended below.
    params = [
        ('filter',
         f'metric.type = "{metric.google_metric}" {service.monitoring_filter}'.strip()),
        ('interval.startTime', start_time.isoformat() + "Z"),
        ('interval.endTime', end_time.isoformat() + "Z"),
        ('aggregation.alignmentPeriod',
         f"{metric.sample_period_seconds.total_seconds()}s"),
        ('aggregation.perSeriesAligner', aligner),
        ('aggregation.crossSeriesReducer', reducer)
    ]

    all_dimensions = (service.dimensions + metric.dimensions)
    dt_dimensions_mapping = DtDimensionsMap()
    for dimension in all_dimensions:
        if dimension.key_for_send_to_dynatrace:
            dt_dimensions_mapping.add_label_mapping(
                dimension.key_for_fetch_metric,
                dimension.key_for_send_to_dynatrace)

        params.append(
            ('aggregation.groupByFields', dimension.key_for_fetch_metric))

    headers = context.create_gcp_request_headers(project_id)

    should_fetch = True

    lines = []
    while should_fetch:
        context.gcp_metric_request_count[project_id] = \
            context.gcp_metric_request_count.get(project_id, 0) + 1

        url = f"{_MONITORING_ROOT}/projects/{project_id}/timeSeries"
        resp = await context.gcp_session.request('GET',
                                                 url=url,
                                                 params=params,
                                                 headers=headers)
        page = await resp.json()
        # response body is https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list#response-body
        if 'error' in page:
            raise Exception(str(page))
        if 'timeSeries' not in page:
            break

        for time_series in page['timeSeries']:
            typed_value_key = extract_typed_value_key(time_series)
            dimensions = create_dimensions(context, service.name, time_series,
                                           dt_dimensions_mapping)
            entity_id = create_entity_id(service, time_series)

            for point in time_series['points']:
                line = convert_point_to_ingest_line(dimensions, metric, point,
                                                    typed_value_key, entity_id)
                if line:
                    lines.append(line)

        next_page_token = page.get('nextPageToken', None)
        if next_page_token:
            update_params(next_page_token, params)
        else:
            should_fetch = False

    return lines
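
fetch_metric delegates pagination to an update_params helper that is not shown in this listing. Given that params is a list of (key, value) tuples, a plausible reconstruction replaces any stale pageToken pair and appends the new token; this is a sketch of the assumed behavior, not the original helper.

from typing import List, Tuple

def update_params(next_page_token: str, params: List[Tuple[str, str]]) -> None:
    # Reconstruction (assumed behavior): drop any previous pageToken pair
    # and append the token for the next timeSeries.list page, in place.
    params[:] = [(key, value) for key, value in params if key != 'pageToken']
    params.append(('pageToken', next_page_token))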