Example #1
def publish_thresholds(config_filepath, data, metadata):
    """List metric(s) threshold data for a project and publish it to pubsub topic.

    Args:
        config_filepath: str, path for config file.
        data: dict, input that can be passed to a project_utils._Project object.
        metadata: dict, attributes that need to be passed.
    """
    config = config_utils.config(config_filepath)
    project = projects_lib.Project.from_dict(data)

    logging.info('Metrics: Listing metric(s) threshold(s) for project %s',
                 project.id)
    data = {
        'rows': [],
        'table_id': config.value('export.bigquery.tables.thresholds_table_id')
    }

    thresholds = config.value('thresholds') or {}
    if _ALL in thresholds:
        logging.info('Checking %s threshold(s) for project %s', _ALL,
                     project.id)
        threshold = thresholds.get(_ALL, _DEFAULT_THRESHOLD)
        data['rows'].extend(quota_helper.mql_thresholds_all(
            project, threshold))
    else:
        for quota_metric in config.value('quota.metrics', default=[]):
            threshold = thresholds.get(quota_metric, _DEFAULT_THRESHOLD)
            logging.info('Checking %s threshold(s) for project %s',
                         quota_metric, project.id)
            data['rows'].extend(
                quota_helper.mql_thresholds_single(project, quota_metric,
                                                   threshold))
    _publish_details(project, data, metadata, config)
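
For orientation, here is a hedged sketch of how publish_thresholds might be invoked; the config path, project payload, and metadata below are illustrative assumptions, since the real shapes are defined by projects_lib and the calling pipeline.

# Hypothetical invocation; every value below is an assumption, not taken from the project.
example_project = {'id': 'my-project', 'name': 'My Project'}  # assumed payload keys
example_metadata = {'batch_id': 'batch-123'}                  # assumed attributes

publish_thresholds('config.yaml', example_project, example_metadata)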
Example #2
def publish(config_filepath, data, metadata):
    """List metric(s) for a project and publish data to pubsub topic.

    Args:
        config_filepath: str, path for config file.
        data: dict, input that can be passed to a project_utils._Project object.
        metadata: dict, attributes that need to be passed.
    """
    config = config_utils.config(config_filepath)
    project = projects_lib.Project.from_dict(data)

    logging.info('Metrics: Listing metrics for project %s', project.id)
    data = {
        'rows': [],
        'table_id': config.value('export.bigquery.tables.metrics_table_id'),
    }
    if _ALL in config.value('export.metrics', default=[]):
        logging.debug('Metrics: Checking %s metric(s) for project %s', _ALL,
                      project.id)
        data['rows'].extend(quota_helper.mql_all(project))
    else:
        for quota_metric in config.value('export.metrics', default=[]):
            logging.debug('Metrics: Checking %s metric(s) for project %s',
                          quota_metric, project.id)
            data['rows'].extend(quota_helper.mql_single(project, quota_metric))
    _publish_details(project, data, metadata, config)
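
The dotted keys passed to config.value() in the two publishers above imply a nested configuration roughly like the sketch below. This is an assumed slice of the config expressed as a Python dict with placeholder values, not the project's actual schema.

# Assumed config slice read by publish() and publish_thresholds(); keys mirror
# the dotted paths above, all values are placeholders.
assumed_config = {
    'project': 'my-host-project',                      # assumption
    'export': {
        'metrics': ['all'],                            # placeholder for _ALL or metric names
        'bigquery': {
            'dataset': 'quota_export',                 # assumption
            'tables': {
                'metrics_table_id': 'metrics',         # assumption
                'thresholds_table_id': 'thresholds',   # assumption
            },
        },
    },
    'thresholds': {'all': 80},                         # placeholder for _ALL / per-metric values
    'quota': {'metrics': []},                          # metrics checked individually
}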
Example #3
def publish(config_filepath):
    """Check if there are quota(s) above threshold and publish a custom metric.

    Args:
        config_filepath: str, path for config file.
    """
    logging.info('Timeseries: Publishing timeseries for project')
    config = config_utils.config(config_filepath)

    project_id = config.value('project')
    dataset_id = config.value('export.bigquery.dataset')
    table_id = config.value('export.bigquery.tables.thresholds_table_id')
    full_table_id = '.'.join((project_id, dataset_id, table_id))

    query = ('select count(*) from `{full_table_id}` '
             'where timestamp like "{today}%"')
    query = query.format(full_table_id=full_table_id,
                         today=common_utils.today())
    logging.info('Checking: %s', query)
    result = bigquery_lib.query(query)
    if result and result[0][0]:
        helper = functools.partial(_threshold_custom_metrics, None)
        _build_and_write_timeseries(project_id, helper)
    else:
        logging.info('Nothing to report')
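
The count query simply asks whether any threshold rows were written today, and functools.partial(_threshold_custom_metrics, None) pre-binds None as the helper's first positional argument. Below is a minimal stdlib-only sketch of that pre-binding, using a hypothetical stand-in function.

import functools

def write_timeseries(value, project_id):  # hypothetical stand-in for _threshold_custom_metrics
    print('writing %s for %s' % (value, project_id))

helper = functools.partial(write_timeseries, None)  # first argument pre-bound to None
helper('my-project')  # equivalent to write_timeseries(None, 'my-project')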
Example #4
def save(config_filepath, data, unused_metadata):
    """Save metric data to bigquery table.

    Args:
        config_filepath: str, path for config file.
        data: dict, with key 'rows' that needs to be saved to bigquery table.
        unused_metadata: unused.
    """
    logging.info('Metrics: Saving metrics to bigquery')
    config = config_utils.config(config_filepath)
    table_id, rows = data.get('table_id'), data.get('rows', [])
    project_id = config.value('project')
    dataset_id = config.value('export.bigquery.dataset')
    full_table_id = '.'.join((project_id, dataset_id, table_id))
    bigquery_lib.write_rows(full_table_id, rows)
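
save() consumes the same payload shape the publishers above build under data: a 'table_id' naming the target table and a list of 'rows'. The sketch below is hedged; only those two top-level keys come from the code, the table name and row schema are assumptions.

# Assumed payload; table name and row contents are placeholders.
example_payload = {
    'table_id': 'thresholds',
    'rows': [{'metric': 'example/metric', 'usage': 10, 'limit': 100}],
}
save('config.yaml', example_payload, None)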
Example #5
def publish(config_filepath):
    """List projects and publish the data for each project to pubsub topic.

    Args:
        config_filepath: str, path for config file.
    """
    logging.info('Projects: Listing and publishing projects')
    config = config_utils.config(config_filepath)
    batch_id = common_utils.get_unique_id()
    timestamp = common_utils.zulu_timestamp()

    projects = config.value('export.projects', default=[])
    if _ALL in projects:
        projects = projects_lib.get_all()
    else:
        projects = projects_lib.get_selective(projects)

    for project in projects:
        project.timestamp = timestamp
        _publish_project_details(project, config, batch_id)
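
Every project in the run is stamped with the same batch_id and Zulu timestamp before publishing, so rows from one export can be grouped downstream. The helpers are project-internal; the snippet below is a placeholder sketch of what they plausibly produce, not their actual implementation.

import uuid
from datetime import datetime, timezone

# Placeholder stand-ins for common_utils.get_unique_id() and common_utils.zulu_timestamp().
batch_id = uuid.uuid4().hex
timestamp = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')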
Example #6
def run():
    """Bootstrap required setup for the given project id."""
    config = config_utils.config('config.yaml')
    host_project_id = config.value('project')
    reporting_handler.create_custom_metric_descriptors(host_project_id)