Example #1
MAX_AGE_SCHEDULED = gae_ts_mon.FloatMetric(
    'buildbucket/builds/max_age_scheduled',
    'Age of the oldest SCHEDULED build.',
    (_build_fields('bucket', 'builder') +
     [gae_ts_mon.BooleanField('must_be_never_leased')]),
    units=gae_ts_mon.MetricsDataUnits.SECONDS)
SEQUENCE_NUMBER_GEN_DURATION_MS = gae_ts_mon.CumulativeDistributionMetric(
    'buildbucket/sequence_number/gen_duration',
    'Duration of a sequence number generation in ms',
    [gae_ts_mon.StringField('sequence')],
    # Bucketer for 1ms..5s range
    bucketer=BUCKETER_5_SEC,
    units=gae_ts_mon.MetricsDataUnits.MILLISECONDS)
TAG_INDEX_INCONSISTENT_ENTRIES = gae_ts_mon.NonCumulativeDistributionMetric(
    'buildbucket/tag_index/inconsistent_entries',
    'Number of inconsistent entries encountered during build search',
    [gae_ts_mon.StringField('tag')],
    # We can't have more than 1000 entries in a tag index.
    bucketer=BUCKETER_1K)
TAG_INDEX_SEARCH_SKIPPED_BUILDS = gae_ts_mon.NonCumulativeDistributionMetric(
    'buildbucket/tag_index/skipped_builds',
    'Number of builds we fetched, but skipped',
    [gae_ts_mon.StringField('tag')],
    # We can't have more than 1000 entries in a tag index.
    bucketer=BUCKETER_1K)
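

# Illustrative usage sketch; the helper below is hypothetical and not part
# of the original buildbucket module. It only shows how the cumulative
# distribution metric declared above would typically be fed.
def _report_sequence_gen_duration(duration_ms, sequence_name):
    # Record one sequence-number generation (in ms) against its sequence.
    SEQUENCE_NUMBER_GEN_DURATION_MS.add(
        duration_ms, fields={'sequence': sequence_name})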


@ndb.tasklet
def set_build_count_metric_async(bucket_id, bucket_field, builder, status,
                                 experimental):
    q = model.Build.query(
        model.Build.bucket_id == bucket_id,
Example #2
    'Status of a job executor.',
    None)


# Global metric. Target fields:
# - hostname = 'autogen:<executor_id>' (bot id).
# The status value must be 'ready', 'running', or, when the executor cannot
# run a job, anything else (possibly swarming-specific), e.g. 'quarantined'
# or 'dead'.
# Note that 'running' will report data for as long as the job is running,
# so it is best to restrict data to status == 'pending'.
_jobs_pending_durations = gae_ts_mon.NonCumulativeDistributionMetric(
    'jobs/pending_durations',
    'Pending times of active jobs, in seconds.', [
        gae_ts_mon.StringField('spec_name'),
        gae_ts_mon.StringField('project_id'),
        gae_ts_mon.StringField('subproject_id'),
        gae_ts_mon.StringField('pool'),
        gae_ts_mon.StringField('status'),
    ],
    bucketer=_bucketer)
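

# Illustrative usage sketch; the helper below is hypothetical, not part of
# the swarming source. It assumes gae_ts_mon.Distribution values and the
# target_fields keyword used when flushing global metrics.
def _flush_pending_durations(dist, fields, executor_id):
    # 'hostname' is the target field described in the comment above.
    _jobs_pending_durations.set(
        dist, fields=fields,
        target_fields={'hostname': 'autogen:' + executor_id})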


# Global metric. Target fields:
# - hostname = 'autogen:<executor_id>' (bot id).
# The status value must be 'ready', 'running', or, when the executor cannot
# run a job, anything else (possibly swarming-specific), e.g. 'quarantined'
# or 'dead'.
# Note that 'running' will report data for as long as the job is running,
# so it is best to restrict data to status == 'pending'.
_jobs_max_pending_duration = gae_ts_mon.FloatMetric(
    'jobs/max_pending_duration',
Example #3
# - subproject_id: e.g. 'blink'. Set to empty string if not used.
# - spec_name: name of a job specification, e.g. '<master>:<builder>:<test>'
#     for buildbot jobs.
# - result: one of 'success', 'failure', or 'infra-failure'.
jobs_completed = gae_ts_mon.CounterMetric(
    'jobs/completed',
    description='Number of completed jobs.')


jobs_durations = gae_ts_mon.CumulativeDistributionMetric(
    'jobs/durations', bucketer=_bucketer,
    description='Cycle times of completed jobs, in seconds.')


jobs_pending_durations = gae_ts_mon.NonCumulativeDistributionMetric(
    'jobs/pending_durations', bucketer=_bucketer,
    description='Pending times of active jobs, in seconds.')

jobs_max_pending_duration = gae_ts_mon.FloatMetric(
    'jobs/max_pending_duration',
    description='Maximum pending seconds of pending jobs.')

# Similar to jobs/completed and jobs/durations, but with a dedup field.
# - project_id: e.g. 'chromium'
# - subproject_id: e.g. 'blink'. Set to empty string if not used.
# - spec_name: name of a job specification, e.g. '<master>:<builder>:<test>'
#     for buildbot jobs.
# - deduped: boolean describing whether the job was deduped or not.
jobs_requested = gae_ts_mon.CounterMetric(
    'jobs/requested',
    description='Number of requested jobs over time.')
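

# Illustrative usage sketch; the helper below is hypothetical and assumes
# the older gae_ts_mon API in which field values are passed at call time
# rather than declared on the metric.
def _on_job_completed(project_id, subproject_id, spec_name, result):
    # Count one completed job with the fields documented above.
    jobs_completed.increment(fields={
        'project_id': project_id,
        'subproject_id': subproject_id,
        'spec_name': spec_name,
        'result': result,
    })
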
Example #4
    gae_ts_mon.CounterMetric('buildbucket/builds/lease_expired',
                             'Build lease expirations',
                             _string_fields('bucket', 'builder', 'status')))
inc_leases = _incrementer(
    gae_ts_mon.CounterMetric('buildbucket/builds/leases',
                             'Successful build leases or lease extensions',
                             _string_fields('bucket', 'builder')))

_BUILD_DURATION_FIELDS = _string_fields('bucket', 'builder', 'result',
                                        'failure_reason', 'cancelation_reason')

# Requires the argument to have non-None create_time and complete_time.
add_build_cycle_duration = _adder(  # pragma: no branch
    gae_ts_mon.NonCumulativeDistributionMetric(
        'buildbucket/builds/cycle_durations',
        'Duration between build creation and completion',
        _BUILD_DURATION_FIELDS,
        bucketer=BUCKETER_48_HR,
        units=gae_ts_mon.MetricsDataUnits.SECONDS), lambda b:
    (b.complete_time - b.create_time).total_seconds())

# Requires the argument to have non-None start_time and complete_time.
add_build_run_duration = _adder(  # pragma: no branch
    gae_ts_mon.NonCumulativeDistributionMetric(
        'buildbucket/builds/run_durations',
        'Duration between build start and completion',
        _BUILD_DURATION_FIELDS,
        bucketer=BUCKETER_48_HR,
        units=gae_ts_mon.MetricsDataUnits.SECONDS), lambda b:
    (b.complete_time - b.start_time).total_seconds())
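

# _adder itself is not shown in this excerpt. A plausible sketch, not the
# actual buildbucket implementation: a closure that adds value_fn(build) to
# the distribution metric. How the metric fields are derived from the build
# is assumed here purely for illustration.
def _adder_sketch(metric, value_fn):
    def add(build):
        metric.add(value_fn(build), fields={
            'bucket': build.bucket,
            'builder': build.builder,
            'result': str(build.result),
            'failure_reason': str(build.failure_reason),
            'cancelation_reason': str(build.cancelation_reason),
        })
    return add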

CURRENTLY_PENDING = gae_ts_mon.GaugeMetric('buildbucket/builds/pending',
Example #5
from status import build_result, util
import time_functions.timestamp

requests_metric = gae_ts_mon.CounterMetric(
    'flakiness_pipeline/cq_status/requests',
    'Requests made to the chromium-cq-status API',
    [gae_ts_mon.StringField('status')])
flakes_metric = gae_ts_mon.CounterMetric(
    'flakiness_pipeline/flake_occurrences_detected',
    'Detected flake occurrences', None)
parsing_errors = gae_ts_mon.CounterMetric(
    'flakiness_pipeline/cq_status_parsing_errors',
    'Number of errors when parsing records returned by chromium-cq-status',
    None)
occurrences_per_flake_day = gae_ts_mon.NonCumulativeDistributionMetric(
    'flakiness_pipeline/occurrences_per_flake/day',
    'Distribution of flake occurrence counts, calculated over all flakes in '
    'the last day', None)
occurrences_per_flake_week = gae_ts_mon.NonCumulativeDistributionMetric(
    'flakiness_pipeline/occurrences_per_flake/week',
    'Distribution of flake occurrence counts, calculated over all flakes in '
    'the last week', None)
occurrences_per_flake_month = gae_ts_mon.NonCumulativeDistributionMetric(
    'flakiness_pipeline/occurrences_per_flake/month',
    'Distribution of flake occurrence counts, calculated over all flakes in '
    'the last month', None)
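

# Illustrative usage sketch; the helper below is hypothetical, not part of
# the original flakiness pipeline code.
def _record_cq_status_request(status):
    # Count one request to the chromium-cq-status API by outcome; 'status'
    # is the single field declared on requests_metric above.
    requests_metric.increment(fields={'status': status})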


@ndb.transactional
def get_patchset_builder_runs(issue, patchset, master, builder):
    patchset_builder_runs_id = PatchsetBuilderRuns.getId(
        issue, patchset, master, builder)