コード例 #1
0
def _load_perf_datum(job, perf_datum):
    """Ingest a PERFHERDER-schema performance blob submitted for ``job``.

    For every suite in ``perf_datum`` (and every subtest within it) this
    creates or updates a ``PerformanceSignature`` and records a
    ``PerformanceDatum``.  A ``generate_alerts`` Celery task is queued when
    new data was actually stored and the repository has performance alerts
    enabled.  Unknown or disabled frameworks are skipped with a log message.
    """
    # Raises on malformed input before touching the database.
    validate(perf_datum, PERFHERDER_SCHEMA)

    # Per-job defaults; individual suites may override these below via
    # their optional 'extraOptions' list.
    extra_properties = {}
    extra_options = ''
    reference_data = {
        'option_collection_hash': job.signature.option_collection_hash,
        'machine_platform': job.signature.machine_platform
    }

    option_collection = OptionCollection.objects.get(
        option_collection_hash=job.signature.option_collection_hash)

    try:
        framework = PerformanceFramework.objects.get(
            name=perf_datum['framework']['name'])
    except PerformanceFramework.DoesNotExist:
        logger.warning(
            "Performance framework %s does not exist, skipping "
            "load of performance artifacts", perf_datum['framework']['name'])
        return
    if not framework.enabled:
        logger.info("Performance framework %s is not enabled, skipping",
                    perf_datum['framework']['name'])
        return
    for suite in perf_datum['suites']:
        # Copy the job-level defaults so one suite's extraOptions cannot
        # leak into the next iteration.
        suite_extra_properties = copy.copy(extra_properties)
        suite_extra_options = copy.copy(extra_options)
        if suite.get('extraOptions'):
            suite_extra_properties = {
                'test_options': sorted(suite['extraOptions'])
            }
            # store extraOptions list as space separated string
            suite_extra_options = ' '.join(sorted(suite['extraOptions']))

        summary_signature_hash = None

        # if we have a summary value, create or get its signature by all its subtest
        # properties.
        if suite.get('value') is not None:
            # summary series
            summary_properties = {'suite': suite['name']}
            summary_properties.update(reference_data)
            summary_properties.update(suite_extra_properties)
            summary_signature_hash = _get_signature_hash(summary_properties)
            signature = _create_or_update_signature(
                job.repository,
                summary_signature_hash,
                framework,
                {
                    'test':
                    '',
                    'suite':
                    suite['name'],
                    'option_collection':
                    option_collection,
                    'platform':
                    job.machine_platform,
                    'extra_options':
                    suite_extra_options,
                    'lower_is_better':
                    suite.get('lowerIsBetter', True),
                    'has_subtests':
                    True,
                    # these properties below can be either True, False, or null
                    # (None). Null indicates no preference has been set.
                    'should_alert':
                    suite.get('shouldAlert'),
                    'alert_change_type':
                    PerformanceSignature._get_alert_change_type(
                        suite.get('alertChangeType')),
                    'alert_threshold':
                    suite.get('alertThreshold'),
                    'min_back_window':
                    suite.get('minBackWindow'),
                    'max_back_window':
                    suite.get('maxBackWindow'),
                    'fore_window':
                    suite.get('foreWindow'),
                    'last_updated':
                    job.push.time
                })
            # get_or_create: the summary value is only written the first
            # time this (repo, job, push, signature, timestamp) is seen.
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=job.repository,
                job=job,
                push=job.push,
                signature=signature,
                push_timestamp=job.push.time,
                defaults={'value': suite['value']})
            # For summaries, should_alert of None (no preference) still
            # alerts; only an explicit False suppresses it.
            if signature.should_alert is not False and datum_created and \
               job.repository.performance_alerts_enabled:
                generate_alerts.apply_async(args=[signature.id],
                                            queue='generate_perf_alerts')

        for subtest in suite['subtests']:
            subtest_properties = {
                'suite': suite['name'],
                'test': subtest['name']
            }
            subtest_properties.update(reference_data)
            subtest_properties.update(suite_extra_properties)

            summary_signature = None
            if summary_signature_hash is not None:
                # Link the subtest signature to its parent summary series.
                subtest_properties.update(
                    {'parent_signature': summary_signature_hash})
                summary_signature = PerformanceSignature.objects.get(
                    repository=job.repository,
                    framework=framework,
                    signature_hash=summary_signature_hash)
            subtest_signature_hash = _get_signature_hash(subtest_properties)
            # NOTE(review): the generator expression shadows the outer
            # ``subtest`` and rescans the whole suite; value[0] below is the
            # value of the *first* subtest whose name matches.
            value = list(subtest['value'] for subtest in suite['subtests']
                         if subtest['name'] == subtest_properties['test'])
            signature = _create_or_update_signature(
                job.repository,
                subtest_signature_hash,
                framework,
                {
                    'test':
                    subtest_properties['test'],
                    'suite':
                    suite['name'],
                    'option_collection':
                    option_collection,
                    'platform':
                    job.machine_platform,
                    'extra_options':
                    suite_extra_options,
                    'lower_is_better':
                    subtest.get('lowerIsBetter', True),
                    'has_subtests':
                    False,
                    # these properties below can be either True, False, or
                    # null (None). Null indicates no preference has been
                    # set.
                    'should_alert':
                    subtest.get('shouldAlert'),
                    'alert_change_type':
                    PerformanceSignature._get_alert_change_type(
                        subtest.get('alertChangeType')),
                    'alert_threshold':
                    subtest.get('alertThreshold'),
                    'min_back_window':
                    subtest.get('minBackWindow'),
                    'max_back_window':
                    subtest.get('maxBackWindow'),
                    'fore_window':
                    subtest.get('foreWindow'),
                    'parent_signature':
                    summary_signature,
                    'last_updated':
                    job.push.time
                })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=job.repository,
                job=job,
                push=job.push,
                signature=signature,
                push_timestamp=job.push.time,
                defaults={'value': value[0]})

            # by default if there is no summary, we should schedule a
            # generate alerts task for the subtest, since we have new data
            # (this can be over-ridden by the optional "should alert"
            # property)
            if ((signature.should_alert or (signature.should_alert is None
                                            and suite.get('value') is None))
                    and datum_created
                    and job.repository.performance_alerts_enabled):
                generate_alerts.apply_async(args=[signature.id],
                                            queue='generate_perf_alerts')
コード例 #2
0
def _load_perf_datum(job: Job, perf_datum: dict):
    """Ingest a validated performance blob submitted for ``job``.

    For every suite in ``perf_datum`` (and every subtest within it) this
    creates or updates a ``PerformanceSignature`` and records a
    ``PerformanceDatum``.  Data spanning multiple commits is registered in
    ``MultiCommitDatum``, and a ``generate_alerts`` Celery task is queued
    when new data was stored and the repository has alerts enabled.
    Unknown or disabled frameworks are skipped.
    """
    validate_perf_data(perf_datum)

    # Per-job defaults; individual suites may override via 'extraOptions'.
    extra_properties = {}
    reference_data = {
        'option_collection_hash': job.signature.option_collection_hash,
        'machine_platform': job.signature.machine_platform,
    }

    option_collection = OptionCollection.objects.get(
        option_collection_hash=job.signature.option_collection_hash)

    try:
        framework = PerformanceFramework.objects.get(
            name=perf_datum['framework']['name'])
    except PerformanceFramework.DoesNotExist:
        # job_resource_usage data is intentionally dropped without even a
        # warning; every other unknown framework gets logged.
        if perf_datum['framework']['name'] == "job_resource_usage":
            return
        logger.warning(
            "Performance framework %s does not exist, skipping "
            "load of performance artifacts",
            perf_datum['framework']['name'],
        )
        return
    if not framework.enabled:
        logger.info("Performance framework %s is not enabled, skipping",
                    perf_datum['framework']['name'])
        return
    application = _get_application_name(perf_datum)
    for suite in perf_datum['suites']:
        # Copy job-level defaults so one suite's options can't leak into
        # the next iteration.
        suite_extra_properties = copy.copy(extra_properties)
        ordered_tags = _order_and_concat(suite.get('tags', []))
        # The effective push timestamp may differ from job.push.time when
        # the datum covers multiple commits (see MultiCommitDatum below).
        deduced_timestamp, is_multi_commit = _deduce_push_timestamp(
            perf_datum, job.push.time)
        suite_extra_options = ''

        if suite.get('extraOptions'):
            suite_extra_properties = {
                'test_options': sorted(suite['extraOptions'])
            }
            suite_extra_options = _order_and_concat(suite['extraOptions'])
        summary_signature_hash = None

        # if we have a summary value, create or get its signature by all its subtest
        # properties.
        if suite.get('value') is not None:
            # summary series
            summary_properties = {'suite': suite['name']}
            summary_properties.update(reference_data)
            summary_properties.update(suite_extra_properties)
            summary_signature_hash = _get_signature_hash(summary_properties)
            signature = _create_or_update_signature(
                job.repository,
                summary_signature_hash,
                framework,
                application,
                {
                    'test':
                    '',
                    'suite':
                    suite['name'],
                    'suite_public_name':
                    suite.get('publicName'),
                    'option_collection':
                    option_collection,
                    'platform':
                    job.machine_platform,
                    'tags':
                    ordered_tags,
                    'extra_options':
                    suite_extra_options,
                    'measurement_unit':
                    suite.get('unit'),
                    'lower_is_better':
                    suite.get('lowerIsBetter', True),
                    'has_subtests':
                    True,
                    # these properties below can be either True, False, or null
                    # (None). Null indicates no preference has been set.
                    'should_alert':
                    suite.get('shouldAlert'),
                    'alert_change_type':
                    PerformanceSignature._get_alert_change_type(
                        suite.get('alertChangeType')),
                    'alert_threshold':
                    suite.get('alertThreshold'),
                    'min_back_window':
                    suite.get('minBackWindow'),
                    'max_back_window':
                    suite.get('maxBackWindow'),
                    'fore_window':
                    suite.get('foreWindow'),
                    'last_updated':
                    job.push.time,
                },
            )

            # get_or_create: the summary value is only written the first
            # time this datum is seen.
            (suite_datum,
             datum_created) = PerformanceDatum.objects.get_or_create(
                 repository=job.repository,
                 job=job,
                 push=job.push,
                 signature=signature,
                 push_timestamp=deduced_timestamp,
                 defaults={'value': suite['value']},
             )
            if suite_datum.should_mark_as_multi_commit(is_multi_commit,
                                                       datum_created):
                # keep a register with all multi commit perf data
                MultiCommitDatum.objects.create(perf_datum=suite_datum)
            # For summaries, should_alert of None (no preference) still
            # alerts; only an explicit False suppresses it.
            if (signature.should_alert is not False and datum_created
                    and job.repository.performance_alerts_enabled):
                generate_alerts.apply_async(args=[signature.id],
                                            queue='generate_perf_alerts')

        for subtest in suite['subtests']:
            subtest_properties = {
                'suite': suite['name'],
                'test': subtest['name']
            }
            subtest_properties.update(reference_data)
            subtest_properties.update(suite_extra_properties)

            summary_signature = None
            if summary_signature_hash is not None:
                # Link the subtest signature to its parent summary series.
                subtest_properties.update(
                    {'parent_signature': summary_signature_hash})
                summary_signature = PerformanceSignature.objects.get(
                    repository=job.repository,
                    framework=framework,
                    signature_hash=summary_signature_hash,
                    application=application,
                )
            subtest_signature_hash = _get_signature_hash(subtest_properties)
            # NOTE(review): the generator expression shadows the outer
            # ``subtest`` and rescans the whole suite; value[0] below is the
            # value of the *first* subtest whose name matches.
            value = list(subtest['value'] for subtest in suite['subtests']
                         if subtest['name'] == subtest_properties['test'])
            signature = _create_or_update_signature(
                job.repository,
                subtest_signature_hash,
                framework,
                application,
                {
                    'test':
                    subtest_properties['test'],
                    'suite':
                    suite['name'],
                    'test_public_name':
                    subtest.get('publicName'),
                    'suite_public_name':
                    suite.get('publicName'),
                    'option_collection':
                    option_collection,
                    'platform':
                    job.machine_platform,
                    'tags':
                    ordered_tags,
                    'extra_options':
                    suite_extra_options,
                    'measurement_unit':
                    subtest.get('unit'),
                    'lower_is_better':
                    subtest.get('lowerIsBetter', True),
                    'has_subtests':
                    False,
                    # these properties below can be either True, False, or
                    # null (None). Null indicates no preference has been
                    # set.
                    'should_alert':
                    subtest.get('shouldAlert'),
                    'alert_change_type':
                    PerformanceSignature._get_alert_change_type(
                        subtest.get('alertChangeType')),
                    'alert_threshold':
                    subtest.get('alertThreshold'),
                    'min_back_window':
                    subtest.get('minBackWindow'),
                    'max_back_window':
                    subtest.get('maxBackWindow'),
                    'fore_window':
                    subtest.get('foreWindow'),
                    'parent_signature':
                    summary_signature,
                    'last_updated':
                    job.push.time,
                },
            )
            (subtest_datum,
             datum_created) = PerformanceDatum.objects.get_or_create(
                 repository=job.repository,
                 job=job,
                 push=job.push,
                 signature=signature,
                 push_timestamp=deduced_timestamp,
                 defaults={'value': value[0]},
             )
            if subtest_datum.should_mark_as_multi_commit(
                    is_multi_commit, datum_created):
                # keep a register with all multi commit perf data
                MultiCommitDatum.objects.create(perf_datum=subtest_datum)

            # by default if there is no summary, we should schedule a
            # generate alerts task for the subtest, since we have new data
            # (this can be over-ridden by the optional "should alert"
            # property)
            if ((signature.should_alert or (signature.should_alert is None
                                            and suite.get('value') is None))
                    and datum_created
                    and job.repository.performance_alerts_enabled):
                generate_alerts.apply_async(args=[signature.id],
                                            queue='generate_perf_alerts')
コード例 #3
0
ファイル: perf.py プロジェクト: SebastinSanty/treeherder
def _load_perf_datum(job, perf_datum):
    """Ingest a PERFHERDER-schema performance blob submitted for ``job``.

    For every suite (and every subtest within it) this upserts a
    ``PerformanceSignature`` via ``update_or_create`` and records a
    ``PerformanceDatum``, queuing a ``generate_alerts`` Celery task where
    the alerting conditions below are met.  Unknown or disabled frameworks
    are skipped with a log message.
    """
    validate(perf_datum, PERFHERDER_SCHEMA)

    # Per-job defaults; individual suites may override via 'extraOptions'.
    extra_properties = {}
    extra_options = ''
    reference_data = {
        'option_collection_hash': job.signature.option_collection_hash,
        'machine_platform': job.signature.machine_platform
    }

    option_collection = OptionCollection.objects.get(
        option_collection_hash=job.signature.option_collection_hash)

    try:
        framework = PerformanceFramework.objects.get(
            name=perf_datum['framework']['name'])
    except PerformanceFramework.DoesNotExist:
        logger.warning("Performance framework {} does not exist, skipping "
                       "load of performance artifacts".format(
                           perf_datum['framework']['name']))
        return
    if not framework.enabled:
        logger.info("Performance framework {} is not enabled, skipping"
                    .format(perf_datum['framework']['name']))
        return
    for suite in perf_datum['suites']:
        # Copy job-level defaults so a suite's options can't leak into the
        # next iteration.
        suite_extra_properties = copy.copy(extra_properties)
        suite_extra_options = copy.copy(extra_options)
        if suite.get('extraOptions'):
            suite_extra_properties = {
                'test_options': sorted(suite['extraOptions'])
            }
            # store extraOptions list as comma-separated str
            suite_extra_options = ' '.join(sorted(suite['extraOptions']))

        summary_signature_hash = None

        # if we have a summary value, create or get its signature by all its subtest
        # properties.
        if suite.get('value') is not None:
            # summary series
            summary_properties = {
                'suite': suite['name']
            }
            summary_properties.update(reference_data)
            summary_properties.update(suite_extra_properties)
            summary_signature_hash = _get_signature_hash(
                summary_properties)

            signature, _ = PerformanceSignature.objects.update_or_create(
                repository=job.repository,
                signature_hash=summary_signature_hash,
                framework=framework,
                defaults={
                    'test': '',
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': job.machine_platform,
                    'extra_options': suite_extra_options,
                    'lower_is_better': suite.get('lowerIsBetter', True),
                    'has_subtests': True,
                    # these properties below can be either True, False, or null
                    # (None). Null indicates no preference has been set.
                    'should_alert': suite.get('shouldAlert'),
                    'alert_threshold': suite.get('alertThreshold'),
                    'min_back_window': suite.get('minBackWindow'),
                    'max_back_window': suite.get('maxBackWindow'),
                    'fore_window': suite.get('foreWindow'),
                    'last_updated': job.push.time
                })
            # get_or_create: the summary value is only written the first
            # time this datum is seen.
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=job.repository,
                job=job,
                push=job.push,
                signature=signature,
                push_timestamp=job.push.time,
                defaults={'value': suite['value']})
            # For summaries, should_alert of None (no preference) still
            # alerts; only an explicit False suppresses it.
            if signature.should_alert is not False and datum_created and \
               job.repository.performance_alerts_enabled:
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')

        for subtest in suite['subtests']:
            subtest_properties = {
                'suite': suite['name'],
                'test': subtest['name']
            }
            subtest_properties.update(reference_data)
            subtest_properties.update(suite_extra_properties)

            summary_signature = None
            if summary_signature_hash is not None:
                # Link the subtest signature to its parent summary series.
                subtest_properties.update({'parent_signature': summary_signature_hash})
                summary_signature = PerformanceSignature.objects.get(
                    repository=job.repository,
                    framework=framework,
                    signature_hash=summary_signature_hash)
            subtest_signature_hash = _get_signature_hash(subtest_properties)
            # NOTE(review): the generator expression shadows the outer
            # ``subtest`` and rescans the whole suite; value[0] below is the
            # value of the *first* subtest whose name matches.
            value = list(subtest['value'] for subtest in suite['subtests'] if
                         subtest['name'] == subtest_properties['test'])
            signature, _ = PerformanceSignature.objects.update_or_create(
                repository=job.repository,
                signature_hash=subtest_signature_hash,
                framework=framework,
                defaults={
                    'test': subtest_properties['test'],
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': job.machine_platform,
                    'extra_options': suite_extra_options,
                    'lower_is_better': subtest.get('lowerIsBetter', True),
                    'has_subtests': False,
                    # these properties below can be either True, False, or
                    # null (None). Null indicates no preference has been
                    # set.
                    'should_alert': subtest.get('shouldAlert'),
                    'alert_threshold': subtest.get('alertThreshold'),
                    'min_back_window': subtest.get('minBackWindow'),
                    'max_back_window': subtest.get('maxBackWindow'),
                    'fore_window': subtest.get('foreWindow'),
                    'parent_signature': summary_signature,
                    'last_updated': job.push.time
                })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=job.repository,
                job=job,
                push=job.push,
                signature=signature,
                push_timestamp=job.push.time,
                defaults={'value': value[0]})

            # by default if there is no summary, we should schedule a
            # generate alerts task for the subtest, since we have new data
            # (this can be over-ridden by the optional "should alert"
            # property)
            # NOTE(review): unlike the summary branch above, when
            # should_alert is explicitly True this queues a task WITHOUT
            # checking datum_created or performance_alerts_enabled; those
            # guards only apply when should_alert is None. Confirm this
            # asymmetry is intended.
            if signature.should_alert or (signature.should_alert is None and
                                          (datum_created and
                                           job.repository.performance_alerts_enabled and
                                           suite.get('value') is None)):
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')
コード例 #4
0
ファイル: perf.py プロジェクト: AnthonyMeaux/treeherder
def _load_perf_artifact(project_name, reference_data, job_data, job_guid,
                        perf_datum):
    """Ingest a PERFHERDER-schema performance artifact for one job.

    Resolves the repository/platform/option-collection from the raw
    ``reference_data``, then for every suite (and subtest) upserts a
    ``PerformanceSignature`` and records a ``PerformanceDatum`` keyed by
    result set and job id, queuing ``generate_alerts`` where the alerting
    conditions below are met.  Unknown or disabled frameworks are skipped.
    """
    validate(perf_datum, PERFHERDER_SCHEMA)

    # e10s jobs are detected from the job group symbol and tagged so their
    # signatures hash differently from non-e10s runs.
    if 'e10s' in reference_data.get('job_group_symbol', ''):
        extra_properties = {'test_options': ['e10s']}
    else:
        extra_properties = {}

    # transform the reference data so it only contains what we actually
    # care about (for calculating the signature hash reproducibly), then
    # get the associated models
    reference_data = _transform_signature_properties(reference_data)
    option_collection = OptionCollection.objects.get(
        option_collection_hash=reference_data['option_collection_hash'])
    # there may be multiple machine platforms with the same platform: use
    # the first
    platform = MachinePlatform.objects.filter(
        platform=reference_data['machine_platform'])[0]
    repository = Repository.objects.get(
        name=project_name)

    # data for performance series
    job_id = job_data[job_guid]['id']
    result_set_id = job_data[job_guid]['result_set_id']
    # NOTE(review): fromtimestamp() uses the local timezone — presumably
    # the ingestion hosts run in UTC; confirm.
    push_timestamp = datetime.datetime.fromtimestamp(
        job_data[job_guid]['push_timestamp'])

    try:
        framework = PerformanceFramework.objects.get(
            name=perf_datum['framework']['name'])
    except PerformanceFramework.DoesNotExist:
        logger.warning("Performance framework {} does not exist, skipping "
                       "load of performance artifacts".format(
                           perf_datum['framework']['name']))
        return
    if not framework.enabled:
        logger.info("Performance framework {} is not enabled, skipping"
                    .format(perf_datum['framework']['name']))
        return
    for suite in perf_datum['suites']:
        # Copy job-level defaults so a suite's options can't leak into the
        # next iteration.
        suite_extra_properties = copy.copy(extra_properties)
        if suite.get('extraOptions'):
            suite_extra_properties = {
                'test_options': sorted(suite['extraOptions'])
            }
        # Collect one metadata dict per subtest; the summary signature hash
        # incorporates all of them.
        subtest_properties = []
        summary_signature_hash = None
        for subtest in suite['subtests']:
            subtest_metadata = {
                'suite': suite['name'],
                'test': subtest['name'],
                'lowerIsBetter': subtest.get('lowerIsBetter', True)
            }
            subtest_metadata.update(reference_data)
            subtest_properties.append(subtest_metadata)
        # Sort by test name so the summary hash is order-independent and so
        # the zip() with name-sorted subtests below lines up.
        subtest_properties.sort(key=lambda s: s['test'])

        # if we have a summary value, create or get its signature by all its subtest
        # properties.
        if suite.get('value') is not None:
            # summary series
            summary_properties = {
                'suite': suite['name'],
                'subtest_properties': subtest_properties
            }
            summary_properties.update(reference_data)
            summary_properties.update(suite_extra_properties)
            summary_signature_hash = _get_signature_hash(
                summary_properties)

            signature, _ = PerformanceSignature.objects.update_or_create(
                repository=repository, signature_hash=summary_signature_hash,
                framework=framework,
                defaults={
                    'test': '',
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'extra_properties': suite_extra_properties,
                    'lower_is_better': suite.get('lowerIsBetter', True),
                    'has_subtests': True,
                    # these properties below can be either True, False, or null
                    # (None). Null indicates no preference has been set.
                    'should_alert': suite.get('shouldAlert'),
                    'alert_threshold': suite.get('alertThreshold'),
                    'min_back_window': suite.get('minBackWindow'),
                    'max_back_window': suite.get('maxBackWindow'),
                    'fore_window': suite.get('foreWindow'),
                    'last_updated': push_timestamp
                })
            # get_or_create: the summary value is only written the first
            # time this datum is seen.
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': suite['value']})
            # For summaries, should_alert of None (no preference) still
            # alerts; only an explicit False suppresses it.
            if (signature.should_alert is not False and datum_created and
                (repository.performance_alerts_enabled)):
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')

        # Both sequences are sorted by test name, so zip pairs each subtest
        # with its precomputed metadata dict.
        for (subtest, subtest_metadata) in zip(sorted(
                suite['subtests'], key=lambda s: s['name']),
                                               subtest_properties):
            # we calculate the subtest signature incorporating
            # the hash of the parent.
            summary_signature = None
            if summary_signature_hash is not None:
                subtest_metadata.update({'parent_signature': summary_signature_hash})
                summary_signature = PerformanceSignature.objects.get(
                    repository=repository,
                    framework=framework,
                    signature_hash=summary_signature_hash)
            subtest_signature_hash = _get_signature_hash(subtest_metadata)
            # NOTE(review): the generator expression shadows the outer
            # ``subtest`` and rescans the whole suite; value[0] below is the
            # value of the *first* subtest whose name matches.
            value = list(subtest['value'] for subtest in suite['subtests'] if
                         subtest['name'] == subtest_metadata['test'])
            signature, _ = PerformanceSignature.objects.update_or_create(
                repository=repository,
                signature_hash=subtest_signature_hash,
                framework=framework,
                defaults={
                    'test': subtest_metadata['test'],
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'extra_properties': suite_extra_properties,
                    'lower_is_better': subtest_metadata['lowerIsBetter'],
                    'has_subtests': False,
                    # these properties below can be either True, False, or
                    # null (None). Null indicates no preference has been
                    # set.
                    'should_alert': subtest.get('shouldAlert'),
                    'alert_threshold': subtest.get('alertThreshold'),
                    'min_back_window': subtest.get('minBackWindow'),
                    'max_back_window': subtest.get('maxBackWindow'),
                    'fore_window': subtest.get('foreWindow'),
                    'parent_signature': summary_signature,
                    'last_updated': push_timestamp
                })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': value[0]})

            # by default if there is no summary, we should schedule a
            # generate alerts task for the subtest, since we have new data
            # (this can be over-ridden by the optional "should alert"
            # property)
            # NOTE(review): when should_alert is explicitly True this queues
            # a task WITHOUT checking datum_created or
            # performance_alerts_enabled; those guards apply only when
            # should_alert is None. Confirm this asymmetry is intended.
            if signature.should_alert or (signature.should_alert is None and
                                          (datum_created and
                                           repository.performance_alerts_enabled and
                                           suite.get('value') is None)):
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')
コード例 #5
0
def _load_perf_artifact(project_name, reference_data, job_data, job_guid,
                        perf_datum):
    """Ingest one Perfherder performance artifact.

    Validates ``perf_datum`` against ``PERFHERDER_SCHEMA``, then for each
    suite (and each of its subtests) creates or updates a
    ``PerformanceSignature`` and records a ``PerformanceDatum``, scheduling
    an alert-generation task whenever new, alertable data was stored.

    :param project_name: name of the repository (project) the job belongs to
    :param reference_data: dict of job reference properties (option
        collection hash, machine platform, ...) used to build signature
        hashes
    :param job_data: mapping of job guid -> dict with ``id``,
        ``result_set_id`` and ``push_timestamp`` keys
    :param job_guid: guid of the job this artifact was produced by
    :param perf_datum: the decoded performance data blob
    """
    validate(perf_datum, PERFHERDER_SCHEMA)

    # e10s jobs get a distinguishing test option so their series are kept
    # separate from non-e10s ones
    if 'e10s' in reference_data.get('job_group_symbol', ''):
        extra_properties = {'test_options': ['e10s']}
    else:
        extra_properties = {}

    # transform the reference data so it only contains what we actually
    # care about (for calculating the signature hash reproducibly), then
    # get the associated models
    reference_data = _transform_signature_properties(reference_data)
    option_collection = OptionCollection.objects.get(
        option_collection_hash=reference_data['option_collection_hash'])
    # there may be multiple machine platforms with the same platform: use
    # the first
    platform = MachinePlatform.objects.filter(
        platform=reference_data['machine_platform'])[0]
    repository = Repository.objects.get(name=project_name)
    # alerts are never generated for try pushes
    is_try_repository = repository.repository_group.name == 'try'

    # data for performance series
    job_id = job_data[job_guid]['id']
    result_set_id = job_data[job_guid]['result_set_id']
    push_timestamp = datetime.datetime.fromtimestamp(
        job_data[job_guid]['push_timestamp'])

    try:
        framework = PerformanceFramework.objects.get(
            name=perf_datum['framework']['name'])
    except PerformanceFramework.DoesNotExist:
        # lazy %-style args: the message is only formatted if the record
        # is actually emitted (consistent with the other loaders here)
        logger.warning("Performance framework %s does not exist, skipping "
                       "load of performance artifacts",
                       perf_datum['framework']['name'])
        return
    for suite in perf_datum['suites']:
        subtest_properties = []
        summary_signature_hash = None
        for subtest in suite['subtests']:
            subtest_metadata = {
                'suite': suite['name'],
                'test': subtest['name'],
                'lowerIsBetter': subtest.get('lowerIsBetter', True)
            }
            subtest_metadata.update(reference_data)
            subtest_properties.append(subtest_metadata)
        subtest_properties.sort(key=lambda s: s['test'])

        # map test name -> raw value once (first occurrence wins, matching
        # the previous first-match semantics) instead of rescanning the
        # whole subtest list for every subtest below
        subtest_values = {}
        for subtest in suite['subtests']:
            subtest_values.setdefault(subtest['name'], subtest['value'])

        summary_signature = None
        # if we have a summary value, create or get its signature by all its
        # subtest properties.
        if suite.get('value') is not None:
            # summary series
            summary_properties = {
                'suite': suite['name'],
                'subtest_properties': subtest_properties
            }
            summary_properties.update(reference_data)
            summary_properties.update(extra_properties)
            summary_signature_hash = _get_signature_hash(summary_properties)

            summary_signature, _ = PerformanceSignature.objects.update_or_create(
                repository=repository,
                signature_hash=summary_signature_hash,
                framework=framework,
                defaults={
                    'test': '',
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'extra_properties': extra_properties,
                    'lower_is_better': suite.get('lowerIsBetter', True),
                    'has_subtests': True,
                    # these properties below can be either True, False, or null
                    # (None). Null indicates no preference has been set.
                    'should_alert': suite.get('shouldAlert'),
                    'alert_threshold': suite.get('alertThreshold'),
                    'min_back_window': suite.get('minBackWindow'),
                    'max_back_window': suite.get('maxBackWindow'),
                    'fore_window': suite.get('foreWindow'),
                    'last_updated': push_timestamp
                })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=summary_signature,
                push_timestamp=push_timestamp,
                defaults={'value': suite['value']})
            # alert on newly-created summary data unless explicitly disabled
            if (summary_signature.should_alert is not False and datum_created
                    and (not is_try_repository)):
                generate_alerts.apply_async(args=[summary_signature.id],
                                            routing_key='generate_perf_alerts')

        for (subtest, subtest_metadata) in zip(
                sorted(suite['subtests'], key=lambda s: s['name']),
                subtest_properties):
            # we calculate the subtest signature incorporating the hash of
            # the parent; the parent signature object itself was already
            # created/updated above, so no per-subtest query is needed
            if summary_signature_hash is not None:
                subtest_metadata.update(
                    {'parent_signature': summary_signature_hash})
            subtest_signature_hash = _get_signature_hash(subtest_metadata)
            signature, _ = PerformanceSignature.objects.update_or_create(
                repository=repository,
                signature_hash=subtest_signature_hash,
                framework=framework,
                defaults={
                    'test': subtest_metadata['test'],
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'extra_properties': extra_properties,
                    'lower_is_better': subtest_metadata['lowerIsBetter'],
                    'has_subtests': False,
                    # these properties below can be either True, False, or
                    # null (None). Null indicates no preference has been
                    # set.
                    'should_alert': subtest.get('shouldAlert'),
                    'alert_threshold': subtest.get('alertThreshold'),
                    'min_back_window': subtest.get('minBackWindow'),
                    'max_back_window': subtest.get('maxBackWindow'),
                    'fore_window': subtest.get('foreWindow'),
                    'parent_signature': summary_signature,
                    'last_updated': push_timestamp
                })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': subtest_values[subtest_metadata['test']]})

            # by default if there is no summary, we should schedule a
            # generate alerts task for the subtest, since we have new data
            # (this can be over-ridden by the optional "should alert"
            # property)
            if signature.should_alert or (signature.should_alert is None and
                                          (datum_created and
                                           (not is_try_repository)
                                           and suite.get('value') is None)):
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')
Code example #6
0
File: perf.py  Project: anurag619/treeherder
def _load_perf_artifact(project_name, reference_data, job_data, job_guid,
                        perf_datum):
    """Ingest one Perfherder performance artifact.

    Validates ``perf_datum`` against ``PERFHERDER_SCHEMA``, then for each
    suite (and each of its subtests) creates or updates a
    ``PerformanceSignature`` and records a ``PerformanceDatum``, scheduling
    an alert-generation task whenever new data was stored.

    :param project_name: name of the repository (project) the job belongs to
    :param reference_data: dict of job reference properties used to build
        signature hashes
    :param job_data: mapping of job guid -> dict with ``id``,
        ``result_set_id`` and ``push_timestamp`` keys
    :param job_guid: guid of the job this artifact was produced by
    :param perf_datum: the decoded performance data blob
    """
    validate(perf_datum, PERFHERDER_SCHEMA)

    # e10s jobs get a distinguishing test option so their series are kept
    # separate from non-e10s ones
    if 'e10s' in reference_data.get('job_group_symbol', ''):
        extra_properties = {'test_options': ['e10s']}
    else:
        extra_properties = {}

    # transform the reference data so it only contains what we actually
    # care about (for calculating the signature hash reproducibly), then
    # get the associated models
    reference_data = _transform_signature_properties(reference_data)
    option_collection = OptionCollection.objects.get(
        option_collection_hash=reference_data['option_collection_hash'])
    # there may be multiple machine platforms with the same platform: use
    # the first
    platform = MachinePlatform.objects.filter(
        platform=reference_data['machine_platform'])[0]
    repository = Repository.objects.get(
        name=project_name)
    # alerts are never generated for try pushes
    is_try_repository = repository.repository_group.name == 'try'

    # data for performance series
    job_id = job_data[job_guid]['id']
    result_set_id = job_data[job_guid]['result_set_id']
    push_timestamp = datetime.datetime.fromtimestamp(
        job_data[job_guid]['push_timestamp'])

    try:
        framework = PerformanceFramework.objects.get(
            name=perf_datum['framework']['name'])
    except PerformanceFramework.DoesNotExist:
        # lazy %-style args: the message is only formatted if the record
        # is actually emitted (consistent with the other loaders here)
        logger.warning("Performance framework %s does not exist, skipping "
                       "load of performance artifacts",
                       perf_datum['framework']['name'])
        return
    for suite in perf_datum['suites']:
        subtest_properties = []
        summary_signature_hash = None
        for subtest in suite['subtests']:
            subtest_metadata = {
                'suite': suite['name'],
                'test': subtest['name'],
                'lowerIsBetter': subtest.get('lowerIsBetter', True)
            }
            subtest_metadata.update(reference_data)
            subtest_properties.append(subtest_metadata)
        subtest_properties.sort(key=lambda s: s['test'])

        # map test name -> raw value once (first occurrence wins, matching
        # the previous first-match semantics) instead of rescanning the
        # whole subtest list for every subtest below
        subtest_values = {}
        for subtest in suite['subtests']:
            subtest_values.setdefault(subtest['name'], subtest['value'])

        summary_signature = None
        # if we have a summary value, create or get its signature by all its
        # subtest properties.
        if suite.get('value') is not None:
            # summary series
            summary_properties = {
                'suite': suite['name'],
                'subtest_properties': subtest_properties
            }
            summary_properties.update(reference_data)
            summary_properties.update(extra_properties)
            summary_signature_hash = _get_signature_hash(
                    summary_properties)
            summary_signature, _ = PerformanceSignature.objects.get_or_create(
                repository=repository, signature_hash=summary_signature_hash,
                defaults={
                    'test': '',
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'framework': framework,
                    'extra_properties': extra_properties,
                    'lower_is_better': suite.get('lowerIsBetter', True),
                    'last_updated': push_timestamp
                })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=summary_signature,
                push_timestamp=push_timestamp,
                defaults={'value': suite['value']})
            if datum_created and (not is_try_repository):
                generate_alerts.apply_async(args=[summary_signature.id],
                                            routing_key='generate_perf_alerts')

        for subtest_metadata in subtest_properties:
            # we calculate the subtest signature incorporating the hash of
            # the parent; the parent signature object itself was already
            # created/fetched above, so no per-subtest query is needed
            if summary_signature_hash is not None:
                subtest_metadata.update({'parent_signature': summary_signature_hash})
            subtest_signature_hash = _get_signature_hash(subtest_metadata)
            signature, _ = PerformanceSignature.objects.update_or_create(
                    repository=repository,
                    signature_hash=subtest_signature_hash,
                    defaults={
                        'test': subtest_metadata['test'],
                        'suite': suite['name'],
                        'option_collection': option_collection,
                        'platform': platform,
                        'framework': framework,
                        'extra_properties': extra_properties,
                        'lower_is_better': subtest_metadata['lowerIsBetter'],
                        'parent_signature': summary_signature,
                        'last_updated': push_timestamp
                    })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': subtest_values[subtest_metadata['test']]})

            # if there is no summary, we should schedule a generate alerts
            # task for the subtest, since we have new data
            if (datum_created and (not is_try_repository) and
                    suite.get('value') is None):
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')
Code example #7
0
def load_perf_artifacts(project_name, reference_data, job_data, datum):
    """Ingest a performance artifact stored as a JSON blob.

    Decodes ``datum['blob']``, validates its ``performance_data`` against
    ``PERFHERDER_SCHEMA``, then creates or updates a
    ``PerformanceSignature`` and records a ``PerformanceDatum`` for every
    subtest and (when present) the suite summary, scheduling
    alert-generation tasks for newly created, alertable data.

    :param project_name: name of the repository (project) the job belongs to
    :param reference_data: dict of job reference properties used to build
        signature hashes
    :param job_data: mapping of job guid -> dict with ``id``,
        ``result_set_id`` and ``push_timestamp`` keys
    :param datum: dict with ``job_guid`` and a JSON-encoded ``blob``
    """
    blob = json.loads(datum['blob'])
    perf_datum = blob['performance_data']
    validate(perf_datum, PERFHERDER_SCHEMA)

    # e10s jobs get a distinguishing test option so their series are kept
    # separate from non-e10s ones
    if 'e10s' in reference_data.get('job_group_symbol', ''):
        extra_properties = {'test_options': ['e10s']}
    else:
        extra_properties = {}

    # transform the reference data so it only contains what we actually
    # care about (for calculating the signature hash reproducibly), then
    # get the associated models
    reference_data = _transform_signature_properties(reference_data)
    option_collection = OptionCollection.objects.get(
        option_collection_hash=reference_data['option_collection_hash'])
    # there may be multiple machine platforms with the same platform: use
    # the first
    platform = MachinePlatform.objects.filter(
        platform=reference_data['machine_platform'])[0]
    repository = Repository.objects.get(name=project_name)
    # alerts are never generated for try pushes
    is_try_repository = repository.repository_group.name == 'try'

    # data for performance series
    job_guid = datum["job_guid"]
    job_id = job_data[job_guid]['id']
    result_set_id = job_data[job_guid]['result_set_id']
    push_timestamp = datetime.datetime.fromtimestamp(
        job_data[job_guid]['push_timestamp'])

    try:
        framework = PerformanceFramework.objects.get(
            name=perf_datum['framework']['name'])
    except PerformanceFramework.DoesNotExist:
        # lazy %-style args: the message is only formatted if the record
        # is actually emitted (consistent with the other loaders here)
        logger.warning("Performance framework %s does not exist, skipping "
                       "load of performance artifacts",
                       perf_datum['framework']['name'])
        return
    for suite in perf_datum['suites']:
        subtest_signatures = []
        for subtest in suite['subtests']:
            subtest_properties = {
                'suite': suite['name'],
                'test': subtest['name']
            }
            subtest_properties.update(reference_data)
            subtest_signature_hash = _get_signature_hash(subtest_properties)
            subtest_signatures.append(subtest_signature_hash)

            signature, _ = PerformanceSignature.objects.update_or_create(
                repository=repository,
                signature_hash=subtest_signature_hash,
                defaults={
                    'test': subtest['name'],
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'framework': framework,
                    'extra_properties': extra_properties,
                    'lower_is_better': subtest.get('lowerIsBetter', True)
                })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': subtest['value']})

            # if there is no summary, we should schedule a generate alerts
            # task for the subtest, since we have new data
            if (datum_created and (not is_try_repository)
                    and suite.get('value') is None):
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')

        # if we have a summary value, create or get its signature and insert
        # it too
        if suite.get('value') is not None:
            # summary series: the signature hash also incorporates the
            # hashes of all subtests, so it changes if the subtest set does
            extra_summary_properties = {
                'subtest_signatures': sorted(subtest_signatures)
            }
            extra_summary_properties.update(extra_properties)
            summary_properties = {'suite': suite['name']}
            summary_properties.update(reference_data)
            summary_properties.update(extra_summary_properties)
            summary_signature_hash = _get_signature_hash(summary_properties)

            signature, _ = PerformanceSignature.objects.get_or_create(
                repository=repository,
                signature_hash=summary_signature_hash,
                defaults={
                    'test': '',
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'framework': framework,
                    'extra_properties': extra_summary_properties,
                    'last_updated': push_timestamp
                })
            (_, datum_created) = PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': suite['value']})
            if datum_created and (not is_try_repository):
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')
Code example #8
0
File: perf.py  Project: parkouss/treeherder
def load_perf_artifacts(project_name, reference_data, job_data, datum):
    """Ingest a performance artifact stored as a JSON blob.

    Decodes ``datum['blob']``, validates its ``performance_data`` against
    ``PERFHERDER_SCHEMA``, then creates or updates a
    ``PerformanceSignature`` and records a ``PerformanceDatum`` for every
    subtest and (when present) the suite summary, scheduling
    alert-generation tasks on non-try repositories.

    :param project_name: name of the repository (project) the job belongs to
    :param reference_data: dict of job reference properties used to build
        signature hashes
    :param job_data: mapping of job guid -> dict with ``id``,
        ``result_set_id`` and ``push_timestamp`` keys
    :param datum: dict with ``job_guid`` and a JSON-encoded ``blob``
    """
    blob = json.loads(datum['blob'])
    perf_datum = blob['performance_data']
    validate(perf_datum, PERFHERDER_SCHEMA)

    # e10s jobs get a distinguishing test option so their series are kept
    # separate from non-e10s ones
    if 'e10s' in reference_data.get('job_group_symbol', ''):
        extra_properties = {'test_options': ['e10s']}
    else:
        extra_properties = {}

    # transform the reference data so it only contains what we actually
    # care about (for calculating the signature hash reproducibly), then
    # get the associated models
    reference_data = _transform_signature_properties(reference_data)
    option_collection = OptionCollection.objects.get(
        option_collection_hash=reference_data['option_collection_hash'])
    # there may be multiple machine platforms with the same platform: use
    # the first
    platform = MachinePlatform.objects.filter(
        platform=reference_data['machine_platform'])[0]
    repository = Repository.objects.get(
        name=project_name)
    # alerts are never generated for try pushes
    is_try_repository = repository.repository_group.name == 'try'

    # data for performance series
    job_guid = datum["job_guid"]
    job_id = job_data[job_guid]['id']
    result_set_id = job_data[job_guid]['result_set_id']
    push_timestamp = datetime.datetime.fromtimestamp(
        job_data[job_guid]['push_timestamp'])

    # an unknown framework previously raised DoesNotExist and aborted
    # ingestion; warn and skip instead, consistent with the other loaders
    try:
        framework = PerformanceFramework.objects.get(
            name=perf_datum['framework']['name'])
    except PerformanceFramework.DoesNotExist:
        logger.warning("Performance framework %s does not exist, skipping "
                       "load of performance artifacts",
                       perf_datum['framework']['name'])
        return
    for suite in perf_datum['suites']:
        subtest_signatures = []
        for subtest in suite['subtests']:
            subtest_properties = {
                'suite': suite['name'],
                'test': subtest['name']
            }
            subtest_properties.update(reference_data)
            subtest_signature_hash = _get_signature_hash(
                subtest_properties)
            subtest_signatures.append(subtest_signature_hash)

            signature, _ = PerformanceSignature.objects.update_or_create(
                repository=repository,
                signature_hash=subtest_signature_hash,
                defaults={
                    'test': subtest['name'],
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'framework': framework,
                    'extra_properties': extra_properties,
                    'lower_is_better': subtest.get('lowerIsBetter', True)
                })
            PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': subtest['value']})

            # if there is no summary, we should schedule a generate alerts
            # task for the subtest, since we have new data
            if not is_try_repository and suite.get('value') is None:
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')

        # if we have a summary value, create or get its signature and insert
        # it too
        if suite.get('value') is not None:
            # summary series: the signature hash also incorporates the
            # hashes of all subtests, so it changes if the subtest set does
            extra_summary_properties = {
                'subtest_signatures': sorted(subtest_signatures)
            }
            extra_summary_properties.update(extra_properties)
            summary_properties = {'suite': suite['name']}
            summary_properties.update(reference_data)
            summary_properties.update(extra_summary_properties)
            summary_signature_hash = _get_signature_hash(
                summary_properties)

            signature, _ = PerformanceSignature.objects.get_or_create(
                repository=repository, signature_hash=summary_signature_hash,
                defaults={
                    'test': '',
                    'suite': suite['name'],
                    'option_collection': option_collection,
                    'platform': platform,
                    'framework': framework,
                    'extra_properties': extra_summary_properties,
                    'last_updated': push_timestamp
                })
            PerformanceDatum.objects.get_or_create(
                repository=repository,
                result_set_id=result_set_id,
                job_id=job_id,
                signature=signature,
                push_timestamp=push_timestamp,
                defaults={'value': suite['value']})
            if not is_try_repository:
                generate_alerts.apply_async(args=[signature.id],
                                            routing_key='generate_perf_alerts')