Code Example #1
File: conftest.py Project: sylvestre/services
import itertools
import time

import pytest


# NOTE: assumed to be a pytest fixture; the decorator was likely
# dropped from the original listing.
@pytest.fixture
def mock_stats(mock_config):
    '''
    Mock Datadog authentication and stats management
    '''
    from static_analysis_bot import stats

    # Configure Datadog with a dummy token
    # and an ultra fast flushing cycle
    stats.auth('test_token')
    stats.api.stop()
    stats.api.start(flush_interval=0.001)
    assert not stats.api._disabled
    assert stats.api._is_auto_flushing

    class MemoryReporter(object):
        '''
        A reporting class that reports to memory for testing.
        Used in datadog unit tests:
        https://github.com/DataDog/datadogpy/blob/master/tests/unit/threadstats/test_threadstats.py
        '''
        def __init__(self, api):
            self.metrics = []
            self.events = []
            self.api = api

        def flush_metrics(self, metrics):
            self.metrics += metrics

        def flush_events(self, events):
            self.events += events

        def flush(self):
            # Helper for unit tests to force flush
            self.api.flush(time.time() + 20)

        def get_metrics(self, metric_name):
            return list(itertools.chain(*[
                [
                    [t, point * m['interval']]
                    for t, point in m['points']
                ]
                for m in self.metrics
                if m['metric'] == metric_name
            ]))

    # Expose the reporter so unit tests can access the collected metrics
    stats.api.reporter = MemoryReporter(stats.api)
    yield stats.api.reporter
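
Because MemoryReporter keeps everything in memory, a test that consumes this fixture can flush and then inspect metrics directly. A minimal usage sketch, assuming stats.api is a datadog ThreadStats instance; the metric name below is purely illustrative:

def test_metric_is_flushed(mock_stats):
    from static_analysis_bot import stats

    # Record a counter; 'runtime.test' is a hypothetical metric name
    stats.api.increment('runtime.test')

    # Force a flush past the aggregation window, then inspect memory
    mock_stats.flush()
    assert len(mock_stats.get_metrics('runtime.test')) > 0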
Code Example #2
# NOTE: relies on project helpers (get_secrets, init_logger, settings, stats,
# get_reporters, get_service, PhabricatorAPI, PhabricatorRevision,
# MozReviewRevision, Workflow, logger) whose imports are omitted from this
# listing.
def main(source,
         id,
         cache_root,
         mozreview_diffset,
         mozreview_revision,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):

    secrets = get_secrets(taskcluster_secret,
                          config.PROJECT_NAME,
                          required=(
                              'APP_CHANNEL',
                              'REPORTERS',
                              'ANALYZERS',
                              'PHABRICATOR',
                          ),
                          existing={
                              'APP_CHANNEL': 'development',
                              'REPORTERS': [],
                              'ANALYZERS': ['clang-tidy', ],
                              'PUBLICATION': 'IN_PATCH',
                          },
                          taskcluster_client_id=taskcluster_client_id,
                          taskcluster_access_token=taskcluster_access_token,
                          )

    init_logger(config.PROJECT_NAME,
                PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
                PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
                SENTRY_DSN=secrets.get('SENTRY_DSN'),
                MOZDEF=secrets.get('MOZDEF'),
                )

    # Set up settings before stats
    settings.setup(secrets['APP_CHANNEL'], cache_root, secrets['PUBLICATION'])

    # Set up statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_api = PhabricatorAPI(**secrets['PHABRICATOR'])
    if 'phabricator' in reporters:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision
    if source == 'phabricator':
        revision = PhabricatorRevision(id, phabricator_api)

    elif source == 'mozreview':
        revision = MozReviewRevision(id, mozreview_revision, mozreview_diffset)

    else:
        raise Exception('Unsupported analysis source: {}'.format(source))

    w = Workflow(reporters, secrets['ANALYZERS'], index_service, phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        w.index(revision, state='error')

        # Then raise to mark task as erroneous
        raise
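
The get_secrets call above merges local defaults (existing) with the secret fetched from Taskcluster, then enforces the required keys. A minimal sketch of that merge logic, with names chosen for illustration rather than taken from the project's actual helper:

def merge_secrets(fetched, existing, required):
    # Local defaults first, then let the fetched secret override them
    secrets = dict(existing or {})
    secrets.update(fetched or {})

    # Fail fast when a required key is still missing after the merge
    missing = [key for key in required if key not in secrets]
    if missing:
        raise KeyError('Missing required secrets: {}'.format(', '.join(missing)))
    return secrets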
Code Example #3
def main(taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):

    secrets = get_secrets(taskcluster_secret,
                          config.PROJECT_NAME,
                          required=(
                              'APP_CHANNEL',
                              'REPORTERS',
                              'PHABRICATOR',
                              'ALLOWED_PATHS',
                          ),
                          existing={
                              'APP_CHANNEL': 'development',
                              'REPORTERS': [],
                              'PUBLICATION': 'IN_PATCH',
                              'ALLOWED_PATHS': ['*', ],
                          },
                          taskcluster_client_id=taskcluster_client_id,
                          taskcluster_access_token=taskcluster_access_token,
                          )

    init_logger(config.PROJECT_NAME,
                PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
                PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
                SENTRY_DSN=secrets.get('SENTRY_DSN'),
                MOZDEF=secrets.get('MOZDEF'),
                timestamp=True,
                )

    # Set up settings before stats
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
    )
    # Set up statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load queue service
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_reporting_enabled = 'phabricator' in reporters
    phabricator_api = PhabricatorAPI(phabricator['api_key'], phabricator['url'])
    if phabricator_reporting_enabled:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),

        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(reporters, index_service, queue_service, phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        w.index(revision, state='error', **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise
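
The except block above attaches a machine-readable code to the indexed error state when the failure is an AnalysisException. A sketch of what such an exception class could look like, assuming it simply carries a code alongside the message (illustrative, not necessarily the project's actual definition):

class AnalysisException(Exception):
    '''
    Failure carrying a machine-readable error code, so the indexed
    task state can distinguish failure categories.
    '''
    def __init__(self, code, message):
        self.code = code
        super(AnalysisException, self).__init__(message)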
Code Example #4
def main(
    id,
    work_dir,
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):

    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'ANALYZERS',
            'PHABRICATOR',
            'ALLOWED_PATHS',
            'MAX_CLONE_RUNTIME',
        ),
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'ANALYZERS': [
                'clang-tidy',
            ],
            'PUBLICATION': 'IN_PATCH',
            'ALLOWED_PATHS': [
                '*',
            ],
            'MAX_CLONE_RUNTIME': 15 * 60,
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
        timestamp=True,
    )

    # Set up settings before stats
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        work_dir,
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
        secrets.get('COVERITY_CONFIG'),
        secrets['MAX_CLONE_RUNTIME'],
        phabricator.get('build_plan'),
    )
    # Set up statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load queue service
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_api = PhabricatorAPI(phabricator['api_key'],
                                     phabricator['url'])
    if 'phabricator' in reporters:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision
    if settings.source == SOURCE_PHABRICATOR:
        revision = PhabricatorRevision(phabricator_api, diff_phid=id)
    elif settings.source == SOURCE_TRY:
        revision = PhabricatorRevision(phabricator_api,
                                       try_task=queue_service.task(
                                           settings.try_task_id))
    else:
        raise Exception('Unsupported source {}'.format(settings.source))

    # Run workflow according to source
    w = Workflow(reporters, secrets['ANALYZERS'], index_service, queue_service,
                 phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        w.index(revision, state='error', **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise
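
All three main functions read like CLI entry points whose option parsing was stripped from the listings. A hedged sketch of how the last one might be wired with click; the option names, defaults, and environment variables here are assumptions, not the project's actual CLI:

import click


@click.command()
@click.option('--id', default=None, help='Phabricator diff PHID (illustrative)')
@click.option('--work-dir', default='/tmp/analysis', help='Working directory (illustrative)')
@click.option('--taskcluster-secret', envvar='TASKCLUSTER_SECRET')
@click.option('--taskcluster-client-id', envvar='TASKCLUSTER_CLIENT_ID')
@click.option('--taskcluster-access-token', envvar='TASKCLUSTER_ACCESS_TOKEN')
def cli(id, work_dir, taskcluster_secret,
        taskcluster_client_id, taskcluster_access_token):
    # Delegate to the workflow entry point shown above
    main(id, work_dir, taskcluster_secret,
         taskcluster_client_id, taskcluster_access_token)


if __name__ == '__main__':
    cli()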