def mock_config():
    '''
    Mock configuration for bot
    Using try source

    Registers mocked HTTP responses for the clang-tidy config and the
    third-party paths file, then (re)initializes the bot settings with a
    fake Try task environment. Returns the configured settings module.
    '''
    # Fake Try task identifiers so settings detect the "try" source
    os.environ['TRY_TASK_ID'] = 'remoteTryTask'
    os.environ['TRY_TASK_GROUP_ID'] = 'remoteTryGroup'

    third_party = [
        'test/dummy/',
        '3rdparty/',
    ]
    path = os.path.join(MOCK_DIR, 'config.yaml')
    # Use a context manager so the fixture file handle is closed
    # (the previous open(path).read() leaked the handle)
    with open(path) as config_file:
        config_body = config_file.read()
    responses.add(
        responses.GET,
        'https://hg.mozilla.org/mozilla-central/raw-file/tip/tools/clang-tidy/config.yaml',
        body=config_body,
        content_type='text/plain',
    )
    responses.add(
        responses.GET,
        'https://hg.mozilla.org/mozilla-central/raw-file/tip/3rdparty.txt',
        body='\n'.join(third_party),
        content_type='text/plain',
    )
    # Imported lazily so the mocked responses above are in place first
    from static_analysis_bot.config import settings
    # Reset any previously-loaded config before re-running setup
    settings.config = None
    settings.setup('test', 'IN_PATCH', ['dom/*', 'tests/*.py', 'test/*.c'])
    return settings
def build_config():
    '''
    Mock configuration for bot

    Registers a mocked HTTP response serving the local config.yaml fixture,
    then (re)initializes the bot settings with a fresh temporary work dir.
    Returns the configured settings module.
    '''
    path = os.path.join(MOCK_DIR, 'config.yaml')
    # Use a context manager so the fixture file handle is closed
    # (the previous open(path).read() leaked the handle)
    with open(path) as config_file:
        config_body = config_file.read()
    responses.add(
        responses.GET,
        'https://hg.mozilla.org/mozilla-central/raw-file/tip/tools/clang-tidy/config.yaml',
        body=config_body,
        content_type='text/plain',
    )
    # Imported lazily so the mocked response above is in place first
    from static_analysis_bot.config import settings
    # Reset any previously-loaded config before re-running setup
    settings.config = None
    settings.setup('test', tempfile.mkdtemp(), 'IN_PATCH', ['dom/*', 'tests/*.py', 'test/*.c'])
    return settings
def build_config():
    '''
    Mock configuration for bot

    Registers a mocked HTTP response serving the local config.yaml fixture,
    then (re)initializes the bot settings with a fresh temporary work dir.
    Returns the configured settings module.
    '''
    # NOTE(review): this is byte-identical to another build_config defined
    # earlier in this file; the later definition shadows the earlier one.
    # Confirm which copy is intended and remove the other.
    path = os.path.join(MOCK_DIR, 'config.yaml')
    # Use a context manager so the fixture file handle is closed
    # (the previous open(path).read() leaked the handle)
    with open(path) as config_file:
        config_body = config_file.read()
    responses.add(
        responses.GET,
        'https://hg.mozilla.org/mozilla-central/raw-file/tip/tools/clang-tidy/config.yaml',
        body=config_body,
        content_type='text/plain',
    )
    # Imported lazily so the mocked response above is in place first
    from static_analysis_bot.config import settings
    # Reset any previously-loaded config before re-running setup
    settings.config = None
    settings.setup('test', tempfile.mkdtemp(), 'IN_PATCH', ['dom/*', 'tests/*.py', 'test/*.c'])
    return settings
def mock_config():
    '''
    Mock configuration for bot

    Registers a mocked HTTP response serving the local config.yaml fixture,
    then initializes the bot settings with a fresh temporary directory.
    Returns the configured settings module.
    '''
    path = os.path.join(MOCK_DIR, 'config.yaml')
    # Use a context manager so the fixture file handle is closed
    # (the previous open(path).read() leaked the handle)
    with open(path) as config_file:
        config_body = config_file.read()
    responses.add(
        responses.GET,
        'https://hg.mozilla.org/mozilla-central/raw-file/tip/tools/clang-tidy/config.yaml',
        body=config_body,
        content_type='text/plain',
    )
    # Imported lazily so the mocked response above is in place first
    from static_analysis_bot.config import settings
    # NOTE(review): unlike the sibling variants in this file, this one does
    # not reset settings.config to None before setup — confirm whether a
    # stale config can leak between tests here.
    tempdir = tempfile.mkdtemp()
    settings.setup('test', tempdir, 'IN_PATCH')
    return settings
def main(source,
         id,
         cache_root,
         mozreview_diffset,
         mozreview_revision,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Bot entry point: load secrets, configure logging/settings/stats,
    build reporters and services, resolve the revision to analyze from
    the given source ('phabricator' or 'mozreview'), then run the
    analysis workflow. On failure the error is logged, the analysis is
    indexed with state 'error', and the exception is re-raised so the
    task is marked as failed.
    '''
    # Fetch required secrets from Taskcluster, with development defaults
    secrets = get_secrets(taskcluster_secret,
                          config.PROJECT_NAME,
                          required=(
                              'APP_CHANNEL',
                              'REPORTERS',
                              'ANALYZERS',
                              'PHABRICATOR',
                          ),
                          existing={
                              'APP_CHANNEL': 'development',
                              'REPORTERS': [],
                              'ANALYZERS': ['clang-tidy', ],
                              'PUBLICATION': 'IN_PATCH',
                          },
                          taskcluster_client_id=taskcluster_client_id,
                          taskcluster_access_token=taskcluster_access_token,
                          )

    # Configure logging backends (Papertrail / Sentry / MozDef) when the
    # corresponding secrets are present
    init_logger(config.PROJECT_NAME,
                PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
                PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
                SENTRY_DSN=secrets.get('SENTRY_DSN'),
                MOZDEF=secrets.get('MOZDEF'),
                )

    # Setup settings before stats
    settings.setup(secrets['APP_CHANNEL'], cache_root, secrets['PUBLICATION'])

    # Setup statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_api = PhabricatorAPI(**secrets['PHABRICATOR'])
    if 'phabricator' in reporters:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision according to the requested source
    if source == 'phabricator':
        revision = PhabricatorRevision(id, phabricator_api)
    elif source == 'mozreview':
        revision = MozReviewRevision(id, mozreview_revision, mozreview_diffset)
    else:
        raise Exception('Unsupported analysis source: {}'.format(source))

    w = Workflow(reporters, secrets['ANALYZERS'], index_service, phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        w.index(revision, state='error')

        # Then raise to mark task as erroneous
        raise
def main(taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Bot entry point (Try-task variant): load secrets, configure
    logging/settings/stats, build reporters and Taskcluster services,
    resolve the revision from the Try task referenced by the settings,
    then run the analysis workflow. On failure the error is logged, the
    analysis is indexed with state 'error' (including AnalysisException
    details when available), the Harbormaster build is marked failed,
    and the exception is re-raised so the task fails.
    '''
    # Fetch required secrets from Taskcluster, with development defaults
    secrets = get_secrets(taskcluster_secret,
                          config.PROJECT_NAME,
                          required=(
                              'APP_CHANNEL',
                              'REPORTERS',
                              'PHABRICATOR',
                              'ALLOWED_PATHS',
                          ),
                          existing={
                              'APP_CHANNEL': 'development',
                              'REPORTERS': [],
                              'PUBLICATION': 'IN_PATCH',
                              'ALLOWED_PATHS': ['*', ],
                          },
                          taskcluster_client_id=taskcluster_client_id,
                          taskcluster_access_token=taskcluster_access_token,
                          )

    # Configure logging backends (Papertrail / Sentry / MozDef) when the
    # corresponding secrets are present
    init_logger(config.PROJECT_NAME,
                PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
                PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
                SENTRY_DSN=secrets.get('SENTRY_DSN'),
                MOZDEF=secrets.get('MOZDEF'),
                timestamp=True,
                )

    # Setup settings before stats
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
    )

    # Setup statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load queue service
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_reporting_enabled = 'phabricator' in reporters
    phabricator_api = PhabricatorAPI(phabricator['api_key'], phabricator['url'])
    if phabricator_reporting_enabled:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision from the Try task referenced in settings
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),

        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(reporters, index_service, queue_service, phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state, with extra details for known analysis errors
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        w.index(revision, state='error', **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise
def main(
    id,
    work_dir,
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):
    '''
    Bot entry point (work_dir variant): load secrets, configure
    logging/settings/stats, build reporters and Taskcluster services,
    resolve the revision either directly from a Phabricator diff PHID or
    from the current Try task (depending on settings.source), then run
    the analysis workflow. On failure the error is logged, the analysis
    is indexed with state 'error' (including AnalysisException details
    when available), the Harbormaster build is marked failed, and the
    exception is re-raised so the task fails.
    '''
    # Fetch required secrets from Taskcluster, with development defaults
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'ANALYZERS',
            'PHABRICATOR',
            'ALLOWED_PATHS',
            'MAX_CLONE_RUNTIME',
        ),
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'ANALYZERS': [
                'clang-tidy',
            ],
            'PUBLICATION': 'IN_PATCH',
            'ALLOWED_PATHS': [
                '*',
            ],
            # Default clone timeout: 15 minutes, in seconds
            'MAX_CLONE_RUNTIME': 15 * 60,
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    # Configure logging backends (Papertrail / Sentry / MozDef) when the
    # corresponding secrets are present
    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
        timestamp=True,
    )

    # Setup settings before stats
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        work_dir,
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
        secrets.get('COVERITY_CONFIG'),
        secrets['MAX_CLONE_RUNTIME'],
        phabricator.get('build_plan'),
    )

    # Setup statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load queue service
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_api = PhabricatorAPI(phabricator['api_key'], phabricator['url'])
    if 'phabricator' in reporters:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision according to the detected source
    if settings.source == SOURCE_PHABRICATOR:
        revision = PhabricatorRevision(phabricator_api, diff_phid=id)
    elif settings.source == SOURCE_TRY:
        revision = PhabricatorRevision(phabricator_api,
                                       try_task=queue_service.task(
                                           settings.try_task_id))
    else:
        raise Exception('Unsupported source {}'.format(settings.source))

    # Run workflow according to source
    w = Workflow(reporters, secrets['ANALYZERS'], index_service, queue_service, phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state, with extra details for known analysis errors
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        w.index(revision, state='error', **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise