def main(taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point: fetch pulse credentials from Taskcluster secrets,
    configure logging, then run the pulse listener until interrupted.
    '''
    # Runtime configuration comes from the Taskcluster secrets service.
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'PULSE_USER',
            'PULSE_PASSWORD',
            'PULSE_LISTENER_HOOKS',
        ),
        existing=dict(
            PULSE_LISTENER_HOOKS=[],
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    # Wire up papertrail / sentry / mozdef sinks when configured.
    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    listener = PulseListener(
        secrets['PULSE_USER'],
        secrets['PULSE_PASSWORD'],
        secrets['PULSE_LISTENER_HOOKS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )
    click.echo('Listening to pulse messages...')
    listener.run()
def main(commits,
         cache_root,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point: load secrets, configure logging, and run the static
    analysis workflow once per commit parsed out of *commits*.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=('STATIC_ANALYSIS_NOTIFICATIONS', ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    workflow = Workflow(
        cache_root,
        secrets['STATIC_ANALYSIS_NOTIFICATIONS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )
    # Each regex match is a tuple of groups, splatted into the workflow run.
    for commit in REGEX_COMMIT.findall(commits):
        workflow.run(*commit)
def main(revision,
         cache_root,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point: load coverage-upload tokens from Taskcluster secrets,
    configure logging, and run the code coverage pipeline for *revision*.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            config.COVERALLS_TOKEN_FIELD,
            config.CODECOV_TOKEN_FIELD,
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    # gecko-dev credentials are optional, hence .get() instead of indexing.
    coverage = CodeCov(
        revision,
        cache_root,
        secrets[config.COVERALLS_TOKEN_FIELD],
        secrets[config.CODECOV_TOKEN_FIELD],
        secrets.get(config.GECKO_DEV_USER_FIELD),
        secrets.get(config.GECKO_DEV_PWD_FIELD),
        taskcluster_client_id,
        taskcluster_access_token,
    )
    coverage.go()
def main(bugzilla_id,
         cache_root,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Run the bot to sync bugs & analysis on a remote Shipit server.

    When *bugzilla_id* is given, only that single bug is processed;
    otherwise the bot runs over every bug it finds.
    '''
    # Load runtime configuration, with development-friendly defaults.
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'BUGZILLA_URL',
            'BUGZILLA_TOKEN',
            'BUGZILLA_READ_ONLY',
            'BUGZILLA_COMMENT_ONLY',
            'API_URL',
            'APP_CHANNEL',
            'UPLIFT_NOTIFICATIONS',
        ),
        existing=dict(
            APP_CHANNEL='development',
            BUGZILLA_READ_ONLY=True,
            BUGZILLA_COMMENT_ONLY=False,
            UPLIFT_NOTIFICATIONS=['*****@*****.**'],
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    # Logging sinks (papertrail / sentry / mozdef) are all optional.
    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    # Credentials for the Shipit API; secrets may override the CLI values.
    api_client.setup(
        secrets['API_URL'],
        secrets.get('TASKCLUSTER_CLIENT_ID', taskcluster_client_id),
        secrets.get('TASKCLUSTER_ACCESS_TOKEN', taskcluster_access_token),
    )

    bot = Bot(secrets['APP_CHANNEL'], secrets['UPLIFT_NOTIFICATIONS'])
    bot.use_bugzilla(
        secrets['BUGZILLA_URL'],
        secrets['BUGZILLA_TOKEN'],
        secrets['BUGZILLA_READ_ONLY'],
        secrets['BUGZILLA_COMMENT_ONLY'],
    )
    bot.use_cache(cache_root)

    if bugzilla_id:
        bot.run(only=[bugzilla_id, ])
    else:
        bot.run()
def main(
    bugzilla_id,
    cache_root,
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):
    '''
    Run the bot to sync bugs & analysis on a remote Shipit server.

    Processes only *bugzilla_id* when provided, otherwise all bugs.
    '''
    # Runtime configuration with development defaults.
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'BUGZILLA_URL',
            'BUGZILLA_TOKEN',
            'API_URL',
            'APP_CHANNEL',
            'UPLIFT_NOTIFICATIONS',
        ),
        existing=dict(
            APP_CHANNEL='development',
            UPLIFT_NOTIFICATIONS=['*****@*****.**'],
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    # Optional log sinks.
    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    # Shipit API credentials; secret values win over CLI arguments.
    api_client.setup(
        secrets['API_URL'],
        secrets.get('TASKCLUSTER_CLIENT_ID', taskcluster_client_id),
        secrets.get('TASKCLUSTER_ACCESS_TOKEN', taskcluster_access_token),
    )

    bot = Bot(secrets['APP_CHANNEL'], secrets['UPLIFT_NOTIFICATIONS'])
    bot.use_bugzilla(
        secrets['BUGZILLA_URL'],
        secrets['BUGZILLA_TOKEN'],
    )
    bot.use_cache(cache_root)

    if bugzilla_id:
        bot.run(only=[bugzilla_id])
    else:
        bot.run()
def main(
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
    cache_root,
    phab_revision,
):
    '''
    Entry point: configure secrets/logging, build the Phabricator client,
    optionally queue a single revision, then listen for pulse messages.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'PULSE_USER',
            'PULSE_PASSWORD',
            'HOOKS',
            'ADMINS',
            'PHABRICATOR',
            'MERCURIAL_REMOTE',
        ),
        existing=dict(
            HOOKS=[],
            ADMINS=['*****@*****.**', '*****@*****.**']),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    # Task failures get mailed to the configured admins.
    task_monitoring.emails = secrets['ADMINS']

    phab_api = PhabricatorAPI(
        api_key=secrets['PHABRICATOR']['token'],
        url=secrets['PHABRICATOR']['url'],
    )

    listener = PulseListener(
        secrets['PULSE_USER'],
        secrets['PULSE_PASSWORD'],
        secrets['HOOKS'],
        secrets['MERCURIAL_REMOTE'],
        phab_api,
        cache_root,
        taskcluster_client_id,
        taskcluster_access_token,
    )
    click.echo('Listening to pulse messages...')

    # A manually supplied revision is processed in addition to pulse traffic.
    if phab_revision:
        listener.add_revision(phab_revision)

    listener.run()
def main(taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         cache_root,
         phab_revision,
         ):
    '''
    Entry point: configure secrets/logging, build the Phabricator client,
    optionally queue a single revision, then listen for pulse messages.
    Mercurial support is governed by the MERCURIAL secret (off by default).
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'PULSE_USER',
            'PULSE_PASSWORD',
            'HOOKS',
            'ADMINS',
            'PHABRICATOR',
            'MERCURIAL',
        ),
        existing=dict(
            HOOKS=[],
            ADMINS=['*****@*****.**', '*****@*****.**'],
            MERCURIAL={
                'enabled': False,
            },
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    # Task failures get mailed to the configured admins.
    task_monitoring.emails = secrets['ADMINS']

    phab_api = PhabricatorAPI(
        api_key=secrets['PHABRICATOR']['token'],
        url=secrets['PHABRICATOR']['url'],
    )

    listener = PulseListener(
        secrets['PULSE_USER'],
        secrets['PULSE_PASSWORD'],
        secrets['HOOKS'],
        secrets['MERCURIAL'],
        phab_api,
        cache_root,
        taskcluster_client_id,
        taskcluster_access_token,
    )
    click.echo('Listening to pulse messages...')

    # A manually supplied revision is processed in addition to pulse traffic.
    if phab_revision:
        listener.add_revision(phab_revision)

    listener.run()
def load(self, taskcluster_secret, taskcluster_client_id, taskcluster_access_token):
    '''Fetch the project secrets from Taskcluster and merge them into this mapping.'''
    # Nothing is strictly required for this project; whatever the secret
    # holds is merged in wholesale.
    self.update(get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    ))
def load(self, taskcluster_secret, taskcluster_client_id, taskcluster_access_token):
    '''Fetch the project secrets from Taskcluster and merge them into this mapping.

    Requires APP_CHANNEL and BUGZILLA_TOKEN to be present in the secret.
    '''
    self.update(get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            Secrets.APP_CHANNEL,
            Secrets.BUGZILLA_TOKEN,
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    ))
def load(self, taskcluster_secret, taskcluster_client_id, taskcluster_access_token):
    '''Fetch the project secrets from Taskcluster and merge them into this mapping.

    Requires the coverage-upload tokens (Coveralls + Codecov).
    '''
    self.update(get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            Secrets.COVERALLS_TOKEN,
            Secrets.CODECOV_TOKEN,
            Secrets.CODECOV_ACCESS_TOKEN,
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    ))
def main(commits,
         cache_root,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point: load secrets, configure logging, build the MozReview API
    client, and run the static analysis workflow once per parsed commit,
    holding a lock on the cache directory for the duration.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'STATIC_ANALYSIS_NOTIFICATIONS',
            'MOZREVIEW_URL',
            'MOZREVIEW_USER',
            'MOZREVIEW_API_KEY',
        ),
        existing={
            # MozReview reporting stays off unless explicitly enabled.
            'MOZREVIEW_ENABLED': False,
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    mozreview = build_api_root(
        secrets['MOZREVIEW_URL'],
        secrets['MOZREVIEW_USER'],
        secrets['MOZREVIEW_API_KEY'],
    )

    # Serialize runs sharing the same cache via a lock directory.
    with LockDir(cache_root, 'shipit-sa-') as work_dir:
        workflow = Workflow(
            work_dir,
            secrets['STATIC_ANALYSIS_NOTIFICATIONS'],
            mozreview,
            secrets['MOZREVIEW_ENABLED'],
            taskcluster_client_id,
            taskcluster_access_token,
        )
        for commit in REGEX_COMMIT.findall(commits):
            workflow.run(*commit)
def load(self, taskcluster_secret, taskcluster_client_id, taskcluster_access_token):
    '''Fetch the project secrets from Taskcluster and merge them into this mapping.

    Requires channel/backend settings, coverage-upload tokens, and the
    Phabricator endpoint credentials.
    '''
    self.update(get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            Secrets.APP_CHANNEL,
            Secrets.BACKEND_HOST,
            Secrets.COVERALLS_TOKEN,
            Secrets.CODECOV_REPO,
            Secrets.CODECOV_TOKEN,
            Secrets.CODECOV_ACCESS_TOKEN,
            Secrets.PHABRICATOR_URL,
            Secrets.PHABRICATOR_TOKEN,
        ),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    ))
def main(
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):
    '''
    Entry point: load pulse/hook configuration from Taskcluster secrets,
    configure logging and admin notifications, then run the pulse listener.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'PULSE_USER',
            'PULSE_PASSWORD',
            'HOOKS',
            'ADMINS',
        ),
        existing=dict(
            HOOKS=[],
            ADMINS=['*****@*****.**', '*****@*****.**']),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    # Task failures get mailed to the configured admins.
    task_monitoring.emails = secrets['ADMINS']

    listener = PulseListener(
        secrets['PULSE_USER'],
        secrets['PULSE_PASSWORD'],
        secrets['HOOKS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )
    click.echo('Listening to pulse messages...')
    listener.run()
def main(work_dir,
         revisions,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point: load secrets, configure logging, and run the bot once
    per revision listed in the whitespace-separated *revisions* string.
    '''
    # No specific keys are required; only the optional logging sinks are read.
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    bot = Bot(work_dir)
    # Fix: split() instead of split(' ') — repeated, leading, or trailing
    # whitespace previously produced empty-string revisions passed to bot.run.
    for revision in revisions.split():
        bot.run(revision)
def load(self, taskcluster_secret, taskcluster_client_id, taskcluster_access_token):
    '''Fetch the project secrets from Taskcluster and merge them into this mapping.

    Requires channel/backend settings, coverage tokens, and Phabricator
    configuration; the repo-mapper address list defaults to empty.
    '''
    self.update(get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            Secrets.APP_CHANNEL,
            Secrets.BACKEND_HOST,
            Secrets.COVERALLS_TOKEN,
            Secrets.CODECOV_REPO,
            Secrets.CODECOV_TOKEN,
            Secrets.CODECOV_ACCESS_TOKEN,
            Secrets.PHABRICATOR_ENABLED,
            Secrets.PHABRICATOR_URL,
            Secrets.PHABRICATOR_TOKEN,
        ),
        existing={
            Secrets.REPO_MAPPER_EMAIL_ADDRESSES: [],
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    ))
def load(self, taskcluster_secret, taskcluster_client_id, taskcluster_access_token):
    '''Fetch the project secrets from Taskcluster and merge them into this mapping.

    Same required set as before plus the Google Cloud Storage credentials;
    the repo-mapper address list defaults to empty.
    '''
    self.update(get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            Secrets.APP_CHANNEL,
            Secrets.BACKEND_HOST,
            Secrets.COVERALLS_TOKEN,
            Secrets.CODECOV_REPO,
            Secrets.CODECOV_TOKEN,
            Secrets.CODECOV_ACCESS_TOKEN,
            Secrets.GOOGLE_CLOUD_STORAGE,
            Secrets.PHABRICATOR_ENABLED,
            Secrets.PHABRICATOR_URL,
            Secrets.PHABRICATOR_TOKEN,
        ),
        existing={
            Secrets.REPO_MAPPER_EMAIL_ADDRESSES: [],
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    ))
def main(
    phabricator,
    mozreview,
    cache_root,
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):
    '''
    Entry point: run static analysis on either one Phabricator revision
    or one-or-more MozReview revisions (exactly one source must be given).
    Failures are logged per revision instead of aborting the whole run.
    '''
    # Exactly one of the two revision sources must be provided.
    assert (phabricator is None) ^ (mozreview is None), \
        'Specify a phabricator XOR mozreview parameters'

    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'ANALYZERS',
        ),
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'ANALYZERS': [
                'clang-tidy',
            ],
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    # Settings must be in place before statistics are configured.
    settings.setup(secrets['APP_CHANNEL'])

    # Statistics are only reported when a Datadog key is configured.
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Build the list of revisions to analyse.
    revisions = []
    if phabricator:
        # A single Phabricator revision; needs a matching reporter.
        phab_reporter = reporters.get('phabricator')
        assert phab_reporter is not None, \
            'Cannot use a phabricator revision without a phabricator reporter'
        revisions.append(PhabricatorRevision(phabricator, phab_reporter))
    if mozreview:
        # Several space-separated MozReview revisions are allowed.
        revisions += [MozReviewRevision(r) for r in mozreview.split(' ')]

    workflow = Workflow(cache_root, reporters, secrets['ANALYZERS'])
    for revision in revisions:
        try:
            workflow.run(revision)
        except Exception as e:
            # Log errors to papertrail and keep going with the next revision.
            logger.error(
                'Static analysis failure',
                revision=revision,
                error=e,
            )
def main(
    id,
    work_dir,
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):
    '''
    Entry point: run static analysis on one revision, sourced either from
    Phabricator (diff PHID in *id*) or from a Try task. On failure the
    error is logged, indexed, reported to Harbormaster, then re-raised so
    the task itself is marked as failed.

    NOTE: the *id* parameter shadows the ``id`` builtin; kept for CLI
    compatibility.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'ANALYZERS',
            'PHABRICATOR',
            'ALLOWED_PATHS',
            'MAX_CLONE_RUNTIME',
        ),
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'ANALYZERS': [
                'clang-tidy',
            ],
            'PUBLICATION': 'IN_PATCH',
            'ALLOWED_PATHS': [
                '*',
            ],
            'MAX_CLONE_RUNTIME': 15 * 60,
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
        timestamp=True,
    )

    # Settings must be configured before statistics.
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        work_dir,
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
        secrets.get('COVERITY_CONFIG'),
        secrets['MAX_CLONE_RUNTIME'],
        phabricator.get('build_plan'),
    )

    # Statistics are only reported when a Datadog key is configured.
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Taskcluster services used by the workflow.
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Phabricator API client, shared with the phabricator reporter if any.
    phabricator_api = PhabricatorAPI(phabricator['api_key'], phabricator['url'])
    if 'phabricator' in reporters:
        reporters['phabricator'].setup_api(phabricator_api)

    # Resolve the single revision to analyse, depending on the source.
    if settings.source == SOURCE_PHABRICATOR:
        revision = PhabricatorRevision(phabricator_api, diff_phid=id)
    elif settings.source == SOURCE_TRY:
        revision = PhabricatorRevision(
            phabricator_api,
            try_task=queue_service.task(settings.try_task_id),
        )
    else:
        raise Exception('Unsupported source {}'.format(settings.source))

    # Run the workflow for that revision.
    workflow = Workflow(reporters, secrets['ANALYZERS'], index_service, queue_service, phabricator_api)
    try:
        workflow.run(revision)
    except Exception as e:
        # Log to papertrail first.
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index the analysis state, with error details when available.
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        workflow.index(revision, state='error', **extras)

        # Report the failed build to Harbormaster.
        revision.update_status(state=BuildState.Fail)

        # Re-raise so the task is marked as erroneous.
        raise
def main(taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point: run static analysis on the revision attached to the
    current Try task. On failure the error is logged, indexed, reported
    to Harbormaster (when enabled), then re-raised so the task itself is
    marked as failed.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'PHABRICATOR',
            'ALLOWED_PATHS',
        ),
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'PUBLICATION': 'IN_PATCH',
            'ALLOWED_PATHS': ['*', ],
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
        timestamp=True,
    )

    # Settings must be configured before statistics.
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
    )

    # Statistics are only reported when a Datadog key is configured.
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Taskcluster services used by the workflow.
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Phabricator API client, shared with the phabricator reporter if any.
    phabricator_reporting_enabled = 'phabricator' in reporters
    phabricator_api = PhabricatorAPI(phabricator['api_key'], phabricator['url'])
    if phabricator_reporting_enabled:
        reporters['phabricator'].setup_api(phabricator_api)

    # Resolve the single revision from the Try task.
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),
        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run the workflow for that revision.
    workflow = Workflow(reporters, index_service, queue_service, phabricator_api)
    try:
        workflow.run(revision)
    except Exception as e:
        # Log to papertrail first.
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index the analysis state, with error details when available.
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        workflow.index(revision, state='error', **extras)

        # Report the failed build to Harbormaster.
        revision.update_status(state=BuildState.Fail)

        # Re-raise so the task is marked as erroneous.
        raise
def main(
    commits,
    cache_root,
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):
    '''
    Entry point: load secrets, configure logging, build the MozReview API
    client, and run the static analysis workflow once per parsed commit
    under a lock on the cache directory. Per-commit failures are logged
    and the remaining commits are still processed.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'STATIC_ANALYSIS_NOTIFICATIONS',
            'MOZREVIEW_URL',
            'MOZREVIEW_USER',
            'MOZREVIEW_API_KEY',
        ),
        existing={
            'APP_CHANNEL': 'development',
            # MozReview reporting is opt-in, success publication too.
            'MOZREVIEW_ENABLED': False,
            'MOZREVIEW_PUBLISH_SUCCESS': False,
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    mozreview = build_api_root(
        secrets['MOZREVIEW_URL'],
        secrets['MOZREVIEW_USER'],
        secrets['MOZREVIEW_API_KEY'],
    )

    # Serialize runs sharing the same cache via a lock directory.
    with LockDir(cache_root, 'shipit-sa-') as work_dir:
        workflow = Workflow(
            work_dir,
            secrets['STATIC_ANALYSIS_NOTIFICATIONS'],
            secrets['APP_CHANNEL'],
            mozreview,
            secrets['MOZREVIEW_ENABLED'],
            secrets['MOZREVIEW_PUBLISH_SUCCESS'],
            taskcluster_client_id,
            taskcluster_access_token,
        )
        for commit in REGEX_COMMIT.findall(commits):
            try:
                workflow.run(*commit)
            except Exception as e:
                # Log errors to papertrail and continue with the next commit.
                logger.error(
                    'Static analysis failure',
                    commit=commit,
                    error=e,
                )
def main(source,
         id,
         cache_root,
         mozreview_diffset,
         mozreview_revision,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point: run static analysis on one revision coming either from
    Phabricator or from MozReview, as selected by *source*. On failure the
    error is logged, the analysis state is indexed as 'error', and the
    exception is re-raised so the task is marked as failed.

    NOTE: the *id* parameter shadows the ``id`` builtin; kept for CLI
    compatibility.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'ANALYZERS',
            'PHABRICATOR',
        ),
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'ANALYZERS': ['clang-tidy', ],
            'PUBLICATION': 'IN_PATCH',
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )

    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )

    # Settings must be configured before statistics.
    settings.setup(secrets['APP_CHANNEL'], cache_root, secrets['PUBLICATION'])

    # Statistics are only reported when a Datadog key is configured.
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Phabricator API client, shared with the phabricator reporter if any.
    phabricator_api = PhabricatorAPI(**secrets['PHABRICATOR'])
    if 'phabricator' in reporters:
        reporters['phabricator'].setup_api(phabricator_api)

    # Resolve the single revision to analyse, depending on the source.
    if source == 'phabricator':
        revision = PhabricatorRevision(id, phabricator_api)
    elif source == 'mozreview':
        revision = MozReviewRevision(id, mozreview_revision, mozreview_diffset)
    else:
        raise Exception('Unsupported analysis source: {}'.format(source))

    workflow = Workflow(reporters, secrets['ANALYZERS'], index_service, phabricator_api)
    try:
        workflow.run(revision)
    except Exception as e:
        # Log errors to papertrail.
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index the analysis state as an error.
        workflow.index(revision, state='error')

        # Re-raise so the task is marked as erroneous.
        raise