def __init__(self, configuration):
    '''
    Build a Taskcluster hook bound to a Phabricator instance.

    configuration keys (all required unless noted):
      * hookId: Taskcluster hook identifier under the 'project-releng' group
      * phabricator_url / phabricator_token: Conduit API credentials
      * repositories: optional list of repository names to watch
        (defaults to mozilla-central)

    Raises AssertionError when a required key is missing, when none of the
    enabled repositories exist on the Phabricator instance, or when no diff
    can be fetched to seed the pagination cursor.
    '''
    assert 'hookId' in configuration
    super().__init__(
        'project-releng',
        configuration['hookId'],
    )

    # Connect to Phabricator API
    assert 'phabricator_url' in configuration
    assert 'phabricator_token' in configuration
    self.api = PhabricatorAPI(
        api_key=configuration['phabricator_token'],
        url=configuration['phabricator_url'],
    )

    # List enabled repositories
    enabled = configuration.get('repositories', [
        'mozilla-central',
    ])
    # Index watched repositories by PHID for O(1) lookups later
    self.repos = {
        r['phid']: r
        for r in self.api.list_repositories()
        if r['fields']['name'] in enabled
    }
    assert len(self.repos) > 0, 'No repositories enabled'
    logger.info('Enabled Phabricator repositories',
                repos=[r['fields']['name'] for r in self.repos.values()])

    # Start by getting top id
    # Only diffs newer than this id will be processed afterwards
    diffs = self.api.search_diffs(limit=1)
    assert len(diffs) == 1
    self.latest_id = diffs[0]['id']
def phabricator_base_revision_from_phid(revision_phid):
    '''
    Resolve the mercurial base revision of a Phabricator revision PHID.

    Returns a (payload, http_status) pair:
      * ({'revision': ...}, 200) when the latest diff of the revision has a
        base revision known on mozilla-central
      * ({'error': ...}, 404) when no usable base revision is found
      * ({'error': ..., 'error_code': ...}, 500) on any unexpected failure
    '''
    try:
        api = PhabricatorAPI(secrets.PHABRICATOR_TOKEN)
        diffs = api.search_diffs(revision_phid=revision_phid)
        if diffs:
            # The most recent diff carries the base revision of interest
            base = diffs[-1]['baseRevision']
            if base and revision_exists_on_central(base):
                return {'revision': base}, 200

        return {'error': 'Base revision not found.'}, 404
    except Exception as e:
        # Endpoint boundary: surface the failure as a structured 500
        payload = {
            'error': str(e),
            'error_code': getattr(e, 'error_code', 'unknown'),
        }
        return payload, 500
def main(
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
    cache_root,
    phab_revision,
):
    '''
    Entry point for the pulse listener: load Taskcluster secrets, configure
    logging, connect to Phabricator and run the listener loop.

    When phab_revision is truthy, that revision is queued before the
    listener starts consuming pulse messages.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'PULSE_USER',
            'PULSE_PASSWORD',
            'HOOKS',
            'ADMINS',
            'PHABRICATOR',
            'MERCURIAL_REMOTE',
        ),
        # Defaults applied when the secret does not define these keys
        existing=dict(
            HOOKS=[],
            ADMINS=['*****@*****.**', '*****@*****.**']),
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )
    # Optional integrations (papertrail/sentry/mozdef) activate only when
    # the corresponding secret is present
    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
    )
    task_monitoring.emails = secrets['ADMINS']
    phabricator = PhabricatorAPI(
        api_key=secrets['PHABRICATOR']['token'],
        url=secrets['PHABRICATOR']['url'],
    )
    pl = PulseListener(
        secrets['PULSE_USER'],
        secrets['PULSE_PASSWORD'],
        secrets['HOOKS'],
        secrets['MERCURIAL_REMOTE'],
        phabricator,
        cache_root,
        taskcluster_client_id,
        taskcluster_access_token,
    )
    click.echo('Listening to pulse messages...')
    if phab_revision:
        pl.add_revision(phab_revision)
    # Blocking call: consumes pulse messages until interrupted
    pl.run()
def mock_phabricator():
    '''
    Mock phabricator authentication process.

    Registers a canned JSON response for every Conduit endpoint the tests
    hit, then yields a PhabricatorAPI client pointed at the mock host.
    '''
    def _response(name):
        # Load the canned payload; use a context manager so the file
        # handle is closed instead of leaked (ResourceWarning).
        path = os.path.join(MOCK_DIR, 'phabricator_{}.json'.format(name))
        assert os.path.exists(path)
        with open(path) as f:
            return f.read()

    # Conduit endpoint -> mock payload name
    endpoints = [
        ('user.whoami', 'auth'),
        ('differential.diff.search', 'diff_search'),
        ('differential.revision.search', 'revision_search'),
        ('differential.query', 'diff_query'),
        ('differential.getrawdiff', 'diff_raw'),
        ('edge.search', 'edge_search'),
    ]
    for endpoint, name in endpoints:
        responses.add(
            responses.POST,
            'http://phabricator.test/api/{}'.format(endpoint),
            body=_response(name),
            content_type='application/json',
        )

    yield PhabricatorAPI(
        url='http://phabricator.test/api/',
        api_key='deadbeef',
    )
def upload(self, report, changesets=None):
    '''
    Generate per-revision coverage from the report and push it to
    Phabricator (when enabled via secrets), along with a placeholder
    "pass" lint result per revision.
    '''
    results = self.generate(report, changesets)

    if secrets[secrets.PHABRICATOR_ENABLED]:
        phabricator = PhabricatorAPI(secrets[secrets.PHABRICATOR_TOKEN], secrets[secrets.PHABRICATOR_URL])
    else:
        phabricator = None

    for rev_id, coverage in results.items():
        logger.info('{} coverage: {}'.format(rev_id, coverage))

        # Skip upload when reporting is disabled or there is nothing to report
        if not phabricator or not coverage:
            continue

        try:
            rev_data = phabricator.load_revision(rev_id=rev_id)
            phabricator.upload_coverage_results(rev_data['fields']['diffPHID'], coverage)
            # XXX: This is only necessary until https://bugzilla.mozilla.org/show_bug.cgi?id=1487843 is resolved.
            phabricator.upload_lint_results(rev_data['fields']['diffPHID'], 'pass', [])
        except PhabricatorRevisionNotFoundException:
            # One unknown revision must not abort uploads for the remaining
            # revisions in this report (matches the newer upload() variant).
            logger.warn('Phabricator revision not found', rev_id=rev_id)
def upload(self, report, changesets=None):
    '''
    Generate per-revision coverage from the report and push it to
    Phabricator (when enabled via secrets), along with a placeholder
    passing lint result per revision.
    '''
    results = self.generate(report, changesets)

    if secrets[secrets.PHABRICATOR_ENABLED]:
        phabricator = PhabricatorAPI(secrets[secrets.PHABRICATOR_TOKEN], secrets[secrets.PHABRICATOR_URL])
    else:
        phabricator = None

    for rev_id, coverage in results.items():
        logger.info('{} coverage: {}'.format(rev_id, coverage))

        # Skip upload when reporting is disabled or there is nothing to report
        if not phabricator or not coverage:
            continue

        try:
            rev_data = phabricator.load_revision(rev_id=rev_id)
            phabricator.upload_coverage_results(rev_data['fields']['diffPHID'], coverage)
            # XXX: This is only necessary until https://bugzilla.mozilla.org/show_bug.cgi?id=1487843 is resolved.
            phabricator.upload_lint_results(rev_data['fields']['diffPHID'], BuildState.Pass, [])
        except PhabricatorRevisionNotFoundException:
            # Keep uploading results for the other revisions in this report
            logger.warn('Phabricator revision not found', rev_id=rev_id)
def main(source, id, cache_root,
         mozreview_diffset,
         mozreview_revision,
         taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point for one static-analysis run: load secrets, configure
    logging/stats/reporters, build a revision object from the requested
    source ('phabricator' or 'mozreview') and execute the workflow.

    On failure the error is logged, the analysis is indexed with
    state='error', and the exception is re-raised to fail the task.
    '''
    secrets = get_secrets(taskcluster_secret,
                          config.PROJECT_NAME,
                          required=(
                              'APP_CHANNEL',
                              'REPORTERS',
                              'ANALYZERS',
                              'PHABRICATOR',
                          ),
                          # Defaults for keys absent from the secret
                          existing={
                              'APP_CHANNEL': 'development',
                              'REPORTERS': [],
                              'ANALYZERS': ['clang-tidy', ],
                              'PUBLICATION': 'IN_PATCH',
                          },
                          taskcluster_client_id=taskcluster_client_id,
                          taskcluster_access_token=taskcluster_access_token,
                          )
    init_logger(config.PROJECT_NAME,
                PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
                PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
                SENTRY_DSN=secrets.get('SENTRY_DSN'),
                MOZDEF=secrets.get('MOZDEF'),
                )

    # Setup settings before stats
    settings.setup(secrets['APP_CHANNEL'], cache_root, secrets['PUBLICATION'])

    # Setup statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_api = PhabricatorAPI(**secrets['PHABRICATOR'])
    if 'phabricator' in reporters:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision
    if source == 'phabricator':
        revision = PhabricatorRevision(id, phabricator_api)
    elif source == 'mozreview':
        revision = MozReviewRevision(id, mozreview_revision, mozreview_diffset)
    else:
        raise Exception('Unsupported analysis source: {}'.format(source))

    w = Workflow(reporters, secrets['ANALYZERS'], index_service, phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        w.index(revision, state='error')

        # Then raise to mark task as erroneous
        raise
def PhabricatorMock():
    '''
    Mock phabricator authentication process.

    Builds a responses.RequestsMock with dynamic callbacks for the Conduit
    endpoints exercised by the tests, and yields a PhabricatorAPI client
    whose `.mocks` attribute exposes the mock for callback assertions.
    '''
    json_headers = {
        'Content-Type': 'application/json',
    }

    def _response(name):
        # Load the canned payload; context manager avoids leaking the
        # file handle (ResourceWarning).
        path = os.path.join(MOCK_DIR, 'phabricator', '{}.json'.format(name))
        assert os.path.exists(path), 'Missing mock {}'.format(path)
        with open(path) as f:
            return f.read()

    def _phab_params(request):
        # What a weird way to send parameters
        return json.loads(urllib.parse.parse_qs(request.body)['params'][0])

    def _diff_search(request):
        # Pick the mock payload from the search constraints used
        params = _phab_params(request)
        assert 'constraints' in params
        if 'revisionPHIDs' in params['constraints']:
            # Search from revision
            mock_name = 'search-{}'.format(params['constraints']['revisionPHIDs'][0])
        elif 'phids' in params['constraints']:
            # Search from diffs
            diffs = '-'.join(params['constraints']['phids'])
            mock_name = 'search-{}'.format(diffs)
        else:
            raise Exception('Unsupported diff mock {}'.format(params))
        return (200, json_headers, _response(mock_name))

    def _diff_raw(request):
        params = _phab_params(request)
        assert 'diffID' in params
        return (200, json_headers, _response('raw-{}'.format(params['diffID'])))

    def _edges(request):
        params = _phab_params(request)
        assert 'sourcePHIDs' in params
        return (200, json_headers, _response('edges-{}'.format(params['sourcePHIDs'][0])))

    def _create_artifact(request):
        params = _phab_params(request)
        assert 'buildTargetPHID' in params
        return (200, json_headers, _response('artifact-{}'.format(params['buildTargetPHID'])))

    def _send_message(request):
        # NOTE: removed a stray debug print(params) left over here
        params = _phab_params(request)
        assert 'buildTargetPHID' in params
        # Mock name encodes the target, message type and attached results
        name = 'message-{}-{}'.format(params['buildTargetPHID'], params['type'])
        if params['unit']:
            name += '-unit'
        if params['lint']:
            name += '-lint'
        return (200, json_headers, _response(name))

    with responses.RequestsMock(assert_all_requests_are_fired=False) as resp:
        # Static payload for authentication
        resp.add(
            responses.POST,
            'http://phabricator.test/api/user.whoami',
            body=_response('auth'),
            content_type='application/json',
        )

        # Dynamic callbacks, one per mocked Conduit endpoint
        callbacks = [
            ('edge.search', _edges),
            ('differential.diff.search', _diff_search),
            ('differential.getrawdiff', _diff_raw),
            ('harbormaster.createartifact', _create_artifact),
            ('harbormaster.sendmessage', _send_message),
        ]
        for endpoint, callback in callbacks:
            resp.add_callback(
                responses.POST,
                'http://phabricator.test/api/{}'.format(endpoint),
                callback=callback,
            )

        api = PhabricatorAPI(
            url='http://phabricator.test/api/',
            api_key='deadbeef',
        )
        api.mocks = resp  # used to assert in tests on callbacks
        yield api
def mock_phabricator(mock_config):
    '''
    Mock phabricator authentication process.

    Registers a canned JSON response for every Conduit endpoint the tests
    hit, then yields a PhabricatorAPI client pointed at the mock host.
    mock_config is unused here — presumably a fixture ordering dependency;
    TODO confirm against the test suite.
    '''
    def _response(name):
        # Load the canned payload; use a context manager so the file
        # handle is closed instead of leaked (ResourceWarning).
        path = os.path.join(MOCK_DIR, 'phabricator_{}.json'.format(name))
        assert os.path.exists(path)
        with open(path) as f:
            return f.read()

    # Conduit endpoint -> mock payload name
    endpoints = [
        ('user.whoami', 'auth'),
        ('differential.diff.search', 'diff_search'),
        ('differential.revision.search', 'revision_search'),
        ('differential.query', 'diff_query'),
        ('differential.getrawdiff', 'diff_raw'),
        ('differential.createinline', 'createinline'),
        ('edge.search', 'edge_search'),
        ('transaction.search', 'transaction_search'),
        ('harbormaster.target.search', 'target_search'),
        ('harbormaster.build.search', 'build_search'),
        ('harbormaster.buildable.search', 'buildable_search'),
        ('harbormaster.sendmessage', 'send_message'),
    ]
    for endpoint, name in endpoints:
        responses.add(
            responses.POST,
            'http://phabricator.test/api/{}'.format(endpoint),
            body=_response(name),
            content_type='application/json',
        )

    yield PhabricatorAPI(
        url='http://phabricator.test/api/',
        api_key='deadbeef',
    )
def __init__(self, repo_dir, revision):
    '''
    Hold a repository checkout location and a revision, with a
    Phabricator client built from the secrets store.
    '''
    self.repo_dir = repo_dir
    self.revision = revision
    # Credentials come from the shared secrets store
    self.phabricator = PhabricatorAPI(secrets[secrets.PHABRICATOR_TOKEN], secrets[secrets.PHABRICATOR_URL])
class HookPhabricator(Hook):
    '''
    Taskcluster hook handling the static analysis
    for Phabricator differentials
    '''
    # Highest diff id already seen; acts as the pagination cursor
    latest_id = None

    def __init__(self, configuration):
        '''
        Connect to Phabricator, index the enabled repositories by PHID and
        seed the pagination cursor with the most recent diff id.
        '''
        assert 'hookId' in configuration
        super().__init__(
            'project-releng',
            configuration['hookId'],
        )

        # Connect to Phabricator API
        assert 'phabricator_url' in configuration
        assert 'phabricator_token' in configuration
        self.api = PhabricatorAPI(
            api_key=configuration['phabricator_token'],
            url=configuration['phabricator_url'],
        )

        # List enabled repositories
        enabled = configuration.get('repositories', [
            'mozilla-central',
        ])
        self.repos = {
            r['phid']: r
            for r in self.api.list_repositories()
            if r['fields']['name'] in enabled
        }
        assert len(self.repos) > 0, 'No repositories enabled'
        logger.info('Enabled Phabricator repositories',
                    repos=[r['fields']['name'] for r in self.repos.values()])

        # Start by getting top id
        diffs = self.api.search_diffs(limit=1)
        assert len(diffs) == 1
        self.latest_id = diffs[0]['id']

    def list_differential(self):
        '''
        List new differential items using pagination using an iterator
        '''
        cursor = self.latest_id
        while cursor is not None:
            # Always page after self.latest_id, which is advanced below
            diffs, cursor = self.api.search_diffs(
                order='oldest',
                limit=20,
                after=self.latest_id,
                output_cursor=True,
            )
            if not diffs:
                break
            for diff in diffs:
                yield diff

            # Update the latest id
            if cursor and cursor['after']:
                self.latest_id = cursor['after']
            elif len(diffs) > 0:
                # No cursor: fall back to the last diff id of this page
                self.latest_id = diffs[-1]['id']

    async def build_consumer(self, *args, **kwargs):
        '''
        Query phabricator differentials regularly
        '''
        while True:
            # Get new differential ids
            for diff in self.list_differential():
                # Only actual diffs trigger an analysis task
                if diff['type'] != 'DIFF':
                    logger.info('Skipping differential, not a diff', id=diff['id'], type=diff['type'])
                    continue

                # Load revision to check the repository is authorized
                rev = self.api.load_revision(diff['revisionPHID'])
                repo_phid = rev['fields']['repositoryPHID']
                if repo_phid not in self.repos:
                    logger.info('Skipping differential, repo not enabled', id=diff['id'], repo=repo_phid)
                    continue

                # Create new task
                await self.create_task({
                    'ANALYSIS_SOURCE': 'phabricator',
                    'ANALYSIS_ID': diff['phid']
                })

            # Sleep a bit before trying new diffs
            await asyncio.sleep(60)
def main(taskcluster_secret,
         taskcluster_client_id,
         taskcluster_access_token,
         ):
    '''
    Entry point for a static-analysis run driven by a try task: load
    secrets, configure logging/stats/reporters and Taskcluster services,
    build the Revision from the try task and execute the workflow.

    On failure the error is logged, indexed with state='error' (plus
    AnalysisException details when available), the Harbormaster build is
    marked failed, and the exception is re-raised to fail the task.
    '''
    secrets = get_secrets(taskcluster_secret,
                          config.PROJECT_NAME,
                          required=(
                              'APP_CHANNEL',
                              'REPORTERS',
                              'PHABRICATOR',
                              'ALLOWED_PATHS',
                          ),
                          # Defaults for keys absent from the secret
                          existing={
                              'APP_CHANNEL': 'development',
                              'REPORTERS': [],
                              'PUBLICATION': 'IN_PATCH',
                              'ALLOWED_PATHS': ['*', ],
                          },
                          taskcluster_client_id=taskcluster_client_id,
                          taskcluster_access_token=taskcluster_access_token,
                          )
    init_logger(config.PROJECT_NAME,
                PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
                PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
                SENTRY_DSN=secrets.get('SENTRY_DSN'),
                MOZDEF=secrets.get('MOZDEF'),
                timestamp=True,
                )

    # Setup settings before stats
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
    )

    # Setup statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load queue service
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_reporting_enabled = 'phabricator' in reporters
    phabricator_api = PhabricatorAPI(phabricator['api_key'], phabricator['url'])
    if phabricator_reporting_enabled:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision
    revision = Revision(
        phabricator_api,
        try_task=queue_service.task(settings.try_task_id),

        # Update build status only when phabricator reporting is enabled
        update_build=phabricator_reporting_enabled,
    )

    # Run workflow according to source
    w = Workflow(reporters, index_service, queue_service, phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        w.index(revision, state='error', **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise
def PhabricatorMock():
    '''
    Mock phabricator authentication process.

    Builds a responses.RequestsMock with dynamic callbacks for the Conduit
    endpoints exercised by the tests, then yields a PhabricatorAPI client
    pointed at the mock host.
    '''
    json_headers = {
        'Content-Type': 'application/json',
    }

    def _response(name):
        # Load the canned payload; context manager avoids leaking the
        # file handle (ResourceWarning).
        path = os.path.join(MOCK_DIR, 'phabricator', '{}.json'.format(name))
        assert os.path.exists(path)
        with open(path) as f:
            return f.read()

    def _phab_params(request):
        # What a weird way to send parameters
        return json.loads(urllib.parse.parse_qs(request.body)['params'][0])

    def _diff_search(request):
        params = _phab_params(request)
        assert 'constraints' in params
        rev = params['constraints']['revisionPHIDs'][0]
        return (200, json_headers, _response('search-{}'.format(rev)))

    def _diff_raw(request):
        params = _phab_params(request)
        assert 'diffID' in params
        return (200, json_headers, _response('raw-{}'.format(params['diffID'])))

    def _edges(request):
        params = _phab_params(request)
        assert 'sourcePHIDs' in params
        return (200, json_headers, _response('edges-{}'.format(params['sourcePHIDs'][0])))

    with responses.RequestsMock(assert_all_requests_are_fired=False) as resp:
        # Static payload for authentication
        resp.add(
            responses.POST,
            'http://phabricator.test/api/user.whoami',
            body=_response('auth'),
            content_type='application/json',
        )
        # Dynamic callbacks, one per mocked Conduit endpoint
        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/edge.search',
            callback=_edges,
        )
        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/differential.diff.search',
            callback=_diff_search,
        )
        resp.add_callback(
            responses.POST,
            'http://phabricator.test/api/differential.getrawdiff',
            callback=_diff_raw,
        )
        yield PhabricatorAPI(
            url='http://phabricator.test/api/',
            api_key='deadbeef',
        )
def main(
    id,
    work_dir,
    taskcluster_secret,
    taskcluster_client_id,
    taskcluster_access_token,
):
    '''
    Entry point for a static-analysis run: load secrets, configure
    logging/stats/reporters and Taskcluster services, build the revision
    from the configured source (direct Phabricator diff or try task) and
    execute the workflow.

    On failure the error is logged, indexed with state='error' (plus
    AnalysisException details when available), the Harbormaster build is
    marked failed, and the exception is re-raised to fail the task.
    '''
    secrets = get_secrets(
        taskcluster_secret,
        config.PROJECT_NAME,
        required=(
            'APP_CHANNEL',
            'REPORTERS',
            'ANALYZERS',
            'PHABRICATOR',
            'ALLOWED_PATHS',
            'MAX_CLONE_RUNTIME',
        ),
        # Defaults for keys absent from the secret
        existing={
            'APP_CHANNEL': 'development',
            'REPORTERS': [],
            'ANALYZERS': [
                'clang-tidy',
            ],
            'PUBLICATION': 'IN_PATCH',
            'ALLOWED_PATHS': [
                '*',
            ],
            'MAX_CLONE_RUNTIME': 15 * 60,
        },
        taskcluster_client_id=taskcluster_client_id,
        taskcluster_access_token=taskcluster_access_token,
    )
    init_logger(
        config.PROJECT_NAME,
        PAPERTRAIL_HOST=secrets.get('PAPERTRAIL_HOST'),
        PAPERTRAIL_PORT=secrets.get('PAPERTRAIL_PORT'),
        SENTRY_DSN=secrets.get('SENTRY_DSN'),
        MOZDEF=secrets.get('MOZDEF'),
        timestamp=True,
    )

    # Setup settings before stats
    phabricator = secrets['PHABRICATOR']
    settings.setup(
        secrets['APP_CHANNEL'],
        work_dir,
        secrets['PUBLICATION'],
        secrets['ALLOWED_PATHS'],
        secrets.get('COVERITY_CONFIG'),
        secrets['MAX_CLONE_RUNTIME'],
        phabricator.get('build_plan'),
    )

    # Setup statistics
    datadog_api_key = secrets.get('DATADOG_API_KEY')
    if datadog_api_key:
        stats.auth(datadog_api_key)

    # Load reporters
    reporters = get_reporters(
        secrets['REPORTERS'],
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load index service
    index_service = get_service(
        'index',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load queue service
    queue_service = get_service(
        'queue',
        taskcluster_client_id,
        taskcluster_access_token,
    )

    # Load Phabricator API
    phabricator_api = PhabricatorAPI(phabricator['api_key'], phabricator['url'])
    if 'phabricator' in reporters:
        reporters['phabricator'].setup_api(phabricator_api)

    # Load unique revision
    if settings.source == SOURCE_PHABRICATOR:
        # Triggered directly from a Phabricator diff
        revision = PhabricatorRevision(phabricator_api, diff_phid=id)
    elif settings.source == SOURCE_TRY:
        # Triggered from a try push; resolve the revision from the try task
        revision = PhabricatorRevision(phabricator_api,
                                       try_task=queue_service.task(settings.try_task_id))
    else:
        raise Exception('Unsupported source {}'.format(settings.source))

    # Run workflow according to source
    w = Workflow(reporters, secrets['ANALYZERS'], index_service, queue_service, phabricator_api)
    try:
        w.run(revision)
    except Exception as e:
        # Log errors to papertrail
        logger.error(
            'Static analysis failure',
            revision=revision,
            error=e,
        )

        # Index analysis state
        extras = {}
        if isinstance(e, AnalysisException):
            extras['error_code'] = e.code
            extras['error_message'] = str(e)
        w.index(revision, state='error', **extras)

        # Update Harbormaster status
        revision.update_status(state=BuildState.Fail)

        # Then raise to mark task as erroneous
        raise